github.com/dotnet/runtime.git
author     Tanner Gooding <tagoo@outlook.com>       2022-09-30 04:24:23 +0300
committer  GitHub <noreply@github.com>              2022-09-30 04:24:23 +0300
commit     bfef7ccc856ac307fd5102c00ea54ddbe0b06fa3 (patch)
tree       643ba0cccdd0a11ab52192dc5a8ce514b842f7cc /src
parent     43fa22f07449f24f6959b959e49c69aff91a8a14 (diff)
Remove GT_ADDEX and replace with more generalized containment handling (#76273)
* Remove GT_ADDEX and replace it with more generalized containment handling
* Handle small types for (extended register) instructions
* Remember to handle IsRegOptional
* Apply the formatting patch
* Prefer containing the CAST in `ADD(op1, CAST(op2))` rather than the operand of the CAST
* Update src/coreclr/jit/lowerarmarch.cpp
* Add a test covering the caught Fuzzlyn scenario
* Ensure the new test returns 100
* Skip the Fuzzlyn-generated regression test on Mono wasm/llvmaot/llvmfullaot
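
For context (not part of the commit): a minimal C# sketch of the source shape this generalized containment targets. The method names are illustrative, and the quoted ARM64 forms are what the extended-register encodings would be expected to produce, not verified disassembly.

    // Illustrative only; assumes the containment described above kicks in.
    public static class AddExContainmentSketch
    {
        // ADD(op1, CAST(op2)) with an int -> long widening cast: previously lowered
        // through the ARM64-only GT_ADDEX node, now handled by containing the CAST under
        // the ADD and emitting ADD (extended register), roughly `add x0, x0, w1, sxtw`.
        public static long AddWidened(long a, int b) => a + b;

        // With an unsigned source, the zero-extending form (uxtw) would be expected instead.
        public static long AddWidenedUnsigned(long a, uint b) => a + b;
    }
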
Diffstat (limited to 'src')
-rw-r--r--  src/coreclr/jit/codegen.h                                           |   1
-rw-r--r--  src/coreclr/jit/codegenarm64.cpp                                    | 104
-rw-r--r--  src/coreclr/jit/codegenarmarch.cpp                                  |   4
-rw-r--r--  src/coreclr/jit/codegencommon.cpp                                   |  24
-rw-r--r--  src/coreclr/jit/gtlist.h                                            |   1
-rw-r--r--  src/coreclr/jit/lower.cpp                                           |   7
-rw-r--r--  src/coreclr/jit/lowerarmarch.cpp                                    | 111
-rw-r--r--  src/coreclr/jit/lsraarm64.cpp                                       |   1
-rw-r--r--  src/tests/JIT/Regression/JitBlue/Runtime_76273/Runtime_76273.cs     |  73
-rw-r--r--  src/tests/JIT/Regression/JitBlue/Runtime_76273/Runtime_76273.csproj |   9
-rw-r--r--  src/tests/issues.targets                                            |   6
11 files changed, 207 insertions(+), 134 deletions(-)
diff --git a/src/coreclr/jit/codegen.h b/src/coreclr/jit/codegen.h
index dcf00a6cb77..641aeaaef55 100644
--- a/src/coreclr/jit/codegen.h
+++ b/src/coreclr/jit/codegen.h
@@ -1397,7 +1397,6 @@ protected:
#if defined(TARGET_ARM64)
void genCodeForJumpCompare(GenTreeOp* tree);
void genCodeForBfiz(GenTreeOp* tree);
- void genCodeForAddEx(GenTreeOp* tree);
void genCodeForCond(GenTreeOp* tree);
#endif // TARGET_ARM64
diff --git a/src/coreclr/jit/codegenarm64.cpp b/src/coreclr/jit/codegenarm64.cpp
index 76dccfad593..be1e2894044 100644
--- a/src/coreclr/jit/codegenarm64.cpp
+++ b/src/coreclr/jit/codegenarm64.cpp
@@ -2639,6 +2639,62 @@ void CodeGen::genCodeForBinary(GenTreeOp* tree)
genProduceReg(tree);
return;
}
+ else if (op2->OperIs(GT_CAST) && op2->isContained())
+ {
+ assert(varTypeIsIntegral(tree));
+
+ GenTree* a = op1;
+ GenTree* b = op2->AsCast()->CastOp();
+
+ instruction ins = genGetInsForOper(tree->OperGet(), targetType);
+ insOpts opt = INS_OPTS_NONE;
+
+ if ((tree->gtFlags & GTF_SET_FLAGS) != 0)
+ {
+ // A subset of operations can still set flags
+
+ switch (oper)
+ {
+ case GT_ADD:
+ {
+ ins = INS_adds;
+ break;
+ }
+
+ case GT_SUB:
+ {
+ ins = INS_subs;
+ break;
+ }
+
+ default:
+ {
+ noway_assert(!"Unexpected BinaryOp with GTF_SET_FLAGS set");
+ }
+ }
+ }
+
+ bool isZeroExtending = op2->AsCast()->IsZeroExtending();
+
+ if (varTypeIsByte(op2->CastToType()))
+ {
+ opt = isZeroExtending ? INS_OPTS_UXTB : INS_OPTS_SXTB;
+ }
+ else if (varTypeIsShort(op2->CastToType()))
+ {
+ opt = isZeroExtending ? INS_OPTS_UXTH : INS_OPTS_SXTH;
+ }
+ else
+ {
+ assert(op2->TypeIs(TYP_LONG) && genActualTypeIsInt(b));
+ opt = isZeroExtending ? INS_OPTS_UXTW : INS_OPTS_SXTW;
+ }
+
+ emit->emitIns_R_R_R(ins, emitActualTypeSize(tree), targetReg, a->GetRegNum(), b->GetRegNum(), opt);
+
+ genProduceReg(tree);
+ return;
+ }
if (tree->OperIs(GT_AND) && op2->isContainedAndNotIntOrIImmed())
{
@@ -10565,54 +10621,6 @@ void CodeGen::genCodeForBfiz(GenTreeOp* tree)
}
//------------------------------------------------------------------------
-// genCodeForAddEx: Generates the code sequence for a GenTree node that
-// represents an addition with sign or zero extended
-//
-// Arguments:
-// tree - the add with extend node.
-//
-void CodeGen::genCodeForAddEx(GenTreeOp* tree)
-{
- assert(tree->OperIs(GT_ADDEX));
- genConsumeOperands(tree);
-
- GenTree* op;
- GenTree* containedOp;
- if (tree->gtGetOp1()->isContained())
- {
- containedOp = tree->gtGetOp1();
- op = tree->gtGetOp2();
- }
- else
- {
- containedOp = tree->gtGetOp2();
- op = tree->gtGetOp1();
- }
- assert(containedOp->isContained() && !op->isContained());
-
- regNumber dstReg = tree->GetRegNum();
- regNumber op1Reg = op->GetRegNum();
- regNumber op2Reg = containedOp->gtGetOp1()->GetRegNum();
-
- if (containedOp->OperIs(GT_CAST))
- {
- GenTreeCast* cast = containedOp->AsCast();
- assert(varTypeIsLong(cast->CastToType()));
- insOpts opts = cast->IsUnsigned() ? INS_OPTS_UXTW : INS_OPTS_SXTW;
- GetEmitter()->emitIns_R_R_R(tree->gtSetFlags() ? INS_adds : INS_add, emitActualTypeSize(tree), dstReg, op1Reg,
- op2Reg, opts);
- }
- else
- {
- assert(containedOp->OperIs(GT_LSH));
- ssize_t cns = containedOp->gtGetOp2()->AsIntCon()->IconValue();
- GetEmitter()->emitIns_R_R_R_I(tree->gtSetFlags() ? INS_adds : INS_add, emitActualTypeSize(tree), dstReg, op1Reg,
- op2Reg, cns, INS_OPTS_LSL);
- }
- genProduceReg(tree);
-}
-
-//------------------------------------------------------------------------
// genCodeForCond: Generates the code sequence for a GenTree node that
// represents a conditional instruction.
//
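
A short C# sketch (illustrative, not from the commit) of the small-type cases the new genCodeForBinary path above is meant to cover; the expected instructions are assumptions based on the insOpts selection in the hunk.

    public static class SmallTypeExtendSketch
    {
        // An unsigned byte downcast feeding the add: the contained cast should select the
        // zero-extending byte option, roughly `add w0, w0, w1, uxtb`.
        public static int AddByteMasked(int a, int b) => a + (byte)b;

        // A signed short downcast feeding a subtract: the sign-extending halfword option,
        // roughly `sub w0, w0, w1, sxth`.
        public static int SubShortMasked(int a, int b) => a - (short)b;
    }

When the operation also has to set flags (GTF_SET_FLAGS), the hunk switches to adds/subs while keeping the same extend option.
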
diff --git a/src/coreclr/jit/codegenarmarch.cpp b/src/coreclr/jit/codegenarmarch.cpp
index f7bb7a91f14..df593f7b5ce 100644
--- a/src/coreclr/jit/codegenarmarch.cpp
+++ b/src/coreclr/jit/codegenarmarch.cpp
@@ -315,10 +315,6 @@ void CodeGen::genCodeForTreeNode(GenTree* treeNode)
genCodeForSwap(treeNode->AsOp());
break;
- case GT_ADDEX:
- genCodeForAddEx(treeNode->AsOp());
- break;
-
case GT_BFIZ:
genCodeForBfiz(treeNode->AsOp());
break;
diff --git a/src/coreclr/jit/codegencommon.cpp b/src/coreclr/jit/codegencommon.cpp
index c365528cbf4..6e4e7ef0bab 100644
--- a/src/coreclr/jit/codegencommon.cpp
+++ b/src/coreclr/jit/codegencommon.cpp
@@ -1030,14 +1030,7 @@ bool CodeGen::genCreateAddrMode(
if (!addr->OperIs(GT_ADD))
{
-#if TARGET_ARM64
- if (!addr->OperIs(GT_ADDEX))
- {
- return false;
- }
-#else
return false;
-#endif
}
GenTree* rv1 = nullptr;
@@ -1064,23 +1057,6 @@ bool CodeGen::genCreateAddrMode(
op2 = addr->AsOp()->gtOp2;
}
-#if TARGET_ARM64
- if (addr->OperIs(GT_ADDEX))
- {
- if (op2->isContained() && op2->OperIs(GT_CAST))
- {
- *rv1Ptr = op1;
- *rv2Ptr = op2;
- *mulPtr = 1;
- *cnsPtr = 0;
- *revPtr = false; // op2 is never a gc type
- assert(!varTypeIsGC(op2));
- return true;
- }
- return false;
- }
-#endif
-
// Can't use indirect addressing mode as we need to check for overflow.
// Also, can't use 'lea' as it doesn't set the flags.
diff --git a/src/coreclr/jit/gtlist.h b/src/coreclr/jit/gtlist.h
index 56ea06b2a06..65a77bf6a3e 100644
--- a/src/coreclr/jit/gtlist.h
+++ b/src/coreclr/jit/gtlist.h
@@ -220,7 +220,6 @@ GTNODE(MUL_LONG , GenTreeOp ,1,GTK_BINOP|DBK_NOTHIR)
GTNODE(AND_NOT , GenTreeOp ,0,GTK_BINOP|DBK_NOTHIR)
#ifdef TARGET_ARM64
-GTNODE(ADDEX, GenTreeOp ,0,GTK_BINOP|DBK_NOTHIR) // Add with sign/zero extension.
GTNODE(BFIZ , GenTreeOp ,0,GTK_BINOP|DBK_NOTHIR) // Bitfield Insert in Zero.
GTNODE(CSNEG_MI , GenTreeOp ,0,GTK_BINOP|DBK_NOTHIR) // Conditional select, negate, minus result
GTNODE(CNEG_LT , GenTreeOp ,0,GTK_UNOP|DBK_NOTHIR) // Conditional, negate, signed less than result
diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp
index 176828d17ed..a1d64932225 100644
--- a/src/coreclr/jit/lower.cpp
+++ b/src/coreclr/jit/lower.cpp
@@ -5213,14 +5213,7 @@ bool Lowering::TryCreateAddrMode(GenTree* addr, bool isContainable, GenTree* par
{
if (!addr->OperIs(GT_ADD) || addr->gtOverflow())
{
-#ifdef TARGET_ARM64
- if (!addr->OperIs(GT_ADDEX))
- {
- return false;
- }
-#else
return false;
-#endif
}
#ifdef TARGET_ARM64
diff --git a/src/coreclr/jit/lowerarmarch.cpp b/src/coreclr/jit/lowerarmarch.cpp
index f75711660d7..1eae6c6dbc0 100644
--- a/src/coreclr/jit/lowerarmarch.cpp
+++ b/src/coreclr/jit/lowerarmarch.cpp
@@ -244,17 +244,12 @@ bool Lowering::IsContainableBinaryOp(GenTree* parentNode, GenTree* childNode) co
return false;
}
- ssize_t shiftAmount = shiftAmountNode->AsIntCon()->IconValue();
+ const ssize_t shiftAmount = shiftAmountNode->AsIntCon()->IconValue();
+ const ssize_t maxShift = (static_cast<ssize_t>(genTypeSize(parentNode)) * BITS_IN_BYTE) - 1;
- if ((shiftAmount < 0x01) || (shiftAmount > 0x3F))
+ if ((shiftAmount < 0x01) || (shiftAmount > maxShift))
{
- // Cannot contain if the shift amount is less than 1 or greater than 63
- return false;
- }
-
- if (!varTypeIsLong(childNode) && (shiftAmount > 0x1F))
- {
- // Cannot contain if the shift amount is greater than 31
+ // Cannot contain if the shift amount is less than 1 or greater than maxShift
return false;
}
@@ -275,7 +270,7 @@ bool Lowering::IsContainableBinaryOp(GenTree* parentNode, GenTree* childNode) co
return false;
}
- if (parentNode->OperIs(GT_CMP, GT_AND, GT_OR, GT_XOR))
+ if (parentNode->OperIs(GT_CMP, GT_OR, GT_XOR))
{
if (IsSafeToContainMem(parentNode, childNode))
{
@@ -288,6 +283,64 @@ bool Lowering::IsContainableBinaryOp(GenTree* parentNode, GenTree* childNode) co
return false;
}
+ if (childNode->OperIs(GT_CAST))
+ {
+ // Find "a op cast(b)"
+ GenTree* castOp = childNode->AsCast()->CastOp();
+
+ // We want to prefer the combined op here over containment of the cast op
+ castOp->ClearContained();
+
+ bool isSupportedCast = false;
+
+ if (varTypeIsSmall(childNode->CastToType()))
+ {
+ // The JIT doesn't track upcasts from small types, instead most types
+ // are tracked as TYP_INT and then we get explicit downcasts to the
+ // desired small type instead.
+
+ assert(!varTypeIsFloating(castOp));
+ isSupportedCast = true;
+ }
+ else if (childNode->TypeIs(TYP_LONG) && genActualTypeIsInt(castOp))
+ {
+ // We can handle "INT -> LONG", "INT -> ULONG", "UINT -> LONG", and "UINT -> ULONG"
+ isSupportedCast = true;
+ }
+
+ if (!isSupportedCast)
+ {
+ return false;
+ }
+
+ if (parentNode->OperIs(GT_ADD, GT_SUB))
+ {
+ // These operations can still report flags
+
+ if (IsSafeToContainMem(parentNode, childNode))
+ {
+ return true;
+ }
+ }
+
+ if ((parentNode->gtFlags & GTF_SET_FLAGS) != 0)
+ {
+ // Cannot contain if the parent operation needs to set flags
+ return false;
+ }
+
+ if (parentNode->OperIs(GT_CMP))
+ {
+ if (IsSafeToContainMem(parentNode, childNode))
+ {
+ return true;
+ }
+ }
+
+ // TODO: Handle CMN
+ return false;
+ }
+
return false;
}
#endif // TARGET_ARM64
@@ -1968,44 +2021,6 @@ void Lowering::ContainCheckBinary(GenTreeOp* node)
return;
}
}
-
- // Change ADD TO ADDEX for ADD(X, CAST(Y)) or ADD(CAST(X), Y) where CAST is int->long
- // or for ADD(LSH(X, CNS), X) or ADD(X, LSH(X, CNS)) where CNS is in the (0..typeWidth) range
- if (node->OperIs(GT_ADD) && !op1->isContained() && !op2->isContained() && varTypeIsIntegral(node) &&
- !node->gtOverflow())
- {
- assert(!node->isContained());
-
- if (op1->OperIs(GT_CAST) || op2->OperIs(GT_CAST))
- {
- GenTree* cast = op1->OperIs(GT_CAST) ? op1 : op2;
- if (cast->gtGetOp1()->TypeIs(TYP_INT) && cast->TypeIs(TYP_LONG) && !cast->gtOverflow())
- {
- node->ChangeOper(GT_ADDEX);
- cast->AsCast()->CastOp()->ClearContained(); // Uncontain any memory operands.
- MakeSrcContained(node, cast);
- }
- }
- else if (op1->OperIs(GT_LSH) || op2->OperIs(GT_LSH))
- {
- GenTree* lsh = op1->OperIs(GT_LSH) ? op1 : op2;
- GenTree* shiftBy = lsh->gtGetOp2();
-
- if (shiftBy->IsCnsIntOrI())
- {
- const ssize_t shiftByCns = shiftBy->AsIntCon()->IconValue();
- const ssize_t maxShift = (ssize_t)genTypeSize(node) * BITS_IN_BYTE;
-
- if ((shiftByCns > 0) && (shiftByCns < maxShift))
- {
- // shiftBy is small so it has to be contained at this point.
- assert(shiftBy->isContained());
- node->ChangeOper(GT_ADDEX);
- MakeSrcContained(node, lsh);
- }
- }
- }
- }
#endif
}
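
For the shift-amount change in IsContainableBinaryOp above: the bound is now derived from the parent's operand size (1 through 31 for 32-bit operations, 1 through 63 for 64-bit) rather than a flat 1..63 check plus a separate 32-bit case. A hedged C# illustration follows; the quoted forms are assumptions, and whether a given shift actually stays contained still depends on the other checks in that function.

    public static class ShiftedOperandSketch
    {
        // A 32-bit add with a constant shift in 1..31 can keep the shift contained and
        // use the shifted-register form, roughly `add w0, w1, w2, lsl #3`.
        public static int AddShifted32(int a, int b) => a + (b << 3);

        // 64-bit operands allow contained shift amounts up to 63, roughly `add x0, x1, x2, lsl #40`.
        public static long AddShifted64(long a, long b) => a + (b << 40);
    }
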
diff --git a/src/coreclr/jit/lsraarm64.cpp b/src/coreclr/jit/lsraarm64.cpp
index 1d6ad58b67c..e3495ee6886 100644
--- a/src/coreclr/jit/lsraarm64.cpp
+++ b/src/coreclr/jit/lsraarm64.cpp
@@ -288,7 +288,6 @@ int LinearScan::BuildNode(GenTree* tree)
}
FALLTHROUGH;
- case GT_ADDEX:
case GT_AND:
case GT_AND_NOT:
case GT_OR:
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_76273/Runtime_76273.cs b/src/tests/JIT/Regression/JitBlue/Runtime_76273/Runtime_76273.cs
new file mode 100644
index 00000000000..fcb5e7bcb4e
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_76273/Runtime_76273.cs
@@ -0,0 +1,73 @@
+// Generated by Fuzzlyn v1.5 on 2022-09-28 20:12:22
+// Run on Arm64 Windows
+// Seed: 2515290859567534747
+// Reduced from 46.0 KiB to 1.5 KiB in 00:02:39
+// Hits JIT assert in Release:
+// Assertion failed '!op1->isUsedFromMemory()' in 'Program:M11(C0,short)' during 'Generate code' (IL size 87; hash 0xb0dcda0a; FullOpts)
+//
+// File: D:\a\_work\1\s\src\coreclr\jit\codegenarm64.cpp Line: 4543
+//
+
+public class C0
+{
+ public int F0;
+ public bool F1;
+}
+
+public class Program
+{
+ public static IRuntime s_rt = new Runtime();
+ public static ulong s_3;
+
+ public static int Main()
+ {
+ CollectibleALC alc = new CollectibleALC();
+
+ System.Reflection.Assembly asm = alc.LoadFromAssemblyPath(System.Reflection.Assembly.GetExecutingAssembly().Location);
+ System.Reflection.MethodInfo mi = asm.GetType(typeof(Program).FullName).GetMethod(nameof(MainInner));
+ System.Type runtimeTy = asm.GetType(typeof(Runtime).FullName);
+
+ mi.Invoke(null, new object[]{System.Activator.CreateInstance(runtimeTy)});
+
+ return 100;
+ }
+
+ public static void MainInner(IRuntime rt)
+ {
+ var vr2 = new C0();
+ M11(vr2, 1);
+ }
+
+ public static void M11(C0 argThis, short arg0)
+ {
+ short var0 = default(short);
+
+ for (int var1 = 0; var1 < 2; var1++)
+ {
+ short var2 = var0++;
+
+ argThis.F1 = (byte)(argThis.F0 ^ arg0) > (ushort)(s_3 % 1);
+ s_rt.WriteLine("c_68", var2);
+
+ int var3 = argThis.F0;
+ s_rt.WriteLine("c_72", var3);
+ }
+ }
+}
+
+public interface IRuntime
+{
+ void WriteLine<T>(string site, T value);
+}
+
+public class Runtime : IRuntime
+{
+ public void WriteLine<T>(string site, T value) => System.Console.WriteLine(value);
+}
+
+public class CollectibleALC : System.Runtime.Loader.AssemblyLoadContext
+{
+ public CollectibleALC(): base(true)
+ {
+ }
+}
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_76273/Runtime_76273.csproj b/src/tests/JIT/Regression/JitBlue/Runtime_76273/Runtime_76273.csproj
new file mode 100644
index 00000000000..f492aeac9d0
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_76273/Runtime_76273.csproj
@@ -0,0 +1,9 @@
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <OutputType>Exe</OutputType>
+ <Optimize>True</Optimize>
+ </PropertyGroup>
+ <ItemGroup>
+ <Compile Include="$(MSBuildProjectName).cs" />
+ </ItemGroup>
+</Project> \ No newline at end of file
diff --git a/src/tests/issues.targets b/src/tests/issues.targets
index 9951a88e184..9f8c49da4df 100644
--- a/src/tests/issues.targets
+++ b/src/tests/issues.targets
@@ -3123,6 +3123,9 @@
<ExcludeList Include="$(XunitTestBinBase)/JIT/Regression/CLR-x86-JIT/V1.1-M1-Beta1/b143840/b143840/*">
<Issue>https://github.com/dotnet/runtime/issues/48914</Issue>
</ExcludeList>
+ <ExcludeList Include="$(XunitTestBinBase)/JIT/Regression/JitBlue/Runtime_76273/Runtime_76273/**">
+ <Issue>Fuzzlyn</Issue>
+ </ExcludeList>
<ExcludeList Include = "$(XUnitTestBinBase)/JIT/HardwareIntrinsics/X86/X86Base/Pause*/**">
<Issue>https://github.com/dotnet/runtime/issues/73454;https://github.com/dotnet/runtime/pull/61707#issuecomment-973122341</Issue>
</ExcludeList>
@@ -3491,6 +3494,9 @@
<ExcludeList Include = "$(XunitTestBinBase)/JIT/Regression/JitBlue/Runtime_40444/Runtime_40444/**">
<Issue>https://github.com/dotnet/runtime/issues/41472</Issue>
</ExcludeList>
+ <ExcludeList Include = "$(XunitTestBinBase)/JIT/Regression/JitBlue/Runtime_76273/Runtime_76273/**">
+ <Issue>Fuzzlyn</Issue>
+ </ExcludeList>
<ExcludeList Include = "$(XunitTestBinBase)/JIT/Regression/CLR-x86-JIT/V2.0-Beta2/b426654/b426654/**">
<Issue>https://github.com/dotnet/runtime/issues/41472</Issue>
</ExcludeList>