github.com/dotnet/runtime.git
-rw-r--r--  src/coreclr/inc/contract.h            2
-rw-r--r--  src/coreclr/inc/debugreturn.h         6
-rw-r--r--  src/coreclr/jit/assertionprop.cpp    60
-rw-r--r--  src/coreclr/jit/block.cpp             2
-rw-r--r--  src/coreclr/jit/block.h             253
-rw-r--r--  src/coreclr/jit/codegenlinear.cpp    12
-rw-r--r--  src/coreclr/jit/compiler.h          123
-rw-r--r--  src/coreclr/jit/compiler.hpp         14
-rw-r--r--  src/coreclr/jit/fgbasic.cpp          10
-rw-r--r--  src/coreclr/jit/fgdiagnostic.cpp     10
-rw-r--r--  src/coreclr/jit/fginline.cpp         18
-rw-r--r--  src/coreclr/jit/fgopt.cpp             2
-rw-r--r--  src/coreclr/jit/flowgraph.cpp         6
-rw-r--r--  src/coreclr/jit/gentree.cpp          62
-rw-r--r--  src/coreclr/jit/gentree.h           855
-rw-r--r--  src/coreclr/jit/importer.cpp         12
-rw-r--r--  src/coreclr/jit/lclmorph.cpp          4
-rw-r--r--  src/coreclr/jit/lower.cpp             8
-rw-r--r--  src/coreclr/jit/morph.cpp            36
-rw-r--r--  src/coreclr/jit/optimizer.cpp         2
-rw-r--r--  src/coreclr/jit/rangecheck.cpp        8
-rw-r--r--  src/coreclr/jit/rationalize.cpp       2
-rw-r--r--  src/coreclr/jit/regset.cpp           10
-rw-r--r--  src/coreclr/jit/treelifeupdater.cpp   6
-rw-r--r--  src/coreclr/jit/valuenum.cpp          8
-rw-r--r--  src/coreclr/jit/valuenum.h           10
26 files changed, 847 insertions(+), 694 deletions(-)
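
This commit replaces the JIT's untyped flag storage with strongly typed enums: GenTreeFlags (previously raw unsigned bits in gtFlags), BasicBlockFlags (previously #define BBF_* masks over an unsigned __int64 field), and LoopFlags (previously #define LPFLG_* masks over an unsigned short). Bitwise operations on an unscoped enum promote to int, so their result cannot be assigned back to the enum without a cast; that is why each new enum also gains explicit constexpr operators. A minimal sketch of the pattern, using a hypothetical ExampleFlags enum (names illustrative, not from the commit):

    enum ExampleFlags : unsigned int
    {
        EXF_EMPTY = 0,
        EXF_A     = 0x1,
        EXF_B     = 0x2,
    };

    inline constexpr ExampleFlags operator|(ExampleFlags a, ExampleFlags b)
    {
        return (ExampleFlags)((unsigned int)a | (unsigned int)b);
    }

    inline ExampleFlags& operator|=(ExampleFlags& a, ExampleFlags b)
    {
        return a = a | b;
    }

    // With the operators in place, existing flag code keeps its shape but is
    // now type-checked: assigning a bare integer, or mixing in a flag from an
    // unrelated enum, no longer compiles.
    //   ExampleFlags f = EXF_EMPTY;
    //   f |= EXF_A | EXF_B;
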
diff --git a/src/coreclr/inc/contract.h b/src/coreclr/inc/contract.h
index a50c09ed322..997f12b0b6a 100644
--- a/src/coreclr/inc/contract.h
+++ b/src/coreclr/inc/contract.h
@@ -217,7 +217,7 @@
// We only enable contracts in _DEBUG builds
-#if defined(_DEBUG) && !defined(DISABLE_CONTRACTS)
+#if defined(_DEBUG) && !defined(DISABLE_CONTRACTS) && !defined(JIT_BUILD)
#define ENABLE_CONTRACTS_DATA
#endif
diff --git a/src/coreclr/inc/debugreturn.h b/src/coreclr/inc/debugreturn.h
index 00c30b62ca6..d052364ff89 100644
--- a/src/coreclr/inc/debugreturn.h
+++ b/src/coreclr/inc/debugreturn.h
@@ -30,7 +30,7 @@
// earlier because those builds only support C++11 constexpr, which doesn't allow the
// use of 'if' statements within the body of a constexpr function. Later builds support
// C++14 constexpr.
-#if defined(_DEBUG) && (!defined(_MSC_FULL_VER) || _MSC_FULL_VER > 190024315)
+#if defined(_DEBUG) && !defined(JIT_BUILD) && (!defined(_MSC_FULL_VER) || _MSC_FULL_VER > 190024315)
// Code to generate a compile-time error if return statements appear where they
// shouldn't.
@@ -107,7 +107,7 @@ typedef __SafeToReturn __ReturnOK;
#define DEBUG_OK_TO_RETURN_BEGIN(arg) { typedef __SafeToReturn __ReturnOK; if (0 && __ReturnOK::used()) { } else {
#define DEBUG_OK_TO_RETURN_END(arg) } }
-#else // defined(_DEBUG) && (!defined(_MSC_FULL_VER) || _MSC_FULL_VER > 190024315)
+#else // defined(_DEBUG) && !defined(JIT_BUILD) && (!defined(_MSC_FULL_VER) || _MSC_FULL_VER > 190024315)
#define DEBUG_ASSURE_SAFE_TO_RETURN TRUE
@@ -117,7 +117,7 @@ typedef __SafeToReturn __ReturnOK;
#define DEBUG_OK_TO_RETURN_BEGIN(arg) {
#define DEBUG_OK_TO_RETURN_END(arg) }
-#endif // defined(_DEBUG) && (!defined(_MSC_FULL_VER) || _MSC_FULL_VER > 190024315)
+#endif // defined(_DEBUG) && !defined(JIT_BUILD) && (!defined(_MSC_FULL_VER) || _MSC_FULL_VER > 190024315)
#endif // !_PREFAST_
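
The comment in the hunk above refers to the C++11 rule that a constexpr function body is limited to essentially a single return statement; 'if' statements inside constexpr bodies only became legal in C++14. A small illustration of the difference (not part of the commit):

    // Rejected by C++11-only compilers (here, MSVC at or below 190024315),
    // accepted from C++14 onward:
    constexpr int sign(int x)
    {
        if (x < 0)
        {
            return -1;
        }
        return 1;
    }

    // The C++11-compatible form folds the branch into a conditional expression:
    constexpr int sign11(int x)
    {
        return (x < 0) ? -1 : 1;
    }
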
diff --git a/src/coreclr/jit/assertionprop.cpp b/src/coreclr/jit/assertionprop.cpp
index 8db49752c10..785bf68cdbd 100644
--- a/src/coreclr/jit/assertionprop.cpp
+++ b/src/coreclr/jit/assertionprop.cpp
@@ -694,12 +694,12 @@ void Compiler::optPrintAssertion(AssertionDsc* curAssertion, AssertionIndex asse
if (curAssertion->op1.kind == O1K_EXACT_TYPE)
{
printf("Exact Type MT(%08X)", dspPtr(curAssertion->op2.u1.iconVal));
- assert(curAssertion->op2.u1.iconFlags != 0);
+ assert(curAssertion->op2.u1.iconFlags != GTF_EMPTY);
}
else if (curAssertion->op1.kind == O1K_SUBTYPE)
{
printf("MT(%08X)", dspPtr(curAssertion->op2.u1.iconVal));
- assert(curAssertion->op2.u1.iconFlags != 0);
+ assert(curAssertion->op2.u1.iconFlags != GTF_EMPTY);
}
else if (curAssertion->op1.kind == O1K_BOUND_OPER_BND)
{
@@ -957,10 +957,10 @@ AssertionIndex Compiler::optCreateAssertion(GenTree* op1,
assertion.op2.kind = O2K_CONST_INT;
assertion.op2.vn = ValueNumStore::VNForNull();
assertion.op2.u1.iconVal = 0;
- assertion.op2.u1.iconFlags = 0;
+ assertion.op2.u1.iconFlags = GTF_EMPTY;
#ifdef TARGET_64BIT
- assertion.op2.u1.iconFlags |= 1; // Signify that this is really TYP_LONG
-#endif // TARGET_64BIT
+ assertion.op2.u1.iconFlags |= GTF_ASSERTION_PROP_LONG; // Signify that this is really TYP_LONG
+#endif // TARGET_64BIT
}
//
// Are we making an assertion about a local variable?
@@ -1096,7 +1096,8 @@ AssertionIndex Compiler::optCreateAssertion(GenTree* op1,
#ifdef TARGET_64BIT
if (op2->TypeGet() == TYP_LONG || op2->TypeGet() == TYP_BYREF)
{
- assertion.op2.u1.iconFlags |= 1; // Signify that this is really TYP_LONG
+ assertion.op2.u1.iconFlags |=
+ GTF_ASSERTION_PROP_LONG; // Signify that this is really TYP_LONG
}
#endif // TARGET_64BIT
}
@@ -1302,8 +1303,8 @@ AssertionIndex Compiler::optCreateAssertion(GenTree* op1,
vnStore->VNConservativeNormalValue(
lvaTable[lclNum].GetPerSsaData(assertion.op1.lcl.ssaNum)->m_vnPair)));
- ssize_t cnsValue = 0;
- unsigned iconFlags = 0;
+ ssize_t cnsValue = 0;
+ GenTreeFlags iconFlags = GTF_EMPTY;
// Ngen case
if (op2->gtOper == GT_IND)
{
@@ -1323,7 +1324,7 @@ AssertionIndex Compiler::optCreateAssertion(GenTree* op1,
#ifdef TARGET_64BIT
if (op2->AsOp()->gtOp1->TypeGet() == TYP_LONG)
{
- assertion.op2.u1.iconFlags |= 1; // Signify that this is really TYP_LONG
+ assertion.op2.u1.iconFlags |= GTF_ASSERTION_PROP_LONG; // Signify that this is really TYP_LONG
}
#endif // TARGET_64BIT
}
@@ -1341,7 +1342,7 @@ AssertionIndex Compiler::optCreateAssertion(GenTree* op1,
#ifdef TARGET_64BIT
if (op2->TypeGet() == TYP_LONG)
{
- assertion.op2.u1.iconFlags |= 1; // Signify that this is really TYP_LONG
+ assertion.op2.u1.iconFlags |= GTF_ASSERTION_PROP_LONG; // Signify that this is really TYP_LONG
}
#endif // TARGET_64BIT
}
@@ -1386,7 +1387,7 @@ DONE_ASSERTION:
* constant. Set "vnBased" to true to indicate local or global assertion prop.
* "pFlags" indicates if the constant is a handle marked by GTF_ICON_HDL_MASK.
*/
-bool Compiler::optIsTreeKnownIntValue(bool vnBased, GenTree* tree, ssize_t* pConstant, unsigned* pFlags)
+bool Compiler::optIsTreeKnownIntValue(bool vnBased, GenTree* tree, ssize_t* pConstant, GenTreeFlags* pFlags)
{
// Is Local assertion prop?
if (!vnBased)
@@ -1423,14 +1424,14 @@ bool Compiler::optIsTreeKnownIntValue(bool vnBased, GenTree* tree, ssize_t* pCon
if (vnType == TYP_INT)
{
*pConstant = vnStore->ConstantValue<int>(vn);
- *pFlags = vnStore->IsVNHandle(vn) ? vnStore->GetHandleFlags(vn) : 0;
+ *pFlags = vnStore->IsVNHandle(vn) ? vnStore->GetHandleFlags(vn) : GTF_EMPTY;
return true;
}
#ifdef TARGET_64BIT
else if (vnType == TYP_LONG)
{
*pConstant = vnStore->ConstantValue<INT64>(vn);
- *pFlags = vnStore->IsVNHandle(vn) ? vnStore->GetHandleFlags(vn) : 0;
+ *pFlags = vnStore->IsVNHandle(vn) ? vnStore->GetHandleFlags(vn) : GTF_EMPTY;
return true;
}
#endif
@@ -1618,14 +1619,18 @@ void Compiler::optDebugCheckAssertion(AssertionDsc* assertion)
case O2K_IND_CNS_INT:
case O2K_CONST_INT:
{
- // The only flags that can be set are those in the GTF_ICON_HDL_MASK, or bit 0, which is
- // used to indicate a long constant.
- assert((assertion->op2.u1.iconFlags & ~(GTF_ICON_HDL_MASK | 1)) == 0);
+// The only flags that can be set are those in the GTF_ICON_HDL_MASK, or GTF_ASSERTION_PROP_LONG, which is
+// used to indicate a long constant.
+#ifdef TARGET_64BIT
+ assert((assertion->op2.u1.iconFlags & ~(GTF_ICON_HDL_MASK | GTF_ASSERTION_PROP_LONG)) == 0);
+#else
+ assert((assertion->op2.u1.iconFlags & ~GTF_ICON_HDL_MASK) == 0);
+#endif
switch (assertion->op1.kind)
{
case O1K_EXACT_TYPE:
case O1K_SUBTYPE:
- assert(assertion->op2.u1.iconFlags != 0);
+ assert(assertion->op2.u1.iconFlags != GTF_EMPTY);
break;
case O1K_LCLVAR:
assert((lvaTable[assertion->op1.lcl.lclNum].lvType != TYP_REF) ||
@@ -1793,7 +1798,7 @@ AssertionInfo Compiler::optCreateJTrueBoundsAssertion(GenTree* tree)
dsc.op2.kind = O2K_CONST_INT;
dsc.op2.vn = vnStore->VNZeroForType(op2->TypeGet());
dsc.op2.u1.iconVal = 0;
- dsc.op2.u1.iconFlags = 0;
+ dsc.op2.u1.iconFlags = GTF_EMPTY;
AssertionIndex index = optAddAssertion(&dsc);
optCreateComplementaryAssertion(index, nullptr, nullptr);
return index;
@@ -1810,7 +1815,7 @@ AssertionInfo Compiler::optCreateJTrueBoundsAssertion(GenTree* tree)
dsc.op2.kind = O2K_CONST_INT;
dsc.op2.vn = vnStore->VNZeroForType(op2->TypeGet());
dsc.op2.u1.iconVal = 0;
- dsc.op2.u1.iconFlags = 0;
+ dsc.op2.u1.iconFlags = GTF_EMPTY;
AssertionIndex index = optAddAssertion(&dsc);
optCreateComplementaryAssertion(index, nullptr, nullptr);
return index;
@@ -1827,7 +1832,7 @@ AssertionInfo Compiler::optCreateJTrueBoundsAssertion(GenTree* tree)
dsc.op2.kind = O2K_CONST_INT;
dsc.op2.vn = vnStore->VNZeroForType(op2->TypeGet());
dsc.op2.u1.iconVal = 0;
- dsc.op2.u1.iconFlags = 0;
+ dsc.op2.u1.iconFlags = GTF_EMPTY;
AssertionIndex index = optAddAssertion(&dsc);
optCreateComplementaryAssertion(index, nullptr, nullptr);
return index;
@@ -1844,7 +1849,7 @@ AssertionInfo Compiler::optCreateJTrueBoundsAssertion(GenTree* tree)
dsc.op2.kind = O2K_CONST_INT;
dsc.op2.vn = vnStore->VNZeroForType(TYP_INT);
dsc.op2.u1.iconVal = 0;
- dsc.op2.u1.iconFlags = 0;
+ dsc.op2.u1.iconFlags = GTF_EMPTY;
AssertionIndex index = optAddAssertion(&dsc);
optCreateComplementaryAssertion(index, nullptr, nullptr);
return index;
@@ -1887,7 +1892,7 @@ AssertionInfo Compiler::optCreateJTrueBoundsAssertion(GenTree* tree)
dsc.op2.kind = O2K_CONST_INT;
dsc.op2.vn = vnStore->VNZeroForType(op2->TypeGet());
dsc.op2.u1.iconVal = 0;
- dsc.op2.u1.iconFlags = 0;
+ dsc.op2.u1.iconFlags = GTF_EMPTY;
AssertionIndex index = optAddAssertion(&dsc);
optCreateComplementaryAssertion(index, nullptr, nullptr);
return index;
@@ -1904,7 +1909,7 @@ AssertionInfo Compiler::optCreateJTrueBoundsAssertion(GenTree* tree)
dsc.op2.kind = O2K_CONST_INT;
dsc.op2.vn = vnStore->VNZeroForType(TYP_INT);
dsc.op2.u1.iconVal = 0;
- dsc.op2.u1.iconFlags = 0;
+ dsc.op2.u1.iconFlags = GTF_EMPTY;
AssertionIndex index = optAddAssertion(&dsc);
optCreateComplementaryAssertion(index, nullptr, nullptr);
return index;
@@ -2001,7 +2006,7 @@ AssertionInfo Compiler::optAssertionGenJtrue(GenTree* tree)
dsc.op1.bnd.vnLen = op1VN;
dsc.op2.vn = vnStore->VNConservativeNormalValue(op2->gtVNPair);
dsc.op2.kind = O2K_CONST_INT;
- dsc.op2.u1.iconFlags = 0;
+ dsc.op2.u1.iconFlags = GTF_EMPTY;
dsc.op2.u1.iconVal = 0;
// when con is not zero, create an assertion on the arr.Length == con edge
@@ -2400,8 +2405,8 @@ AssertionIndex Compiler::optAssertionIsSubtype(GenTree* tree, GenTree* methodTab
continue;
}
- ssize_t methodTableVal = 0;
- unsigned iconFlags = 0;
+ ssize_t methodTableVal = 0;
+ GenTreeFlags iconFlags = GTF_EMPTY;
if (!optIsTreeKnownIntValue(!optLocalAssertionProp, methodTableArg, &methodTableVal, &iconFlags))
{
continue;
@@ -2765,7 +2770,8 @@ GenTree* Compiler::optConstantAssertionProp(AssertionDsc* curAssertion,
if (varTypeIsIntegral(newTree->TypeGet()))
{
#ifdef TARGET_64BIT
- var_types newType = (var_types)((curAssertion->op2.u1.iconFlags & 1) ? TYP_LONG : TYP_INT);
+ var_types newType =
+ (var_types)((curAssertion->op2.u1.iconFlags & GTF_ASSERTION_PROP_LONG) ? TYP_LONG : TYP_INT);
if (newTree->TypeGet() != newType)
{
noway_assert(newTree->gtType != TYP_REF);
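
With the magic bit 1 replaced by the named GTF_ASSERTION_PROP_LONG, code consuming an integer-constant assertion can read the constant's width back out of the flags. A hedged sketch of that decode, mirroring the optConstantAssertionProp hunk above (the helper name is illustrative):

    var_types typeOfIconAssertion(GenTreeFlags iconFlags)
    {
    #ifdef TARGET_64BIT
        // The flag signifies that the constant is really TYP_LONG.
        return ((iconFlags & GTF_ASSERTION_PROP_LONG) != 0) ? TYP_LONG : TYP_INT;
    #else
        return TYP_INT;
    #endif
    }
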
diff --git a/src/coreclr/jit/block.cpp b/src/coreclr/jit/block.cpp
index 1c132abbac6..5266bed7507 100644
--- a/src/coreclr/jit/block.cpp
+++ b/src/coreclr/jit/block.cpp
@@ -762,7 +762,7 @@ bool BasicBlock::CloneBlockState(
for (Statement* fromStmt : from->Statements())
{
- auto newExpr = compiler->gtCloneExpr(fromStmt->GetRootNode(), 0, varNum, varVal);
+ auto newExpr = compiler->gtCloneExpr(fromStmt->GetRootNode(), GTF_EMPTY, varNum, varVal);
if (!newExpr)
{
// gtCloneExpr doesn't handle all opcodes, so may fail to clone a statement.
diff --git a/src/coreclr/jit/block.h b/src/coreclr/jit/block.h
index 6f267e0a2eb..dfc3d53fc25 100644
--- a/src/coreclr/jit/block.h
+++ b/src/coreclr/jit/block.h
@@ -357,74 +357,49 @@ public:
};
//------------------------------------------------------------------------
-// BasicBlock: describes a basic block in the flowgraph.
+// BasicBlockFlags: a bitmask of flags for BasicBlock
//
-// Note that this type derives from LIR::Range in order to make the LIR
-// utilities that are polymorphic over basic block and scratch ranges
-// faster and simpler.
-//
-struct BasicBlock : private LIR::Range
+// clang-format off
+enum BasicBlockFlags : unsigned __int64
{
- friend class LIR;
-
- BasicBlock* bbNext; // next BB in ascending PC offset order
- BasicBlock* bbPrev;
-
- void setNext(BasicBlock* next)
- {
- bbNext = next;
- if (next)
- {
- next->bbPrev = this;
- }
- }
-
- unsigned __int64 bbFlags; // see BBF_xxxx below
-
- unsigned bbNum; // the block's number
-
- unsigned bbRefs; // number of blocks that can reach here, either by fall-through or a branch. If this falls to zero,
- // the block is unreachable.
-
#define MAKE_BBFLAG(bit) (1ULL << (bit))
-
-// clang-format off
-
-#define BBF_VISITED MAKE_BBFLAG( 0) // BB visited during optimizations
-#define BBF_MARKED MAKE_BBFLAG( 1) // BB marked during optimizations
-#define BBF_CHANGED MAKE_BBFLAG( 2) // input/output of this block has changed
-#define BBF_REMOVED MAKE_BBFLAG( 3) // BB has been removed from bb-list
-
-#define BBF_DONT_REMOVE MAKE_BBFLAG( 4) // BB should not be removed during flow graph optimizations
-#define BBF_IMPORTED MAKE_BBFLAG( 5) // BB byte-code has been imported
-#define BBF_INTERNAL MAKE_BBFLAG( 6) // BB has been added by the compiler
-#define BBF_FAILED_VERIFICATION MAKE_BBFLAG( 7) // BB has verification exception
-
-#define BBF_TRY_BEG MAKE_BBFLAG( 8) // BB starts a 'try' block
-#define BBF_FUNCLET_BEG MAKE_BBFLAG( 9) // BB is the beginning of a funclet
-#define BBF_HAS_NULLCHECK MAKE_BBFLAG(10) // BB contains a null check
-#define BBF_HAS_SUPPRESSGC_CALL MAKE_BBFLAG(11) // BB contains a call to a method with SuppressGCTransitionAttribute
-
-#define BBF_RUN_RARELY MAKE_BBFLAG(12) // BB is rarely run (catch clauses, blocks with throws etc)
-#define BBF_LOOP_HEAD MAKE_BBFLAG(13) // BB is the head of a loop
-#define BBF_LOOP_CALL0 MAKE_BBFLAG(14) // BB starts a loop that sometimes won't call
-#define BBF_LOOP_CALL1 MAKE_BBFLAG(15) // BB starts a loop that will always call
-
-#define BBF_HAS_LABEL MAKE_BBFLAG(16) // BB needs a label
-// Unused MAKE_BBFLAG(17)
-#define BBF_HAS_JMP MAKE_BBFLAG(18) // BB executes a JMP instruction (instead of return)
-#define BBF_GC_SAFE_POINT MAKE_BBFLAG(19) // BB has a GC safe point (a call). More abstractly, BB does not require a
+ BBF_EMPTY = 0,
+
+ BBF_VISITED = MAKE_BBFLAG( 0), // BB visited during optimizations
+ BBF_MARKED = MAKE_BBFLAG( 1), // BB marked during optimizations
+ BBF_CHANGED = MAKE_BBFLAG( 2), // input/output of this block has changed
+ BBF_REMOVED = MAKE_BBFLAG( 3), // BB has been removed from bb-list
+
+ BBF_DONT_REMOVE = MAKE_BBFLAG( 4), // BB should not be removed during flow graph optimizations
+ BBF_IMPORTED = MAKE_BBFLAG( 5), // BB byte-code has been imported
+ BBF_INTERNAL = MAKE_BBFLAG( 6), // BB has been added by the compiler
+ BBF_FAILED_VERIFICATION = MAKE_BBFLAG( 7), // BB has verification exception
+
+ BBF_TRY_BEG = MAKE_BBFLAG( 8), // BB starts a 'try' block
+ BBF_FUNCLET_BEG = MAKE_BBFLAG( 9), // BB is the beginning of a funclet
+ BBF_HAS_NULLCHECK = MAKE_BBFLAG(10), // BB contains a null check
+ BBF_HAS_SUPPRESSGC_CALL = MAKE_BBFLAG(11), // BB contains a call to a method with SuppressGCTransitionAttribute
+
+ BBF_RUN_RARELY = MAKE_BBFLAG(12), // BB is rarely run (catch clauses, blocks with throws etc)
+ BBF_LOOP_HEAD = MAKE_BBFLAG(13), // BB is the head of a loop
+ BBF_LOOP_CALL0 = MAKE_BBFLAG(14), // BB starts a loop that sometimes won't call
+ BBF_LOOP_CALL1 = MAKE_BBFLAG(15), // BB starts a loop that will always call
+
+ BBF_HAS_LABEL = MAKE_BBFLAG(16), // BB needs a label
+ BBF_LOOP_ALIGN = MAKE_BBFLAG(17), // Block is lexically the first block in a loop we intend to align.
+ BBF_HAS_JMP = MAKE_BBFLAG(18), // BB executes a JMP instruction (instead of return)
+ BBF_GC_SAFE_POINT = MAKE_BBFLAG(19), // BB has a GC safe point (a call). More abstractly, BB does not require a
// (further) poll -- this may be because this BB has a call, or, in some
// cases, because the BB occurs in a loop, and we've determined that all
// paths in the loop body leading to BB include a call.
-#define BBF_HAS_IDX_LEN MAKE_BBFLAG(20) // BB contains simple index or length expressions on an array local var.
-#define BBF_HAS_NEWARRAY MAKE_BBFLAG(21) // BB contains 'new' of an array
-#define BBF_HAS_NEWOBJ MAKE_BBFLAG(22) // BB contains 'new' of an object type.
+ BBF_HAS_IDX_LEN = MAKE_BBFLAG(20), // BB contains simple index or length expressions on an array local var.
+ BBF_HAS_NEWARRAY = MAKE_BBFLAG(21), // BB contains 'new' of an array
+ BBF_HAS_NEWOBJ = MAKE_BBFLAG(22), // BB contains 'new' of an object type.
#if defined(FEATURE_EH_FUNCLETS) && defined(TARGET_ARM)
-#define BBF_FINALLY_TARGET MAKE_BBFLAG(23) // BB is the target of a finally return: where a finally will return during
+ BBF_FINALLY_TARGET = MAKE_BBFLAG(23), // BB is the target of a finally return: where a finally will return during
// non-exceptional flow. Because the ARM calling sequence for calling a
// finally explicitly sets the return address to the finally target and jumps
// to the finally, instead of using a call instruction, ARM needs this to
@@ -433,82 +408,135 @@ struct BasicBlock : private LIR::Range
#endif // defined(FEATURE_EH_FUNCLETS) && defined(TARGET_ARM)
-#define BBF_BACKWARD_JUMP MAKE_BBFLAG(24) // BB is surrounded by a backward jump/switch arc
-#define BBF_RETLESS_CALL MAKE_BBFLAG(25) // BBJ_CALLFINALLY that will never return (and therefore, won't need a paired
+ BBF_BACKWARD_JUMP = MAKE_BBFLAG(24), // BB is surrounded by a backward jump/switch arc
+ BBF_RETLESS_CALL = MAKE_BBFLAG(25), // BBJ_CALLFINALLY that will never return (and therefore, won't need a paired
// BBJ_ALWAYS); see isBBCallAlwaysPair().
-#define BBF_LOOP_PREHEADER MAKE_BBFLAG(26) // BB is a loop preheader block
-#define BBF_COLD MAKE_BBFLAG(27) // BB is cold
+ BBF_LOOP_PREHEADER = MAKE_BBFLAG(26), // BB is a loop preheader block
+ BBF_COLD = MAKE_BBFLAG(27), // BB is cold
-#define BBF_PROF_WEIGHT MAKE_BBFLAG(28) // BB weight is computed from profile data
-#define BBF_IS_LIR MAKE_BBFLAG(29) // Set if the basic block contains LIR (as opposed to HIR)
-#define BBF_KEEP_BBJ_ALWAYS MAKE_BBFLAG(30) // A special BBJ_ALWAYS block, used by EH code generation. Keep the jump kind
+ BBF_PROF_WEIGHT = MAKE_BBFLAG(28), // BB weight is computed from profile data
+ BBF_IS_LIR = MAKE_BBFLAG(29), // Set if the basic block contains LIR (as opposed to HIR)
+ BBF_KEEP_BBJ_ALWAYS = MAKE_BBFLAG(30), // A special BBJ_ALWAYS block, used by EH code generation. Keep the jump kind
// as BBJ_ALWAYS. Used for the paired BBJ_ALWAYS block following the
// BBJ_CALLFINALLY block, as well as, on x86, the final step block out of a
// finally.
-#define BBF_CLONED_FINALLY_BEGIN MAKE_BBFLAG(31) // First block of a cloned finally region
+ BBF_CLONED_FINALLY_BEGIN = MAKE_BBFLAG(31), // First block of a cloned finally region
-#define BBF_CLONED_FINALLY_END MAKE_BBFLAG(32) // Last block of a cloned finally region
-#define BBF_HAS_CALL MAKE_BBFLAG(33) // BB contains a call
-#define BBF_DOMINATED_BY_EXCEPTIONAL_ENTRY MAKE_BBFLAG(34) // Block is dominated by exceptional entry.
-#define BBF_BACKWARD_JUMP_TARGET MAKE_BBFLAG(35) // Block is a target of a backward jump
+ BBF_CLONED_FINALLY_END = MAKE_BBFLAG(32), // Last block of a cloned finally region
+ BBF_HAS_CALL = MAKE_BBFLAG(33), // BB contains a call
+ BBF_DOMINATED_BY_EXCEPTIONAL_ENTRY = MAKE_BBFLAG(34), // Block is dominated by exceptional entry.
+ BBF_BACKWARD_JUMP_TARGET = MAKE_BBFLAG(35), // Block is a target of a backward jump
-#define BBF_PATCHPOINT MAKE_BBFLAG(36) // Block is a patchpoint
-#define BBF_HAS_CLASS_PROFILE MAKE_BBFLAG(37) // BB contains a call needing a class profile
-#define BBF_LOOP_ALIGN MAKE_BBFLAG(39) // Block is lexically the first block in a loop we intend to align.
+ BBF_PATCHPOINT = MAKE_BBFLAG(36), // Block is a patchpoint
+ BBF_HAS_CLASS_PROFILE = MAKE_BBFLAG(37), // BB contains a call needing a class profile
-// clang-format on
+ // The following are sets of flags.
-// Flags that relate blocks to loop structure.
+ // Flags that relate blocks to loop structure.
-#define BBF_LOOP_FLAGS (BBF_LOOP_PREHEADER | BBF_LOOP_HEAD | BBF_LOOP_CALL0 | BBF_LOOP_CALL1)
+ BBF_LOOP_FLAGS = BBF_LOOP_PREHEADER | BBF_LOOP_HEAD | BBF_LOOP_CALL0 | BBF_LOOP_CALL1,
- bool isRunRarely() const
- {
- return ((bbFlags & BBF_RUN_RARELY) != 0);
- }
- bool isLoopHead() const
- {
- return ((bbFlags & BBF_LOOP_HEAD) != 0);
- }
- bool isLoopAlign() const
- {
- return ((bbFlags & BBF_LOOP_ALIGN) != 0);
- }
+ // Flags to update when two blocks are compacted
-// Flags to update when two blocks are compacted
+ BBF_COMPACT_UPD = BBF_CHANGED | BBF_GC_SAFE_POINT | BBF_HAS_JMP | BBF_HAS_IDX_LEN | BBF_BACKWARD_JUMP | BBF_HAS_NEWARRAY | \
+ BBF_HAS_NEWOBJ | BBF_HAS_NULLCHECK,
-#define BBF_COMPACT_UPD \
- (BBF_CHANGED | BBF_GC_SAFE_POINT | BBF_HAS_JMP | BBF_HAS_IDX_LEN | BBF_BACKWARD_JUMP | BBF_HAS_NEWARRAY | \
- BBF_HAS_NEWOBJ | BBF_HAS_NULLCHECK)
+ // Flags a block should not have had before it is split.
-// Flags a block should not have had before it is split.
+ BBF_SPLIT_NONEXIST = BBF_CHANGED | BBF_LOOP_HEAD | BBF_LOOP_CALL0 | BBF_LOOP_CALL1 | BBF_RETLESS_CALL | BBF_LOOP_PREHEADER | BBF_COLD,
-#define BBF_SPLIT_NONEXIST \
- (BBF_CHANGED | BBF_LOOP_HEAD | BBF_LOOP_CALL0 | BBF_LOOP_CALL1 | BBF_RETLESS_CALL | BBF_LOOP_PREHEADER | BBF_COLD)
+ // Flags lost by the top block when a block is split.
+ // Note, this is a conservative guess.
+ // For example, the top block might or might not have BBF_GC_SAFE_POINT,
+ // but we assume it does not have BBF_GC_SAFE_POINT any more.
-// Flags lost by the top block when a block is split.
-// Note, this is a conservative guess.
-// For example, the top block might or might not have BBF_GC_SAFE_POINT,
-// but we assume it does not have BBF_GC_SAFE_POINT any more.
+ BBF_SPLIT_LOST = BBF_GC_SAFE_POINT | BBF_HAS_JMP | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END,
-#define BBF_SPLIT_LOST (BBF_GC_SAFE_POINT | BBF_HAS_JMP | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END)
+ // Flags gained by the bottom block when a block is split.
+ // Note, this is a conservative guess.
+ // For example, the bottom block might or might not have BBF_HAS_NEWARRAY or BBF_HAS_NULLCHECK,
+ // but we assume it has BBF_HAS_NEWARRAY and BBF_HAS_NULLCHECK.
+ // TODO: Should BBF_RUN_RARELY be added to BBF_SPLIT_GAINED ?
-// Flags gained by the bottom block when a block is split.
-// Note, this is a conservative guess.
-// For example, the bottom block might or might not have BBF_HAS_NEWARRAY or BBF_HAS_NULLCHECK,
-// but we assume it has BBF_HAS_NEWARRAY and BBF_HAS_NULLCHECK.
+ BBF_SPLIT_GAINED = BBF_DONT_REMOVE | BBF_HAS_JMP | BBF_BACKWARD_JUMP | BBF_HAS_IDX_LEN | BBF_HAS_NEWARRAY | BBF_PROF_WEIGHT | \
+ BBF_HAS_NEWOBJ | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END | BBF_HAS_NULLCHECK | BBF_HAS_CLASS_PROFILE,
+};
-// TODO: Should BBF_RUN_RARELY be added to BBF_SPLIT_GAINED ?
+inline constexpr BasicBlockFlags operator ~(BasicBlockFlags a)
+{
+ return (BasicBlockFlags)(~(unsigned __int64)a);
+}
-#define BBF_SPLIT_GAINED \
- (BBF_DONT_REMOVE | BBF_HAS_JMP | BBF_BACKWARD_JUMP | BBF_HAS_IDX_LEN | BBF_HAS_NEWARRAY | BBF_PROF_WEIGHT | \
- BBF_HAS_NEWOBJ | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END | BBF_HAS_NULLCHECK | BBF_HAS_CLASS_PROFILE)
+inline constexpr BasicBlockFlags operator |(BasicBlockFlags a, BasicBlockFlags b)
+{
+ return (BasicBlockFlags)((unsigned __int64)a | (unsigned __int64)b);
+}
+
+inline constexpr BasicBlockFlags operator &(BasicBlockFlags a, BasicBlockFlags b)
+{
+ return (BasicBlockFlags)((unsigned __int64)a & (unsigned __int64)b);
+}
+
+inline BasicBlockFlags& operator |=(BasicBlockFlags& a, BasicBlockFlags b)
+{
+ return a = (BasicBlockFlags)((unsigned __int64)a | (unsigned __int64)b);
+}
+
+inline BasicBlockFlags& operator &=(BasicBlockFlags& a, BasicBlockFlags b)
+{
+ return a = (BasicBlockFlags)((unsigned __int64)a & (unsigned __int64)b);
+}
+
+// clang-format on
+
+//------------------------------------------------------------------------
+// BasicBlock: describes a basic block in the flowgraph.
+//
+// Note that this type derives from LIR::Range in order to make the LIR
+// utilities that are polymorphic over basic block and scratch ranges
+// faster and simpler.
+//
+struct BasicBlock : private LIR::Range
+{
+ friend class LIR;
+
+ BasicBlock* bbNext; // next BB in ascending PC offset order
+ BasicBlock* bbPrev;
+
+ void setNext(BasicBlock* next)
+ {
+ bbNext = next;
+ if (next)
+ {
+ next->bbPrev = this;
+ }
+ }
+
+ BasicBlockFlags bbFlags;
#ifndef __GNUC__ // GCC doesn't like C_ASSERT at global scope
static_assert_no_msg((BBF_SPLIT_NONEXIST & BBF_SPLIT_LOST) == 0);
static_assert_no_msg((BBF_SPLIT_NONEXIST & BBF_SPLIT_GAINED) == 0);
#endif
+ unsigned bbNum; // the block's number
+
+ unsigned bbRefs; // number of blocks that can reach here, either by fall-through or a branch. If this falls to zero,
+ // the block is unreachable.
+
+ bool isRunRarely() const
+ {
+ return ((bbFlags & BBF_RUN_RARELY) != 0);
+ }
+ bool isLoopHead() const
+ {
+ return ((bbFlags & BBF_LOOP_HEAD) != 0);
+ }
+ bool isLoopAlign() const
+ {
+ return ((bbFlags & BBF_LOOP_ALIGN) != 0);
+ }
+
#ifdef DEBUG
void dspFlags(); // Print the flags
unsigned dspCheapPreds(); // Print the predecessors (bbCheapPreds)
@@ -516,11 +544,10 @@ struct BasicBlock : private LIR::Range
unsigned dspSuccs(Compiler* compiler); // Print the successors. The 'compiler' argument determines whether EH
// regions are printed: see NumSucc() for details.
void dspJumpKind(); // Print the block jump kind (e.g., BBJ_NONE, BBJ_COND, etc.).
- void dspBlockHeader(Compiler* compiler,
- bool showKind = true,
- bool showFlags = false,
- bool showPreds = true); // Print a simple basic block header for various output, including a
- // list of predecessors and successors.
+
+ // Print a simple basic block header for various output, including a list of predecessors and successors.
+ void dspBlockHeader(Compiler* compiler, bool showKind = true, bool showFlags = false, bool showPreds = true);
+
const char* dspToString(int blockNumPadding = 0);
#endif // DEBUG
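
The three split masks above encode a conservative contract for splitting a block in two. A simplified sketch of how a splitter applies them (the real logic lives in the fgSplitBlock* routines; this helper is illustrative only):

    void applySplitFlags(BasicBlock* top, BasicBlock* bottom)
    {
        // Blocks carrying these flags should never be split in the first place.
        assert((top->bbFlags & BBF_SPLIT_NONEXIST) == 0);

        // The bottom block conservatively gains flags it might now need...
        bottom->bbFlags |= (top->bbFlags & BBF_SPLIT_GAINED);

        // ...and the top block conservatively loses flags it may no longer merit.
        top->bbFlags &= ~BBF_SPLIT_LOST;
    }
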
diff --git a/src/coreclr/jit/codegenlinear.cpp b/src/coreclr/jit/codegenlinear.cpp
index da9214f2042..609fa7a22b6 100644
--- a/src/coreclr/jit/codegenlinear.cpp
+++ b/src/coreclr/jit/codegenlinear.cpp
@@ -1109,7 +1109,7 @@ void CodeGen::genUnspillRegIfNeeded(GenTree* tree, unsigned multiRegIndex)
{
return;
}
- unsigned spillFlags = unspillTree->GetRegSpillFlagByIdx(multiRegIndex);
+ GenTreeFlags spillFlags = unspillTree->GetRegSpillFlagByIdx(multiRegIndex);
if ((spillFlags & GTF_SPILLED) == 0)
{
return;
@@ -1231,7 +1231,7 @@ void CodeGen::genUnspillRegIfNeeded(GenTree* tree)
for (unsigned i = 0; i < regCount; ++i)
{
- unsigned spillFlags = lclNode->GetRegSpillFlagByIdx(i);
+ GenTreeFlags spillFlags = lclNode->GetRegSpillFlagByIdx(i);
if ((spillFlags & GTF_SPILLED) != 0)
{
regNumber reg = lclNode->GetRegNumByIdx(i);
@@ -2111,7 +2111,7 @@ void CodeGen::genProduceReg(GenTree* tree)
for (unsigned i = 0; i < regCount; ++i)
{
- unsigned flags = lclNode->GetRegSpillFlagByIdx(i);
+ GenTreeFlags flags = lclNode->GetRegSpillFlagByIdx(i);
if ((flags & GTF_SPILL) != 0)
{
const regNumber reg = lclNode->GetRegNumByIdx(i);
@@ -2135,7 +2135,7 @@ void CodeGen::genProduceReg(GenTree* tree)
for (unsigned i = 0; i < regCount; ++i)
{
- unsigned flags = call->GetRegSpillFlagByIdx(i);
+ GenTreeFlags flags = call->GetRegSpillFlagByIdx(i);
if ((flags & GTF_SPILL) != 0)
{
regNumber reg = call->GetRegNumByIdx(i);
@@ -2152,7 +2152,7 @@ void CodeGen::genProduceReg(GenTree* tree)
for (unsigned i = 0; i < regCount; ++i)
{
- unsigned flags = argSplit->GetRegSpillFlagByIdx(i);
+ GenTreeFlags flags = argSplit->GetRegSpillFlagByIdx(i);
if ((flags & GTF_SPILL) != 0)
{
regNumber reg = argSplit->GetRegNumByIdx(i);
@@ -2169,7 +2169,7 @@ void CodeGen::genProduceReg(GenTree* tree)
for (unsigned i = 0; i < regCount; ++i)
{
- unsigned flags = multiReg->GetRegSpillFlagByIdx(i);
+ GenTreeFlags flags = multiReg->GetRegSpillFlagByIdx(i);
if ((flags & GTF_SPILL) != 0)
{
regNumber reg = multiReg->GetRegNumByIdx(i);
diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h
index 5c2b9033b4a..fd955ef4a41 100644
--- a/src/coreclr/jit/compiler.h
+++ b/src/coreclr/jit/compiler.h
@@ -2230,6 +2230,60 @@ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, TestLabelAndNum> NodeToT
// XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
#endif // DEBUG
+//-------------------------------------------------------------------------
+// LoopFlags: flags for the loop table.
+//
+enum LoopFlags : unsigned short
+{
+ LPFLG_EMPTY = 0,
+
+ LPFLG_DO_WHILE = 0x0001, // it's a do-while loop (i.e. ENTRY is at the TOP)
+ LPFLG_ONE_EXIT = 0x0002, // the loop has only one exit
+ LPFLG_ITER = 0x0004, // loop of form: for (i = icon or lclVar; test_condition(); i++)
+ LPFLG_HOISTABLE = 0x0008, // the loop is in a form that is suitable for hoisting expressions
+
+ LPFLG_CONST = 0x0010, // loop of form: for (i=icon;i<icon;i++){ ... } - constant loop
+ LPFLG_VAR_INIT = 0x0020, // iterator is initialized with a local var (var # found in lpVarInit)
+ LPFLG_CONST_INIT = 0x0040, // iterator is initialized with a constant (found in lpConstInit)
+ LPFLG_SIMD_LIMIT = 0x0080, // iterator is compared with vector element count (found in lpConstLimit)
+
+ LPFLG_VAR_LIMIT = 0x0100, // iterator is compared with a local var (var # found in lpVarLimit)
+ LPFLG_CONST_LIMIT = 0x0200, // iterator is compared with a constant (found in lpConstLimit)
+ LPFLG_ARRLEN_LIMIT = 0x0400, // iterator is compared with a.len or a[i].len (found in lpArrLenLimit)
+ LPFLG_HAS_PREHEAD = 0x0800, // lpHead is known to be a preHead for this loop
+
+ LPFLG_REMOVED = 0x1000, // has been removed from the loop table (unrolled or optimized away)
+ LPFLG_DONT_UNROLL = 0x2000, // do not unroll this loop
+ LPFLG_ASGVARS_YES = 0x4000, // "lpAsgVars" has been computed
+ LPFLG_ASGVARS_INC = 0x8000, // "lpAsgVars" is incomplete -- vars beyond those representable in an AllVarSet
+ // type are assigned to.
+};
+
+inline constexpr LoopFlags operator~(LoopFlags a)
+{
+ return (LoopFlags)(~(unsigned short)a);
+}
+
+inline constexpr LoopFlags operator|(LoopFlags a, LoopFlags b)
+{
+ return (LoopFlags)((unsigned short)a | (unsigned short)b);
+}
+
+inline constexpr LoopFlags operator&(LoopFlags a, LoopFlags b)
+{
+ return (LoopFlags)((unsigned short)a & (unsigned short)b);
+}
+
+inline LoopFlags& operator|=(LoopFlags& a, LoopFlags b)
+{
+ return a = (LoopFlags)((unsigned short)a | (unsigned short)b);
+}
+
+inline LoopFlags& operator&=(LoopFlags& a, LoopFlags b)
+{
+ return a = (LoopFlags)((unsigned short)a & (unsigned short)b);
+}
+
/*
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
@@ -2724,13 +2778,13 @@ public:
GenTree* gtNewJmpTableNode();
- GenTree* gtNewIndOfIconHandleNode(var_types indType, size_t value, unsigned iconFlags, bool isInvariant);
+ GenTree* gtNewIndOfIconHandleNode(var_types indType, size_t value, GenTreeFlags iconFlags, bool isInvariant);
- GenTree* gtNewIconHandleNode(size_t value, unsigned flags, FieldSeqNode* fields = nullptr);
+ GenTree* gtNewIconHandleNode(size_t value, GenTreeFlags flags, FieldSeqNode* fields = nullptr);
- unsigned gtTokenToIconFlags(unsigned token);
+ GenTreeFlags gtTokenToIconFlags(unsigned token);
- GenTree* gtNewIconEmbHndNode(void* value, void* pValue, unsigned flags, void* compileTimeHandle);
+ GenTree* gtNewIconEmbHndNode(void* value, void* pValue, GenTreeFlags flags, void* compileTimeHandle);
GenTree* gtNewIconEmbScpHndNode(CORINFO_MODULE_HANDLE scpHnd);
GenTree* gtNewIconEmbClsHndNode(CORINFO_CLASS_HANDLE clsHnd);
@@ -2913,7 +2967,7 @@ public:
GenTree* gtNewMustThrowException(unsigned helper, var_types type, CORINFO_CLASS_HANDLE clsHnd);
GenTreeLclFld* gtNewLclFldNode(unsigned lnum, var_types type, unsigned offset);
- GenTree* gtNewInlineCandidateReturnExpr(GenTree* inlineCandidate, var_types type, unsigned __int64 bbFlags);
+ GenTree* gtNewInlineCandidateReturnExpr(GenTree* inlineCandidate, var_types type, BasicBlockFlags bbFlags);
GenTree* gtNewFieldRef(var_types typ, CORINFO_FIELD_HANDLE fldHnd, GenTree* obj = nullptr, DWORD offset = 0);
@@ -2981,11 +3035,14 @@ public:
// create a copy of `tree`, adding specified flags, replacing uses of lclVar `deepVarNum` with
// IntCnses with value `deepVarVal`.
GenTree* gtCloneExpr(
- GenTree* tree, unsigned addFlags, unsigned varNum, int varVal, unsigned deepVarNum, int deepVarVal);
+ GenTree* tree, GenTreeFlags addFlags, unsigned varNum, int varVal, unsigned deepVarNum, int deepVarVal);
// Create a copy of `tree`, optionally adding specifed flags, and optionally mapping uses of local
// `varNum` to int constants with value `varVal`.
- GenTree* gtCloneExpr(GenTree* tree, unsigned addFlags = 0, unsigned varNum = BAD_VAR_NUM, int varVal = 0)
+ GenTree* gtCloneExpr(GenTree* tree,
+ GenTreeFlags addFlags = GTF_EMPTY,
+ unsigned varNum = BAD_VAR_NUM,
+ int varVal = 0)
{
return gtCloneExpr(tree, addFlags, varNum, varVal, varNum, varVal);
}
@@ -2998,7 +3055,7 @@ public:
// Internal helper for cloning a call
GenTreeCall* gtCloneExprCallHelper(GenTreeCall* call,
- unsigned addFlags = 0,
+ GenTreeFlags addFlags = GTF_EMPTY,
unsigned deepVarNum = BAD_VAR_NUM,
int deepVarVal = 0);
@@ -4174,7 +4231,7 @@ public:
GenTree* impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_LOOKUP* pLookup,
- unsigned flags,
+ GenTreeFlags flags,
void* compileTimeHandle);
GenTree* getRuntimeContextTree(CORINFO_RUNTIME_LOOKUP_KIND kind);
@@ -4183,7 +4240,7 @@ public:
CORINFO_LOOKUP* pLookup,
void* compileTimeHandle);
- GenTree* impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup, unsigned flags, void* compileTimeHandle);
+ GenTree* impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup, GenTreeFlags flags, void* compileTimeHandle);
GenTreeCall* impReadyToRunHelperToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
CorInfoHelpFunc helper,
@@ -5542,8 +5599,8 @@ public:
void fgDebugCheckLoopTable();
void fgDebugCheckFlags(GenTree* tree);
- void fgDebugCheckDispFlags(GenTree* tree, unsigned dispFlags, unsigned debugFlags);
- void fgDebugCheckFlagsHelper(GenTree* tree, unsigned treeFlags, unsigned chkFlags);
+ void fgDebugCheckDispFlags(GenTree* tree, GenTreeFlags dispFlags, GenTreeDebugFlags debugFlags);
+ void fgDebugCheckFlagsHelper(GenTree* tree, GenTreeFlags treeFlags, GenTreeFlags chkFlags);
void fgDebugCheckTryFinallyExits();
void fgDebugCheckProfileData();
bool fgDebugCheckIncomingProfileData(BasicBlock* block);
@@ -5878,7 +5935,7 @@ private:
CORINFO_METHOD_HANDLE dispatcherHnd);
GenTree* getLookupTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_LOOKUP* pLookup,
- unsigned handleFlags,
+ GenTreeFlags handleFlags,
void* compileTimeHandle);
GenTree* getRuntimeLookupTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_LOOKUP* pLookup,
@@ -6270,7 +6327,7 @@ public:
ALLVARSET_TP lpAsgVars; // set of vars assigned within the loop (all vars, not just tracked)
varRefKinds lpAsgInds : 8; // set of inds modified within the loop
- unsigned short lpFlags; // Mask of the LPFLG_* constants
+ LoopFlags lpFlags;
unsigned char lpExitCnt; // number of exits from the loop
@@ -6283,29 +6340,6 @@ public:
// or else BasicBlock::NOT_IN_LOOP. One can enumerate all the children of a loop
// by following "lpChild" then "lpSibling" links.
-#define LPFLG_DO_WHILE 0x0001 // it's a do-while loop (i.e ENTRY is at the TOP)
-#define LPFLG_ONE_EXIT 0x0002 // the loop has only one exit
-
-#define LPFLG_ITER 0x0004 // for (i = icon or lclVar; test_condition(); i++)
-#define LPFLG_HOISTABLE 0x0008 // the loop is in a form that is suitable for hoisting expressions
-#define LPFLG_CONST 0x0010 // for (i=icon;i<icon;i++){ ... } - constant loop
-
-#define LPFLG_VAR_INIT 0x0020 // iterator is initialized with a local var (var # found in lpVarInit)
-#define LPFLG_CONST_INIT 0x0040 // iterator is initialized with a constant (found in lpConstInit)
-
-#define LPFLG_VAR_LIMIT 0x0100 // iterator is compared with a local var (var # found in lpVarLimit)
-#define LPFLG_CONST_LIMIT 0x0200 // iterator is compared with a constant (found in lpConstLimit)
-#define LPFLG_ARRLEN_LIMIT 0x0400 // iterator is compared with a.len or a[i].len (found in lpArrLenLimit)
-#define LPFLG_SIMD_LIMIT 0x0080 // iterator is compared with vector element count (found in lpConstLimit)
-
-#define LPFLG_HAS_PREHEAD 0x0800 // lpHead is known to be a preHead for this loop
-#define LPFLG_REMOVED 0x1000 // has been removed from the loop table (unrolled or optimized away)
-#define LPFLG_DONT_UNROLL 0x2000 // do not unroll this loop
-
-#define LPFLG_ASGVARS_YES 0x4000 // "lpAsgVars" has been computed
-#define LPFLG_ASGVARS_INC 0x8000 // "lpAsgVars" is incomplete -- vars beyond those representable in an AllVarSet
- // type are assigned to.
-
bool lpLoopHasMemoryHavoc[MemoryKindCount]; // The loop contains an operation that we assume has arbitrary
// memory side effects. If this is set, the fields below
// may not be accurate (since they become irrelevant.)
@@ -6354,9 +6388,10 @@ public:
var_types lpIterOperType() const; // For overflow instructions
union {
- int lpConstInit; // initial constant value of iterator : Valid if LPFLG_CONST_INIT
- unsigned lpVarInit; // initial local var number to which we initialize the iterator : Valid if
- // LPFLG_VAR_INIT
+ int lpConstInit; // initial constant value of iterator
+ // : Valid if LPFLG_CONST_INIT
+ unsigned lpVarInit; // initial local var number to which we initialize the iterator
+ // : Valid if LPFLG_VAR_INIT
};
// The following is for LPFLG_ITER loops only (i.e. the loop condition is "i RELOP const or var"
@@ -7084,9 +7119,9 @@ public:
ValueNum vn;
struct IntVal
{
- ssize_t iconVal; // integer
- unsigned padding; // unused; ensures iconFlags does not overlap lconVal
- unsigned iconFlags; // gtFlags
+ ssize_t iconVal; // integer
+ unsigned padding; // unused; ensures iconFlags does not overlap lconVal
+ GenTreeFlags iconFlags; // gtFlags
};
struct Range // integer subrange
{
@@ -7318,7 +7353,7 @@ public:
// Assertion prop data flow functions.
void optAssertionPropMain();
Statement* optVNAssertionPropCurStmt(BasicBlock* block, Statement* stmt);
- bool optIsTreeKnownIntValue(bool vnBased, GenTree* tree, ssize_t* pConstant, unsigned* pIconFlags);
+ bool optIsTreeKnownIntValue(bool vnBased, GenTree* tree, ssize_t* pConstant, GenTreeFlags* pIconFlags);
ASSERT_TP* optInitAssertionDataflowFlags();
ASSERT_TP* optComputeAssertionGen();
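
In the loop table, lpConstInit and lpVarInit overlay the same union storage, so the LoopFlags value acts as the discriminant for which member is live. A hedged sketch of the access pattern (assuming the enclosing loop-table entry type in this header):

    int getLoopConstInit(const Compiler::LoopDsc& loop)
    {
        // Only meaningful when the iterator was initialized with a constant.
        assert((loop.lpFlags & LPFLG_CONST_INIT) != 0);
        return loop.lpConstInit;
    }
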
diff --git a/src/coreclr/jit/compiler.hpp b/src/coreclr/jit/compiler.hpp
index 444214e5203..8b0f3e51a18 100644
--- a/src/coreclr/jit/compiler.hpp
+++ b/src/coreclr/jit/compiler.hpp
@@ -867,10 +867,10 @@ inline GenTree::GenTree(genTreeOps oper, var_types type DEBUGARG(bool largeNode)
{
gtOper = oper;
gtType = type;
- gtFlags = 0;
+ gtFlags = GTF_EMPTY;
gtLIRFlags = 0;
#ifdef DEBUG
- gtDebugFlags = 0;
+ gtDebugFlags = GTF_DEBUG_NONE;
#endif // DEBUG
gtCSEnum = NO_CSE;
#if ASSERTION_PROP
@@ -999,7 +999,7 @@ inline GenTree* Compiler::gtNewLargeOperNode(genTreeOps oper, var_types type, Ge
* that may need to be fixed up).
*/
-inline GenTree* Compiler::gtNewIconHandleNode(size_t value, unsigned flags, FieldSeqNode* fields)
+inline GenTree* Compiler::gtNewIconHandleNode(size_t value, GenTreeFlags flags, FieldSeqNode* fields)
{
GenTree* node;
assert((flags & (GTF_ICON_HDL_MASK | GTF_ICON_FIELD_OFF)) != 0);
@@ -1092,7 +1092,7 @@ inline GenTree* Compiler::gtNewIconEmbFldHndNode(CORINFO_FIELD_HANDLE fldHnd)
inline GenTreeCall* Compiler::gtNewHelperCallNode(unsigned helper, var_types type, GenTreeCall::Use* args)
{
- unsigned flags = s_helperCallProperties.NoThrow((CorInfoHelpFunc)helper) ? 0 : GTF_EXCEPT;
+ GenTreeFlags flags = s_helperCallProperties.NoThrow((CorInfoHelpFunc)helper) ? GTF_EMPTY : GTF_EXCEPT;
GenTreeCall* result = gtNewCallNode(CT_HELPER, eeFindHelper(helper), type, args);
result->gtFlags |= flags;
@@ -1473,7 +1473,7 @@ inline void GenTree::ChangeOper(genTreeOps oper, ValueNumberUpdate vnUpdate)
{
assert(!OperIsConst(oper)); // use ChangeOperConst() instead
- unsigned mask = GTF_COMMON_MASK;
+ GenTreeFlags mask = GTF_COMMON_MASK;
if (this->OperIsIndirOrArrLength() && OperIsIndirOrArrLength(oper))
{
mask |= GTF_IND_NONFAULTING;
@@ -1510,7 +1510,7 @@ inline void GenTree::ChangeOper(genTreeOps oper, ValueNumberUpdate vnUpdate)
inline void GenTree::ChangeOperUnchecked(genTreeOps oper)
{
- unsigned mask = GTF_COMMON_MASK;
+ GenTreeFlags mask = GTF_COMMON_MASK;
if (this->OperIsIndirOrArrLength() && OperIsIndirOrArrLength(oper))
{
mask |= GTF_IND_NONFAULTING;
@@ -4648,7 +4648,7 @@ inline void DEBUG_DESTROY_NODE(GenTree* tree)
tree->gtOperSave = tree->gtOper;
tree->gtType = TYP_UNDEF;
- tree->gtFlags |= 0xFFFFFFFF & ~GTF_NODE_MASK;
+ tree->gtFlags |= ~GTF_NODE_MASK;
if (tree->OperIsSimple())
{
tree->AsOp()->gtOp1 = tree->AsOp()->gtOp2 = nullptr;
diff --git a/src/coreclr/jit/fgbasic.cpp b/src/coreclr/jit/fgbasic.cpp
index 7992d3b696a..c05a1e6ab86 100644
--- a/src/coreclr/jit/fgbasic.cpp
+++ b/src/coreclr/jit/fgbasic.cpp
@@ -1912,11 +1912,11 @@ unsigned Compiler::fgMakeBasicBlocks(const BYTE* codeAddr, IL_OFFSET codeSize, F
do
{
- unsigned jmpAddr = DUMMY_INIT(BAD_IL_OFFSET);
- unsigned bbFlags = 0;
- BBswtDesc* swtDsc = nullptr;
- unsigned nxtBBoffs;
- OPCODE opcode = (OPCODE)getU1LittleEndian(codeAddr);
+ unsigned jmpAddr = DUMMY_INIT(BAD_IL_OFFSET);
+ BasicBlockFlags bbFlags = BBF_EMPTY;
+ BBswtDesc* swtDsc = nullptr;
+ unsigned nxtBBoffs;
+ OPCODE opcode = (OPCODE)getU1LittleEndian(codeAddr);
codeAddr += sizeof(__int8);
BBjumpKinds jmpKind = BBJ_NONE;
diff --git a/src/coreclr/jit/fgdiagnostic.cpp b/src/coreclr/jit/fgdiagnostic.cpp
index 3aefdcbb434..fae0a224a75 100644
--- a/src/coreclr/jit/fgdiagnostic.cpp
+++ b/src/coreclr/jit/fgdiagnostic.cpp
@@ -2753,8 +2753,8 @@ void Compiler::fgDebugCheckFlags(GenTree* tree)
{
const genTreeOps oper = tree->OperGet();
const unsigned kind = tree->OperKind();
- unsigned treeFlags = tree->gtFlags & GTF_ALL_EFFECT;
- unsigned chkFlags = 0;
+ GenTreeFlags treeFlags = tree->gtFlags & GTF_ALL_EFFECT;
+ GenTreeFlags chkFlags = GTF_EMPTY;
if (tree->OperMayThrow(this))
{
@@ -3207,7 +3207,7 @@ void Compiler::fgDebugCheckFlags(GenTree* tree)
// and hold GTF_IND_INVARIANT and GTF_IND_NONFAULTING
// debugFlags - the second argument to gtDispFlags
//
-void Compiler::fgDebugCheckDispFlags(GenTree* tree, unsigned dispFlags, unsigned debugFlags)
+void Compiler::fgDebugCheckDispFlags(GenTree* tree, GenTreeFlags dispFlags, GenTreeDebugFlags debugFlags)
{
if (tree->OperGet() == GT_IND)
{
@@ -3230,7 +3230,7 @@ void Compiler::fgDebugCheckDispFlags(GenTree* tree, unsigned dispFlags, unsigned
// Note:
// Checking that all bits that are set in treeFlags are also set in chkFlags is currently disabled.
-void Compiler::fgDebugCheckFlagsHelper(GenTree* tree, unsigned treeFlags, unsigned chkFlags)
+void Compiler::fgDebugCheckFlagsHelper(GenTree* tree, GenTreeFlags treeFlags, GenTreeFlags chkFlags)
{
if (chkFlags & ~treeFlags)
{
@@ -3252,7 +3252,7 @@ void Compiler::fgDebugCheckFlagsHelper(GenTree* tree, unsigned treeFlags, unsign
{
// We can't/don't consider these flags (GTF_GLOB_REF or GTF_ORDER_SIDEEFF) as being "extra" flags
//
- unsigned flagsToCheck = ~GTF_GLOB_REF & ~GTF_ORDER_SIDEEFF;
+ GenTreeFlags flagsToCheck = ~GTF_GLOB_REF & ~GTF_ORDER_SIDEEFF;
if ((treeFlags & ~chkFlags & flagsToCheck) != 0)
{
diff --git a/src/coreclr/jit/fginline.cpp b/src/coreclr/jit/fginline.cpp
index eb899783972..06d750eb9c1 100644
--- a/src/coreclr/jit/fginline.cpp
+++ b/src/coreclr/jit/fginline.cpp
@@ -520,10 +520,10 @@ Compiler::fgWalkResult Compiler::fgUpdateInlineReturnExpressionPlaceHolder(GenTr
// This folding may uncover more GT_RET_EXPRs, so we loop around
// until we've got something distinct.
//
- unsigned __int64 bbFlags = 0;
- GenTree* inlineCandidate = tree->gtRetExprVal(&bbFlags);
- inlineCandidate = comp->gtFoldExpr(inlineCandidate);
- var_types retType = tree->TypeGet();
+ BasicBlockFlags bbFlags = BBF_EMPTY;
+ GenTree* inlineCandidate = tree->gtRetExprVal(&bbFlags);
+ inlineCandidate = comp->gtFoldExpr(inlineCandidate);
+ var_types retType = tree->TypeGet();
#ifdef DEBUG
if (comp->verbose)
@@ -1158,7 +1158,7 @@ void Compiler::fgInsertInlineeBlocks(InlineInfo* pInlineInfo)
}
// Copy inlinee bbFlags to caller bbFlags.
- const unsigned __int64 inlineeBlockFlags = InlineeCompiler->fgFirstBB->bbFlags;
+ const BasicBlockFlags inlineeBlockFlags = InlineeCompiler->fgFirstBB->bbFlags;
noway_assert((inlineeBlockFlags & BBF_HAS_JMP) == 0);
noway_assert((inlineeBlockFlags & BBF_KEEP_BBJ_ALWAYS) == 0);
@@ -1206,7 +1206,7 @@ void Compiler::fgInsertInlineeBlocks(InlineInfo* pInlineInfo)
// Update block flags
{
- const unsigned __int64 originalFlags = topBlock->bbFlags;
+ const BasicBlockFlags originalFlags = topBlock->bbFlags;
noway_assert((originalFlags & BBF_SPLIT_NONEXIST) == 0);
topBlock->bbFlags &= ~(BBF_SPLIT_LOST);
bottomBlock->bbFlags |= originalFlags & BBF_SPLIT_GAINED;
@@ -1530,9 +1530,9 @@ Statement* Compiler::fgInlinePrependStatements(InlineInfo* inlineInfo)
GenTree* argNode = inlArgInfo[argNum].argNode;
const bool argHasPutArg = argNode->OperIs(GT_PUTARG_TYPE);
- unsigned __int64 bbFlags = 0;
- argNode = argNode->gtSkipPutArgType();
- argNode = argNode->gtRetExprVal(&bbFlags);
+ BasicBlockFlags bbFlags = BBF_EMPTY;
+ argNode = argNode->gtSkipPutArgType();
+ argNode = argNode->gtRetExprVal(&bbFlags);
if (argInfo.argHasTmp)
{
diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp
index 1d7e96fe623..dc477428645 100644
--- a/src/coreclr/jit/fgopt.cpp
+++ b/src/coreclr/jit/fgopt.cpp
@@ -858,7 +858,7 @@ void Compiler::fgComputeDoms()
bbRoot.bbNum = 0;
bbRoot.bbIDom = &bbRoot;
bbRoot.bbPostOrderNum = 0;
- bbRoot.bbFlags = 0;
+ bbRoot.bbFlags = BBF_EMPTY;
flowList flRoot(&bbRoot, nullptr);
diff --git a/src/coreclr/jit/flowgraph.cpp b/src/coreclr/jit/flowgraph.cpp
index 5c24e7725d3..a76cc93dc40 100644
--- a/src/coreclr/jit/flowgraph.cpp
+++ b/src/coreclr/jit/flowgraph.cpp
@@ -321,7 +321,7 @@ BasicBlock* Compiler::fgCreateGCPoll(GCPollType pollType, BasicBlock* block)
unsigned char lpIndex = top->bbNatLoopNum;
// Update block flags
- const unsigned __int64 originalFlags = top->bbFlags | BBF_GC_SAFE_POINT;
+ const BasicBlockFlags originalFlags = top->bbFlags | BBF_GC_SAFE_POINT;
// We are allowed to split loops and we need to keep a few other flags...
//
@@ -812,8 +812,8 @@ GenTreeLclVar* Compiler::fgIsIndirOfAddrOfLocal(GenTree* tree)
GenTreeCall* Compiler::fgGetStaticsCCtorHelper(CORINFO_CLASS_HANDLE cls, CorInfoHelpFunc helper)
{
- bool bNeedClassID = true;
- unsigned callFlags = 0;
+ bool bNeedClassID = true;
+ GenTreeFlags callFlags = GTF_EMPTY;
var_types type = TYP_BYREF;
diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp
index 6548e158966..e69cadaf2db 100644
--- a/src/coreclr/jit/gentree.cpp
+++ b/src/coreclr/jit/gentree.cpp
@@ -5530,7 +5530,6 @@ void GenTree::ReplaceOperand(GenTree** useEdge, GenTree* replacement)
// pointer to the child so that it can be modified.
//
// Arguments:
-
// parentChildPointer - A pointer to a GenTree** (yes, that's three
// levels, i.e. GenTree ***), which if non-null,
// will be set to point to the field in the parent
@@ -5564,6 +5563,44 @@ GenTree* GenTree::gtGetParent(GenTree*** parentChildPtrPtr) const
return parent;
}
+//-------------------------------------------------------------------------
+// gtRetExprVal - walk back through GT_RET_EXPRs
+//
+// Arguments:
+// pbbFlags - out-parameter that is set to the flags of the basic block
+// containing the inlinee return value. The value is 0
+// for unsuccessful inlines.
+//
+// Returns:
+// tree representing return value from a successful inline,
+// or original call for failed or yet to be determined inline.
+//
+// Notes:
+// Multi-level inlines can form chains of GT_RET_EXPRs.
+// This method walks back to the root of the chain.
+//
+GenTree* GenTree::gtRetExprVal(BasicBlockFlags* pbbFlags /* = nullptr */)
+{
+ GenTree* retExprVal = this;
+ BasicBlockFlags bbFlags = BBF_EMPTY;
+
+ assert(!retExprVal->OperIs(GT_PUTARG_TYPE));
+
+ while (retExprVal->OperIs(GT_RET_EXPR))
+ {
+ const GenTreeRetExpr* retExpr = retExprVal->AsRetExpr();
+ bbFlags = retExpr->bbFlags;
+ retExprVal = retExpr->gtInlineCandidate;
+ }
+
+ if (pbbFlags != nullptr)
+ {
+ *pbbFlags = bbFlags;
+ }
+
+ return retExprVal;
+}
+
//------------------------------------------------------------------------------
// OperRequiresAsgFlag : Check whether the operation requires GTF_ASG flag regardless
// of the children's flags.
@@ -6035,9 +6072,9 @@ GenTree* Compiler::gtNewJmpTableNode()
* node)
*/
-unsigned Compiler::gtTokenToIconFlags(unsigned token)
+GenTreeFlags Compiler::gtTokenToIconFlags(unsigned token)
{
- unsigned flags = 0;
+ GenTreeFlags flags = GTF_EMPTY;
switch (TypeFromToken(token))
{
@@ -6080,7 +6117,7 @@ unsigned Compiler::gtTokenToIconFlags(unsigned token)
// If the indType is GT_REF we also mark the indNode as GTF_GLOB_REF
//
-GenTree* Compiler::gtNewIndOfIconHandleNode(var_types indType, size_t addr, unsigned iconFlags, bool isInvariant)
+GenTree* Compiler::gtNewIndOfIconHandleNode(var_types indType, size_t addr, GenTreeFlags iconFlags, bool isInvariant)
{
GenTree* addrNode = gtNewIconHandleNode(addr, iconFlags);
GenTree* indNode = gtNewOperNode(GT_IND, indType, addrNode);
@@ -6128,7 +6165,7 @@ GenTree* Compiler::gtNewIndOfIconHandleNode(var_types indType, size_t addr, unsi
* If the handle needs to be accessed via an indirection, pValue points to it.
*/
-GenTree* Compiler::gtNewIconEmbHndNode(void* value, void* pValue, unsigned iconFlags, void* compileTimeHandle)
+GenTree* Compiler::gtNewIconEmbHndNode(void* value, void* pValue, GenTreeFlags iconFlags, void* compileTimeHandle)
{
GenTree* iconNode;
GenTree* handleNode;
@@ -6392,7 +6429,7 @@ GenTreeCall* Compiler::gtNewCallNode(
node->tailCallInfo = nullptr;
node->gtRetClsHnd = nullptr;
node->gtControlExpr = nullptr;
- node->gtCallMoreFlags = 0;
+ node->gtCallMoreFlags = GTF_CALL_M_EMPTY;
if (callType == CT_INDIRECT)
{
@@ -6537,7 +6574,7 @@ GenTreeLclFld* Compiler::gtNewLclFldNode(unsigned lnum, var_types type, unsigned
return node;
}
-GenTree* Compiler::gtNewInlineCandidateReturnExpr(GenTree* inlineCandidate, var_types type, unsigned __int64 bbFlags)
+GenTree* Compiler::gtNewInlineCandidateReturnExpr(GenTree* inlineCandidate, var_types type, BasicBlockFlags bbFlags)
{
assert(GenTree::s_gtNodeSizes[GT_RET_EXPR] == TREE_NODE_SZ_LARGE);
@@ -7611,7 +7648,7 @@ GenTree* Compiler::gtClone(GenTree* tree, bool complexOK)
// recursive invocations to avoid replacing defs.
GenTree* Compiler::gtCloneExpr(
- GenTree* tree, unsigned addFlags, unsigned varNum, int varVal, unsigned deepVarNum, int deepVarVal)
+ GenTree* tree, GenTreeFlags addFlags, unsigned varNum, int varVal, unsigned deepVarNum, int deepVarVal)
{
if (tree == nullptr)
{
@@ -8213,7 +8250,10 @@ DONE:
// Returns:
// Cloned copy of call and all subtrees.
-GenTreeCall* Compiler::gtCloneExprCallHelper(GenTreeCall* tree, unsigned addFlags, unsigned deepVarNum, int deepVarVal)
+GenTreeCall* Compiler::gtCloneExprCallHelper(GenTreeCall* tree,
+ GenTreeFlags addFlags,
+ unsigned deepVarNum,
+ int deepVarVal)
{
GenTreeCall* copy = new (this, GT_CALL) GenTreeCall(tree->TypeGet());
@@ -9884,7 +9924,7 @@ void GenTree::SetIndirExceptionFlags(Compiler* comp)
#ifdef DEBUG
-/* static */ int GenTree::gtDispFlags(unsigned flags, unsigned debugFlags)
+/* static */ int GenTree::gtDispFlags(GenTreeFlags flags, GenTreeDebugFlags debugFlags)
{
int charsDisplayed = 11; // 11 is the "baseline" number of flag characters displayed
@@ -10520,7 +10560,7 @@ void Compiler::gtDispNode(GenTree* tree, IndentStack* indentStack, __in __in_z _
}
/* Then print the general purpose flags */
- unsigned flags = tree->gtFlags;
+ GenTreeFlags flags = tree->gtFlags;
if (tree->OperIsBinary())
{
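
The reworked gtRetExprVal threads the inlinee's basic-block flags back to the caller through an out-parameter. A hedged usage sketch matching the call sites in fginline.cpp above:

    BasicBlockFlags bbFlags = BBF_EMPTY;
    GenTree*        value   = tree->gtRetExprVal(&bbFlags);
    // 'value' is the inlinee's return expression after a successful inline, or
    // the original call otherwise; bbFlags stays BBF_EMPTY when no inline
    // happened.
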
diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h
index bd9323f64d0..5808ac20acb 100644
--- a/src/coreclr/jit/gentree.h
+++ b/src/coreclr/jit/gentree.h
@@ -153,6 +153,7 @@ enum TargetHandleType : BYTE
/*****************************************************************************/
struct BasicBlock;
+enum BasicBlockFlags : unsigned __int64;
struct InlineCandidateInfo;
struct GuardedDevirtualizationCandidateInfo;
struct ClassProfileCandidateInfo;
@@ -350,6 +351,341 @@ struct Statement;
/*****************************************************************************/
+// Don't format the GenTreeFlags declaration
+// clang-format off
+
+//------------------------------------------------------------------------
+// GenTreeFlags: a bitmask of flags for GenTree stored in gtFlags
+//
+enum GenTreeFlags : unsigned int
+{
+ GTF_EMPTY = 0,
+
+//---------------------------------------------------------------------
+// The first set of flags can be used with a large set of nodes, and
+// thus they must all have distinct values. That is, one can test any
+// expression node for one of these flags.
+//---------------------------------------------------------------------
+
+ GTF_ASG = 0x00000001, // sub-expression contains an assignment
+ GTF_CALL = 0x00000002, // sub-expression contains a func. call
+ GTF_EXCEPT = 0x00000004, // sub-expression might throw an exception
+ GTF_GLOB_REF = 0x00000008, // sub-expression uses global variable(s)
+ GTF_ORDER_SIDEEFF = 0x00000010, // sub-expression has a re-ordering side effect
+
+// If you set these flags, make sure that code:gtExtractSideEffList knows how to find the tree,
+// otherwise the C# (run csc /o-) code:
+// var v = side_eff_operation
+// with no use of `v` will drop your tree on the floor.
+
+ GTF_PERSISTENT_SIDE_EFFECTS = GTF_ASG | GTF_CALL,
+ GTF_SIDE_EFFECT = GTF_PERSISTENT_SIDE_EFFECTS | GTF_EXCEPT,
+ GTF_GLOB_EFFECT = GTF_SIDE_EFFECT | GTF_GLOB_REF,
+ GTF_ALL_EFFECT = GTF_GLOB_EFFECT | GTF_ORDER_SIDEEFF,
+
+ GTF_REVERSE_OPS = 0x00000020, // operand op2 should be evaluated before op1 (normally, op1 is evaluated first and op2 is evaluated second)
+ GTF_CONTAINED = 0x00000040, // This node is contained (executed as part of its parent)
+ GTF_SPILLED = 0x00000080, // the value has been spilled
+
+ GTF_NOREG_AT_USE = 0x00000100, // tree node is in memory at the point of use
+
+ GTF_SET_FLAGS = 0x00000200, // Requires that codegen for this node set the flags. Use gtSetFlags() to check this flag.
+ GTF_USE_FLAGS = 0x00000400, // Indicates that this node uses the flags bits.
+
+ GTF_MAKE_CSE = 0x00000800, // Hoisted expression: try hard to make this into CSE (see optPerformHoistExpr)
+ GTF_DONT_CSE = 0x00001000, // Don't bother CSE'ing this expr
+ GTF_COLON_COND = 0x00002000, // This node is conditionally executed (part of ? :)
+
+ GTF_NODE_MASK = GTF_COLON_COND,
+
+ GTF_BOOLEAN = 0x00004000, // value is known to be 0/1
+
+ GTF_UNSIGNED = 0x00008000, // With GT_CAST: the source operand is an unsigned type
+ // With operators: the specified node is an unsigned operator
+ GTF_LATE_ARG = 0x00010000, // The specified node is evaluated to a temp in the arg list, and this temp is added to gtCallLateArgs.
+ GTF_SPILL = 0x00020000, // Needs to be spilled here
+
+// The extra flag GTF_IS_IN_CSE is used to tell the consumer of the side effect flags
+// that we are calling in the context of performing a CSE, thus we
+// should allow the run-once side effects of running a class constructor.
+//
+// The only requirement of this flag is that it not overlap any of the
+// side-effect flags. The actual bit used is otherwise arbitrary.
+
+ GTF_IS_IN_CSE = GTF_BOOLEAN,
+
+ GTF_COMMON_MASK = 0x0003FFFF, // mask of all the flags above
+
+ GTF_REUSE_REG_VAL = 0x00800000, // This is set by the register allocator on nodes whose value already exists in the
+ // register assigned to this node, so the code generator does not have to generate
+ // code to produce the value. It is currently used only on constant nodes.
+ // It CANNOT be set on var (GT_LCL*) nodes, or on indir (GT_IND or GT_STOREIND) nodes, since
+ // it is not needed for lclVars and is highly unlikely to be useful for indir nodes.
+
+//---------------------------------------------------------------------
+// The following flags can be used only with a small set of nodes, and
+// thus their values need not be distinct (other than within the set
+// that goes with a particular node/nodes, of course). That is, one can
+// only test for one of these flags if the 'gtOper' value is tested as
+// well to make sure it's the right operator for the particular flag.
+//---------------------------------------------------------------------
+
+// NB: GTF_VAR_* and GTF_REG_* share the same namespace of flags.
+// These flags are also used by GT_LCL_FLD, and the last-use (DEATH) flags are also used by GenTreeCopyOrReload.
+
+ GTF_VAR_DEF = 0x80000000, // GT_LCL_VAR -- this is a definition
+ GTF_VAR_USEASG = 0x40000000, // GT_LCL_VAR -- this is a partial definition, a use of the previous definition is implied
+ // A partial definition usually occurs when a struct field is assigned to (s.f = ...) or
+ // when a scalar typed variable is assigned to via a narrow store (*((byte*)&i) = ...).
+
+// Last-use bits.
+// Note that a node marked GTF_VAR_MULTIREG can only be a pure definition of all the fields, or a pure use of all the fields,
+// so we don't need the equivalent of GTF_VAR_USEASG.
+
+ GTF_VAR_MULTIREG_DEATH0 = 0x04000000, // GT_LCL_VAR -- The last-use bit for a lclVar (the first register if it is multireg).
+ GTF_VAR_DEATH = GTF_VAR_MULTIREG_DEATH0,
+ GTF_VAR_MULTIREG_DEATH1 = 0x08000000, // GT_LCL_VAR -- The last-use bit for the second register of a multireg lclVar.
+ GTF_VAR_MULTIREG_DEATH2 = 0x10000000, // GT_LCL_VAR -- The last-use bit for the third register of a multireg lclVar.
+ GTF_VAR_MULTIREG_DEATH3 = 0x20000000, // GT_LCL_VAR -- The last-use bit for the fourth register of a multireg lclVar.
+ GTF_VAR_DEATH_MASK = GTF_VAR_MULTIREG_DEATH0 | GTF_VAR_MULTIREG_DEATH1 | GTF_VAR_MULTIREG_DEATH2 | GTF_VAR_MULTIREG_DEATH3,
+
+// This is the amount we have to shift, plus the regIndex, to get the last use bit we want.
+#define MULTIREG_LAST_USE_SHIFT 26
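+// For example (arithmetic only, not part of this change): regIndex 1 gives
+// (GenTreeFlags)(1 << (MULTIREG_LAST_USE_SHIFT + 1)) == 0x08000000 == GTF_VAR_MULTIREG_DEATH1.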
+
+ GTF_VAR_MULTIREG = 0x02000000, // This is a struct or (on 32-bit platforms) long variable that is used or defined
+ // to/from a multireg source or destination (e.g. a call arg or return, or an op
+ // that returns its result in multiple registers such as a long multiply).
+
+ GTF_LIVENESS_MASK = GTF_VAR_DEF | GTF_VAR_USEASG | GTF_VAR_DEATH_MASK,
+
+ GTF_VAR_CAST = 0x01000000, // GT_LCL_VAR -- has been explicitly cast (the variable node's type may not match the local's type)
+ GTF_VAR_ITERATOR = 0x00800000, // GT_LCL_VAR -- this is an iterator reference in the loop condition
+ GTF_VAR_CLONED = 0x00400000, // GT_LCL_VAR -- this node has been cloned or is a clone
+ GTF_VAR_CONTEXT = 0x00200000, // GT_LCL_VAR -- this node is part of a runtime lookup
+ GTF_VAR_FOLDED_IND = 0x00100000, // GT_LCL_VAR -- this node was folded from *(typ*)&lclVar expression tree in fgMorphSmpOp()
+ // where 'typ' is a small type and 'lclVar' corresponds to a normalized-on-store local variable.
+ // This flag identifies such nodes in order to make sure that fgDoNormalizeOnStore() is called
+ // on their parents in post-order morph.
+ // Relevant for inlining optimizations (see fgInlinePrependStatements)
+
+ GTF_VAR_ARR_INDEX = 0x00000020, // The variable is part of (the index portion of) an array index expression.
+ // Shares a value with GTF_REVERSE_OPS, which is meaningless for a local var.
+
+ // For additional flags for GT_CALL node see GTF_CALL_M_*
+
+ GTF_CALL_UNMANAGED = 0x80000000, // GT_CALL -- direct call to unmanaged code
+ GTF_CALL_INLINE_CANDIDATE = 0x40000000, // GT_CALL -- this call has been marked as an inline candidate
+
+ GTF_CALL_VIRT_KIND_MASK = 0x30000000, // GT_CALL -- mask of the below call kinds
+ GTF_CALL_NONVIRT = 0x00000000, // GT_CALL -- a non virtual call
+ GTF_CALL_VIRT_STUB = 0x10000000, // GT_CALL -- a stub-dispatch virtual call
+ GTF_CALL_VIRT_VTABLE = 0x20000000, // GT_CALL -- a vtable-based virtual call
+
+ GTF_CALL_NULLCHECK = 0x08000000, // GT_CALL -- must check instance pointer for null
+ GTF_CALL_POP_ARGS = 0x04000000, // GT_CALL -- caller pop arguments?
+ GTF_CALL_HOISTABLE = 0x02000000, // GT_CALL -- call is hoistable
+
+ GTF_MEMORYBARRIER_LOAD = 0x40000000, // GT_MEMORYBARRIER -- Load barrier
+
+ GTF_NOP_DEATH = 0x40000000, // GT_NOP -- operand dies here
+
+ GTF_FLD_VOLATILE = 0x40000000, // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE
+ GTF_FLD_INITCLASS = 0x20000000, // GT_FIELD/GT_CLS_VAR -- field access requires preceding class/static init helper
+
+ GTF_INX_RNGCHK = 0x80000000, // GT_INDEX/GT_INDEX_ADDR -- the array reference should be range-checked.
+ GTF_INX_STRING_LAYOUT = 0x40000000, // GT_INDEX -- this uses the special string array layout
+
+ GTF_IND_TGT_NOT_HEAP = 0x80000000, // GT_IND -- the target is not on the heap
+ GTF_IND_VOLATILE = 0x40000000, // GT_IND -- the load or store must use volatile semantics (this is a nop on X86)
+ GTF_IND_NONFAULTING = 0x20000000, // Operations for which OperIsIndir() is true -- An indir that cannot fault.
+ // Same as GTF_ARRLEN_NONFAULTING.
+ GTF_IND_TGTANYWHERE = 0x10000000, // GT_IND -- the target could be anywhere
+ GTF_IND_TLS_REF = 0x08000000, // GT_IND -- the target is accessed via TLS
+ GTF_IND_ASG_LHS = 0x04000000, // GT_IND -- this GT_IND node is (the effective val) of the LHS of an
+ // assignment; don't evaluate it independently.
+ GTF_IND_REQ_ADDR_IN_REG = GTF_IND_ASG_LHS, // GT_IND -- requires its addr operand to be evaluated
+ // into a register. This flag is useful in cases where it
+ // is required to generate register indirect addressing mode.
+ // One such case is virtual stub calls on xarch. This is only
+ // valid in the backend, where GTF_IND_ASG_LHS is not necessary
+ // (all such indirections will be lowered to GT_STOREIND).
+ GTF_IND_UNALIGNED = 0x02000000, // GT_IND -- the load or store is unaligned (we assume worst case
+ // alignment of 1 byte)
+ GTF_IND_INVARIANT = 0x01000000, // GT_IND -- the target is invariant (a prejit indirection)
+ GTF_IND_ARR_INDEX = 0x00800000, // GT_IND -- the indirection represents an (SZ) array index
+ GTF_IND_NONNULL = 0x00400000, // GT_IND -- the indirection never returns null (zero)
+
+ GTF_IND_FLAGS = GTF_IND_VOLATILE | GTF_IND_TGTANYWHERE | GTF_IND_NONFAULTING | GTF_IND_TLS_REF |
+ GTF_IND_UNALIGNED | GTF_IND_INVARIANT | GTF_IND_NONNULL | GTF_IND_ARR_INDEX | GTF_IND_TGT_NOT_HEAP,
+
+ GTF_CLS_VAR_VOLATILE = 0x40000000, // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE
+ GTF_CLS_VAR_INITCLASS = 0x20000000, // GT_FIELD/GT_CLS_VAR -- same as GTF_FLD_INITCLASS
+ GTF_CLS_VAR_ASG_LHS = 0x04000000, // GT_CLS_VAR -- this GT_CLS_VAR node is (the effective val) of the LHS
+ // of an assignment; don't evaluate it independently.
+
+ GTF_ADDRMODE_NO_CSE = 0x80000000, // GT_ADD/GT_MUL/GT_LSH -- Do not CSE this node only, forms complex
+ // addressing mode
+
+ GTF_MUL_64RSLT = 0x40000000, // GT_MUL -- produce 64-bit result
+
+ GTF_RELOP_NAN_UN = 0x80000000, // GT_<relop> -- Is branch taken if ops are NaN?
+ GTF_RELOP_JMP_USED = 0x40000000, // GT_<relop> -- result of compare used for jump or ?:
+ GTF_RELOP_QMARK = 0x20000000, // GT_<relop> -- the node is the condition for ?:
+ GTF_RELOP_ZTT = 0x08000000, // GT_<relop> -- Loop test cloned for converting while-loops into do-while
+ // with explicit "loop test" in the header block.
+
+ GTF_JCMP_EQ = 0x80000000, // GT_JCMP -- Branch on equal rather than not equal
+ GTF_JCMP_TST = 0x40000000, // GT_JCMP -- Use bit test instruction rather than compare against zero instruction
+
+ GTF_RET_MERGED = 0x80000000, // GT_RETURN -- This is a return generated during epilog merging.
+
+ GTF_QMARK_CAST_INSTOF = 0x80000000, // GT_QMARK -- Is this a top (not nested) level qmark created for
+ // castclass or instanceof?
+
+ GTF_BOX_VALUE = 0x80000000, // GT_BOX -- "box" is on a value type
+
+ GTF_ICON_HDL_MASK = 0xF0000000, // Bits used by handle types below
+ GTF_ICON_SCOPE_HDL = 0x10000000, // GT_CNS_INT -- constant is a scope handle
+ GTF_ICON_CLASS_HDL = 0x20000000, // GT_CNS_INT -- constant is a class handle
+ GTF_ICON_METHOD_HDL = 0x30000000, // GT_CNS_INT -- constant is a method handle
+ GTF_ICON_FIELD_HDL = 0x40000000, // GT_CNS_INT -- constant is a field handle
+ GTF_ICON_STATIC_HDL = 0x50000000, // GT_CNS_INT -- constant is a handle to static data
+ GTF_ICON_STR_HDL = 0x60000000, // GT_CNS_INT -- constant is a string handle
+ GTF_ICON_CONST_PTR = 0x70000000, // GT_CNS_INT -- constant is a pointer to immutable data (e.g. IAT_PPVALUE)
+ GTF_ICON_GLOBAL_PTR = 0x80000000, // GT_CNS_INT -- constant is a pointer to mutable data (e.g. from the VM state)
+ GTF_ICON_VARG_HDL = 0x90000000, // GT_CNS_INT -- constant is a var arg cookie handle
+ GTF_ICON_PINVKI_HDL = 0xA0000000, // GT_CNS_INT -- constant is a pinvoke calli handle
+ GTF_ICON_TOKEN_HDL = 0xB0000000, // GT_CNS_INT -- constant is a token handle (other than class, method or field)
+ GTF_ICON_TLS_HDL = 0xC0000000, // GT_CNS_INT -- constant is a TLS ref with offset
+ GTF_ICON_FTN_ADDR = 0xD0000000, // GT_CNS_INT -- constant is a function address
+ GTF_ICON_CIDMID_HDL = 0xE0000000, // GT_CNS_INT -- constant is a class ID or a module ID
+ GTF_ICON_BBC_PTR = 0xF0000000, // GT_CNS_INT -- constant is a basic block count pointer
+
+ GTF_ICON_FIELD_OFF = 0x08000000, // GT_CNS_INT -- constant is a field offset
+ GTF_ICON_SIMD_COUNT = 0x04000000, // GT_CNS_INT -- constant is Vector<T>.Count
+
+ GTF_ICON_INITCLASS = 0x02000000, // GT_CNS_INT -- Constant is used to access a static that requires preceding
+ // class/static init helper. In some cases, the constant is
+ // the address of the static field itself, and in other cases
+ // there's an extra layer of indirection and it is the address
+ // of the cell that the runtime will fill in with the address
+ // of the static field; in both of those cases, the constant
+ // is what gets flagged.
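+
+// Illustration (ours): a handle constant keeps its kind in the top four bits of
+// gtFlags, so for a hypothetical class-handle node `icon`
+//     (icon->gtFlags & GTF_ICON_HDL_MASK) == GTF_ICON_CLASS_HDL
+// which is essentially the test that IsIconHandle(GTF_ICON_CLASS_HDL) performs
+// further below.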
+
+ GTF_BLK_VOLATILE = GTF_IND_VOLATILE, // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is a volatile block operation
+ GTF_BLK_UNALIGNED = GTF_IND_UNALIGNED, // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is an unaligned block operation
+
+ GTF_OVERFLOW = 0x10000000, // Supported for: GT_ADD, GT_SUB, GT_MUL and GT_CAST.
+ // Requires an overflow check. Use gtOverflow(Ex)() to check this flag.
+
+ GTF_DIV_BY_CNS_OPT = 0x80000000, // GT_DIV -- Uses the division by constant optimization to compute this division
+
+ GTF_ARR_BOUND_INBND = 0x80000000, // GT_ARR_BOUNDS_CHECK -- have proved this check is always in-bounds
+
+ GTF_ARRLEN_ARR_IDX = 0x80000000, // GT_ARR_LENGTH -- Length which feeds into an array index expression
+ GTF_ARRLEN_NONFAULTING = 0x20000000, // GT_ARR_LENGTH -- An array length operation that cannot fault. Same as GTF_IND_NONFAULTING.
+
+ GTF_SIMDASHW_OP = 0x80000000, // GT_HWINTRINSIC -- Indicates that the structHandle should be gotten from gtGetStructHandleForSIMD
+ // rather than from gtGetStructHandleForHWSIMD.
+
+ // Flag used by assertion prop to indicate that a type is a TYP_LONG
+#ifdef TARGET_64BIT
+ GTF_ASSERTION_PROP_LONG = 0x00000001,
+#endif // TARGET_64BIT
+};
+
+inline constexpr GenTreeFlags operator ~(GenTreeFlags a)
+{
+ return (GenTreeFlags)(~(unsigned int)a);
+}
+
+inline constexpr GenTreeFlags operator |(GenTreeFlags a, GenTreeFlags b)
+{
+ return (GenTreeFlags)((unsigned int)a | (unsigned int)b);
+}
+
+inline constexpr GenTreeFlags operator &(GenTreeFlags a, GenTreeFlags b)
+{
+ return (GenTreeFlags)((unsigned int)a & (unsigned int)b);
+}
+
+inline GenTreeFlags& operator |=(GenTreeFlags& a, GenTreeFlags b)
+{
+ return a = (GenTreeFlags)((unsigned int)a | (unsigned int)b);
+}
+
+inline GenTreeFlags& operator &=(GenTreeFlags& a, GenTreeFlags b)
+{
+ return a = (GenTreeFlags)((unsigned int)a & (unsigned int)b);
+}
+
+inline GenTreeFlags& operator ^=(GenTreeFlags& a, GenTreeFlags b)
+{
+ return a = (GenTreeFlags)((unsigned int)a ^ (unsigned int)b);
+}
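+
+// Illustration (ours, not part of this change): with the typed enum, the usual
+// bitmask idioms on a hypothetical node `tree` read the same as with the old
+// #defines, e.g.
+//
+//     tree->gtFlags |= GTF_ASG | GTF_GLOB_REF;    // mark a store to global memory
+//     tree->gtFlags &= ~GTF_REVERSE_OPS;          // clear a single bit
+//     if ((tree->gtFlags & GTF_SIDE_EFFECT) != 0) { /* has a side effect */ }
+//
+// but assigning a plain integer or an unrelated flag enum no longer compiles.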
+
+// Can any side-effects be observed externally, say by a caller method?
+// For assignments, only assignments to global memory can be observed
+// externally, whereas simple assignments to local variables cannot.
+//
+// Be careful when using this inside a "try" protected region as the
+// order of assignments to local variables would need to be preserved
+// wrt side effects if the variables are alive on entry to the
+// "catch/finally" region. In such cases, even assignments to locals
+// will have to be restricted.
+#define GTF_GLOBALLY_VISIBLE_SIDE_EFFECTS(flags) \
+ (((flags) & (GTF_CALL | GTF_EXCEPT)) || (((flags) & (GTF_ASG | GTF_GLOB_REF)) == (GTF_ASG | GTF_GLOB_REF)))
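+
+// A minimal sketch (ours) of what the macro evaluates to:
+//     GTF_GLOBALLY_VISIBLE_SIDE_EFFECTS(GTF_ASG)                // false: local store only
+//     GTF_GLOBALLY_VISIBLE_SIDE_EFFECTS(GTF_ASG | GTF_GLOB_REF) // true:  store to global memory
+//     GTF_GLOBALLY_VISIBLE_SIDE_EFFECTS(GTF_CALL)               // true:  any call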
+
+#if defined(DEBUG)
+
+//------------------------------------------------------------------------
+// GenTreeDebugFlags: a bitmask of debug-only flags for GenTree stored in gtDebugFlags
+//
+enum GenTreeDebugFlags : unsigned int
+{
+ GTF_DEBUG_NONE = 0x00000000, // No debug flags.
+
+ GTF_DEBUG_NODE_MORPHED = 0x00000001, // the node has been morphed (in the global morphing phase)
+ GTF_DEBUG_NODE_SMALL = 0x00000002,
+ GTF_DEBUG_NODE_LARGE = 0x00000004,
+ GTF_DEBUG_NODE_CG_PRODUCED = 0x00000008, // genProduceReg has been called on this node
+ GTF_DEBUG_NODE_CG_CONSUMED = 0x00000010, // genConsumeReg has been called on this node
+ GTF_DEBUG_NODE_LSRA_ADDED = 0x00000020, // This node was added by LSRA
+
+ GTF_DEBUG_NODE_MASK = 0x0000003F, // These flags are all node (rather than operation) properties.
+
+ GTF_DEBUG_VAR_CSE_REF = 0x00800000, // GT_LCL_VAR -- This is a CSE LCL_VAR node
+};
+
+inline constexpr GenTreeDebugFlags operator ~(GenTreeDebugFlags a)
+{
+ return (GenTreeDebugFlags)(~(unsigned int)a);
+}
+
+inline constexpr GenTreeDebugFlags operator |(GenTreeDebugFlags a, GenTreeDebugFlags b)
+{
+ return (GenTreeDebugFlags)((unsigned int)a | (unsigned int)b);
+}
+
+inline constexpr GenTreeDebugFlags operator &(GenTreeDebugFlags a, GenTreeDebugFlags b)
+{
+ return (GenTreeDebugFlags)((unsigned int)a & (unsigned int)b);
+}
+
+inline GenTreeDebugFlags& operator |=(GenTreeDebugFlags& a, GenTreeDebugFlags b)
+{
+ return a = (GenTreeDebugFlags)((unsigned int)a | (unsigned int)b);
+}
+
+inline GenTreeDebugFlags& operator &=(GenTreeDebugFlags& a, GenTreeDebugFlags b)
+{
+ return a = (GenTreeDebugFlags)((unsigned int)a & (unsigned int)b);
+}
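+
+// Sketch (ours) of typical use, mirroring the gtDebugFlags sites later in this
+// change, for a hypothetical node `tree`:
+//
+//     #ifdef DEBUG
+//     tree->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED;
+//     assert((tree->gtDebugFlags & GTF_DEBUG_NODE_MORPHED) != 0);
+//     #endif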
+
+#endif // defined(DEBUG)
+
+// clang-format on
+
#ifndef HOST_64BIT
#include <pshpack4.h>
#endif
@@ -643,11 +979,11 @@ public:
regMaskTP gtGetRegMask() const;
- unsigned gtFlags; // see GTF_xxxx below
+ GenTreeFlags gtFlags;
#if defined(DEBUG)
- unsigned gtDebugFlags; // see GTF_DEBUG_xxx below
-#endif // defined(DEBUG)
+ GenTreeDebugFlags gtDebugFlags;
+#endif // defined(DEBUG)
ValueNumPair gtVNPair;
@@ -695,278 +1031,6 @@ public:
gtVNPair = ValueNumPair(); // Initializes both elements to "NoVN".
}
-// clang-format off
-
-//---------------------------------------------------------------------
-//
-// GenTree flags stored in gtFlags.
-//
-//---------------------------------------------------------------------
-
-//---------------------------------------------------------------------
-// The first set of flags can be used with a large set of nodes, and
-// thus they must all have distinct values. That is, one can test any
-// expression node for one of these flags.
-//---------------------------------------------------------------------
-
-#define GTF_ASG 0x00000001 // sub-expression contains an assignment
-#define GTF_CALL 0x00000002 // sub-expression contains a func. call
-#define GTF_EXCEPT 0x00000004 // sub-expression might throw an exception
-#define GTF_GLOB_REF 0x00000008 // sub-expression uses global variable(s)
-#define GTF_ORDER_SIDEEFF 0x00000010 // sub-expression has a re-ordering side effect
-
-// If you set these flags, make sure that code:gtExtractSideEffList knows how to find the tree,
-// otherwise the C# (run csc /o-) code:
-// var v = side_eff_operation
-// with no use of v will drop your tree on the floor.
-#define GTF_PERSISTENT_SIDE_EFFECTS (GTF_ASG | GTF_CALL)
-#define GTF_SIDE_EFFECT (GTF_PERSISTENT_SIDE_EFFECTS | GTF_EXCEPT)
-#define GTF_GLOB_EFFECT (GTF_SIDE_EFFECT | GTF_GLOB_REF)
-#define GTF_ALL_EFFECT (GTF_GLOB_EFFECT | GTF_ORDER_SIDEEFF)
-
-// The extra flag GTF_IS_IN_CSE is used to tell the consumer of these flags
-// that we are calling in the context of performing a CSE, thus we
-// should allow the run-once side effects of running a class constructor.
-//
-// The only requirement of this flag is that it not overlap any of the
-// side-effect flags. The actual bit used is otherwise arbitrary.
-#define GTF_IS_IN_CSE GTF_BOOLEAN
-
-// Can any side-effects be observed externally, say by a caller method?
-// For assignments, only assignments to global memory can be observed
-// externally, whereas simple assignments to local variables can not.
-//
-// Be careful when using this inside a "try" protected region as the
-// order of assignments to local variables would need to be preserved
-// wrt side effects if the variables are alive on entry to the
-// "catch/finally" region. In such cases, even assignments to locals
-// will have to be restricted.
-#define GTF_GLOBALLY_VISIBLE_SIDE_EFFECTS(flags) \
- (((flags) & (GTF_CALL | GTF_EXCEPT)) || (((flags) & (GTF_ASG | GTF_GLOB_REF)) == (GTF_ASG | GTF_GLOB_REF)))
-
-#define GTF_REVERSE_OPS 0x00000020 // operand op2 should be evaluated before op1 (normally, op1 is evaluated first and op2 is evaluated second)
-#define GTF_CONTAINED 0x00000040 // This node is contained (executed as part of its parent)
-#define GTF_SPILLED 0x00000080 // the value has been spilled
-
-#define GTF_NOREG_AT_USE 0x00000100 // tree node is in memory at the point of use
-
-#define GTF_SET_FLAGS 0x00000200 // Requires that codegen for this node set the flags. Use gtSetFlags() to check this flag.
-#define GTF_USE_FLAGS 0x00000400 // Indicates that this node uses the flags bits.
-
-#define GTF_MAKE_CSE 0x00000800 // Hoisted expression: try hard to make this into CSE (see optPerformHoistExpr)
-#define GTF_DONT_CSE 0x00001000 // Don't bother CSE'ing this expr
-#define GTF_COLON_COND 0x00002000 // This node is conditionally executed (part of ? :)
-
-#define GTF_NODE_MASK (GTF_COLON_COND)
-
-#define GTF_BOOLEAN 0x00004000 // value is known to be 0/1
-
-#define GTF_UNSIGNED 0x00008000 // With GT_CAST: the source operand is an unsigned type
- // With operators: the specified node is an unsigned operator
- //
-#define GTF_LATE_ARG 0x00010000 // The specified node is evaluated to a temp in the arg list, and this temp is added to gtCallLateArgs.
-#define GTF_SPILL 0x00020000 // Needs to be spilled here
-
-#define GTF_COMMON_MASK 0x0003FFFF // mask of all the flags above
-
-#define GTF_REUSE_REG_VAL 0x00800000 // This is set by the register allocator on nodes whose value already exists in the
- // register assigned to this node, so the code generator does not have to generate
- // code to produce the value. It is currently used only on constant nodes.
- // It CANNOT be set on var (GT_LCL*) nodes, or on indir (GT_IND or GT_STOREIND) nodes, since
- // it is not needed for lclVars and is highly unlikely to be useful for indir nodes.
-
-//---------------------------------------------------------------------
-// The following flags can be used only with a small set of nodes, and
-// thus their values need not be distinct (other than within the set
-// that goes with a particular node/nodes, of course). That is, one can
-// only test for one of these flags if the 'gtOper' value is tested as
-// well to make sure it's the right operator for the particular flag.
-//---------------------------------------------------------------------
-
-// NB: GTF_VAR_* and GTF_REG_* share the same namespace of flags.
-// These flags are also used by GT_LCL_FLD, and the last-use (DEATH) flags are also used by GenTreeCopyOrReload.
-#define GTF_VAR_DEF 0x80000000 // GT_LCL_VAR -- this is a definition
-#define GTF_VAR_USEASG 0x40000000 // GT_LCL_VAR -- this is a partial definition, a use of the previous definition is implied
- // A partial definition usually occurs when a struct field is assigned to (s.f = ...) or
- // when a scalar typed variable is assigned to via a narrow store (*((byte*)&i) = ...).
-// Last-use bits.
-// Note that a node marked GTF_VAR_MULTIREG can only be a pure definition of all the fields, or a pure use of all the fields,
-// so we don't need the equivalent of GTF_VAR_USEASG.
-
-#define GTF_VAR_MULTIREG_DEATH0 0x04000000 // GT_LCL_VAR -- The last-use bit for a lclVar (the first register if it is multireg).
-#define GTF_VAR_DEATH GTF_VAR_MULTIREG_DEATH0
-#define GTF_VAR_MULTIREG_DEATH1 0x08000000 // GT_LCL_VAR -- The last-use bit for the second register of a multireg lclVar.
-#define GTF_VAR_MULTIREG_DEATH2 0x10000000 // GT_LCL_VAR -- The last-use bit for the third register of a multireg lclVar.
-#define GTF_VAR_MULTIREG_DEATH3 0x20000000 // GT_LCL_VAR -- The last-use bit for the fourth register of a multireg lclVar.
-#define GTF_VAR_DEATH_MASK (GTF_VAR_MULTIREG_DEATH0|GTF_VAR_MULTIREG_DEATH1 | GTF_VAR_MULTIREG_DEATH2 | GTF_VAR_MULTIREG_DEATH3)
-// This is the amount we have to shift, plus the regIndex, to get the last use bit we want.
-#define MULTIREG_LAST_USE_SHIFT 26
-#define GTF_VAR_MULTIREG 0x02000000 // This is a struct or (on 32-bit platforms) long variable that is used or defined
- // to/from a multireg source or destination (e.g. a call arg or return, or an op
- // that returns its result in multiple registers such as a long multiply).
-
-#define GTF_LIVENESS_MASK (GTF_VAR_DEF | GTF_VAR_USEASG | GTF_VAR_DEATH_MASK)
-
-#define GTF_VAR_CAST 0x01000000 // GT_LCL_VAR -- has been explictly cast (variable node may not be type of local)
-#define GTF_VAR_ITERATOR 0x00800000 // GT_LCL_VAR -- this is a iterator reference in the loop condition
-#define GTF_VAR_CLONED 0x00400000 // GT_LCL_VAR -- this node has been cloned or is a clone
-#define GTF_VAR_CONTEXT 0x00200000 // GT_LCL_VAR -- this node is part of a runtime lookup
-#define GTF_VAR_FOLDED_IND 0x00100000 // GT_LCL_VAR -- this node was folded from *(typ*)&lclVar expression tree in fgMorphSmpOp()
-// where 'typ' is a small type and 'lclVar' corresponds to a normalized-on-store local variable.
-// This flag identifies such nodes in order to make sure that fgDoNormalizeOnStore() is called on their parents in post-order morph.
-
- // Relevant for inlining optimizations (see fgInlinePrependStatements)
-
-#define GTF_VAR_ARR_INDEX 0x00000020 // The variable is part of (the index portion of) an array index expression.
- // Shares a value with GTF_REVERSE_OPS, which is meaningless for local var.
-
- // For additional flags for GT_CALL node see GTF_CALL_M_*
-
-#define GTF_CALL_UNMANAGED 0x80000000 // GT_CALL -- direct call to unmanaged code
-#define GTF_CALL_INLINE_CANDIDATE 0x40000000 // GT_CALL -- this call has been marked as an inline candidate
-
-#define GTF_CALL_VIRT_KIND_MASK 0x30000000 // GT_CALL -- mask of the below call kinds
-#define GTF_CALL_NONVIRT 0x00000000 // GT_CALL -- a non virtual call
-#define GTF_CALL_VIRT_STUB 0x10000000 // GT_CALL -- a stub-dispatch virtual call
-#define GTF_CALL_VIRT_VTABLE 0x20000000 // GT_CALL -- a vtable-based virtual call
-
-#define GTF_CALL_NULLCHECK 0x08000000 // GT_CALL -- must check instance pointer for null
-#define GTF_CALL_POP_ARGS 0x04000000 // GT_CALL -- caller pop arguments?
-#define GTF_CALL_HOISTABLE 0x02000000 // GT_CALL -- call is hoistable
-
-#define GTF_MEMORYBARRIER_LOAD 0x40000000 // GT_MEMORYBARRIER -- Load barrier
-
-#define GTF_NOP_DEATH 0x40000000 // GT_NOP -- operand dies here
-
-#define GTF_FLD_VOLATILE 0x40000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE
-#define GTF_FLD_INITCLASS 0x20000000 // GT_FIELD/GT_CLS_VAR -- field access requires preceding class/static init helper
-
-#define GTF_INX_RNGCHK 0x80000000 // GT_INDEX/GT_INDEX_ADDR -- the array reference should be range-checked.
-#define GTF_INX_STRING_LAYOUT 0x40000000 // GT_INDEX -- this uses the special string array layout
-
-#define GTF_IND_TGT_NOT_HEAP 0x80000000 // GT_IND -- the target is not on the heap
-#define GTF_IND_VOLATILE 0x40000000 // GT_IND -- the load or store must use volatile sematics (this is a nop on X86)
-#define GTF_IND_NONFAULTING 0x20000000 // Operations for which OperIsIndir() is true -- An indir that cannot fault.
- // Same as GTF_ARRLEN_NONFAULTING.
-#define GTF_IND_TGTANYWHERE 0x10000000 // GT_IND -- the target could be anywhere
-#define GTF_IND_TLS_REF 0x08000000 // GT_IND -- the target is accessed via TLS
-#define GTF_IND_ASG_LHS 0x04000000 // GT_IND -- this GT_IND node is (the effective val) of the LHS of an
- // assignment; don't evaluate it independently.
-#define GTF_IND_REQ_ADDR_IN_REG GTF_IND_ASG_LHS // GT_IND -- requires its addr operand to be evaluated
- // into a register. This flag is useful in cases where it
- // is required to generate register indirect addressing mode.
- // One such case is virtual stub calls on xarch. This is only
- // valid in the backend, where GTF_IND_ASG_LHS is not necessary
- // (all such indirections will be lowered to GT_STOREIND).
-#define GTF_IND_UNALIGNED 0x02000000 // GT_IND -- the load or store is unaligned (we assume worst case
- // alignment of 1 byte)
-#define GTF_IND_INVARIANT 0x01000000 // GT_IND -- the target is invariant (a prejit indirection)
-#define GTF_IND_ARR_INDEX 0x00800000 // GT_IND -- the indirection represents an (SZ) array index
-#define GTF_IND_NONNULL 0x00400000 // GT_IND -- the indirection never returns null (zero)
-
-#define GTF_IND_FLAGS \
- (GTF_IND_VOLATILE | GTF_IND_TGTANYWHERE | GTF_IND_NONFAULTING | GTF_IND_TLS_REF | \
- GTF_IND_UNALIGNED | GTF_IND_INVARIANT | GTF_IND_NONNULL | GTF_IND_ARR_INDEX | GTF_IND_TGT_NOT_HEAP)
-
-#define GTF_CLS_VAR_VOLATILE 0x40000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE
-#define GTF_CLS_VAR_INITCLASS 0x20000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_FLD_INITCLASS
-#define GTF_CLS_VAR_ASG_LHS 0x04000000 // GT_CLS_VAR -- this GT_CLS_VAR node is (the effective val) of the LHS
- // of an assignment; don't evaluate it independently.
-
-#define GTF_ADDRMODE_NO_CSE 0x80000000 // GT_ADD/GT_MUL/GT_LSH -- Do not CSE this node only, forms complex
- // addressing mode
-
-#define GTF_MUL_64RSLT 0x40000000 // GT_MUL -- produce 64-bit result
-
-#define GTF_RELOP_NAN_UN 0x80000000 // GT_<relop> -- Is branch taken if ops are NaN?
-#define GTF_RELOP_JMP_USED 0x40000000 // GT_<relop> -- result of compare used for jump or ?:
-#define GTF_RELOP_QMARK 0x20000000 // GT_<relop> -- the node is the condition for ?:
-#define GTF_RELOP_ZTT 0x08000000 // GT_<relop> -- Loop test cloned for converting while-loops into do-while
- // with explicit "loop test" in the header block.
-
-#define GTF_JCMP_EQ 0x80000000 // GTF_JCMP_EQ -- Branch on equal rather than not equal
-#define GTF_JCMP_TST 0x40000000 // GTF_JCMP_TST -- Use bit test instruction rather than compare against zero instruction
-
-#define GTF_RET_MERGED 0x80000000 // GT_RETURN -- This is a return generated during epilog merging.
-
-#define GTF_QMARK_CAST_INSTOF 0x80000000 // GT_QMARK -- Is this a top (not nested) level qmark created for
- // castclass or instanceof?
-
-#define GTF_BOX_VALUE 0x80000000 // GT_BOX -- "box" is on a value type
-
-#define GTF_ICON_HDL_MASK 0xF0000000 // Bits used by handle types below
-#define GTF_ICON_SCOPE_HDL 0x10000000 // GT_CNS_INT -- constant is a scope handle
-#define GTF_ICON_CLASS_HDL 0x20000000 // GT_CNS_INT -- constant is a class handle
-#define GTF_ICON_METHOD_HDL 0x30000000 // GT_CNS_INT -- constant is a method handle
-#define GTF_ICON_FIELD_HDL 0x40000000 // GT_CNS_INT -- constant is a field handle
-#define GTF_ICON_STATIC_HDL 0x50000000 // GT_CNS_INT -- constant is a handle to static data
-#define GTF_ICON_STR_HDL 0x60000000 // GT_CNS_INT -- constant is a string handle
-#define GTF_ICON_CONST_PTR 0x70000000 // GT_CNS_INT -- constant is a pointer to immutable data, (e.g. IAT_PPVALUE)
-#define GTF_ICON_GLOBAL_PTR 0x80000000 // GT_CNS_INT -- constant is a pointer to mutable data (e.g. from the VM state)
-#define GTF_ICON_VARG_HDL 0x90000000 // GT_CNS_INT -- constant is a var arg cookie handle
-#define GTF_ICON_PINVKI_HDL 0xA0000000 // GT_CNS_INT -- constant is a pinvoke calli handle
-#define GTF_ICON_TOKEN_HDL 0xB0000000 // GT_CNS_INT -- constant is a token handle (other than class, method or field)
-#define GTF_ICON_TLS_HDL 0xC0000000 // GT_CNS_INT -- constant is a TLS ref with offset
-#define GTF_ICON_FTN_ADDR 0xD0000000 // GT_CNS_INT -- constant is a function address
-#define GTF_ICON_CIDMID_HDL 0xE0000000 // GT_CNS_INT -- constant is a class ID or a module ID
-#define GTF_ICON_BBC_PTR 0xF0000000 // GT_CNS_INT -- constant is a basic block count pointer
-
-#define GTF_ICON_FIELD_OFF 0x08000000 // GT_CNS_INT -- constant is a field offset
-#define GTF_ICON_SIMD_COUNT 0x04000000 // GT_CNS_INT -- constant is Vector<T>.Count
-
-#define GTF_ICON_INITCLASS 0x02000000 // GT_CNS_INT -- Constant is used to access a static that requires preceding
- // class/static init helper. In some cases, the constant is
- // the address of the static field itself, and in other cases
- // there's an extra layer of indirection and it is the address
- // of the cell that the runtime will fill in with the address
- // of the static field; in both of those cases, the constant
- // is what gets flagged.
-
-#define GTF_BLK_VOLATILE GTF_IND_VOLATILE // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is a volatile block operation
-#define GTF_BLK_UNALIGNED GTF_IND_UNALIGNED // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is an unaligned block operation
-
-#define GTF_OVERFLOW 0x10000000 // Supported for: GT_ADD, GT_SUB, GT_MUL and GT_CAST.
- // Requires an overflow check. Use gtOverflow(Ex)() to check this flag.
-
-#define GTF_DIV_BY_CNS_OPT 0x80000000 // GT_DIV -- Uses the division by constant optimization to compute this division
-
-#define GTF_ARR_BOUND_INBND 0x80000000 // GT_ARR_BOUNDS_CHECK -- have proved this check is always in-bounds
-
-#define GTF_ARRLEN_ARR_IDX 0x80000000 // GT_ARR_LENGTH -- Length which feeds into an array index expression
-#define GTF_ARRLEN_NONFAULTING 0x20000000 // GT_ARR_LENGTH -- An array length operation that cannot fault. Same as GT_IND_NONFAULTING.
-
-#define GTF_SIMDASHW_OP 0x80000000 // GT_HWINTRINSIC -- Indicates that the structHandle should be gotten from gtGetStructHandleForSIMD
- // rarther than from gtGetStructHandleForHWSIMD.
-
-//---------------------------------------------------------------------
-//
-// GenTree flags stored in gtDebugFlags.
-//
-//---------------------------------------------------------------------
-
-#if defined(DEBUG)
-#define GTF_DEBUG_NONE 0x00000000 // No debug flags.
-
-#define GTF_DEBUG_NODE_MORPHED 0x00000001 // the node has been morphed (in the global morphing phase)
-#define GTF_DEBUG_NODE_SMALL 0x00000002
-#define GTF_DEBUG_NODE_LARGE 0x00000004
-#define GTF_DEBUG_NODE_CG_PRODUCED 0x00000008 // genProduceReg has been called on this node
-#define GTF_DEBUG_NODE_CG_CONSUMED 0x00000010 // genConsumeReg has been called on this node
-#define GTF_DEBUG_NODE_LSRA_ADDED 0x00000020 // This node was added by LSRA
-
-#define GTF_DEBUG_NODE_MASK 0x0000003F // These flags are all node (rather than operation) properties.
-
-#define GTF_DEBUG_VAR_CSE_REF 0x00800000 // GT_LCL_VAR -- This is a CSE LCL_VAR node
-#endif // defined(DEBUG)
-
-//---------------------------------------------------------------------
-//
-// end of GenTree flags definitions
-//
-//---------------------------------------------------------------------
-
- // clang-format on
-
GenTree* gtNext;
GenTree* gtPrev;
@@ -1755,7 +1819,7 @@ public:
inline GenTree* gtCommaAssignVal();
// Tunnel through any GT_RET_EXPRs
- inline GenTree* gtRetExprVal(unsigned __int64* pbbFlags = nullptr);
+ GenTree* gtRetExprVal(BasicBlockFlags* pbbFlags = nullptr);
inline GenTree* gtSkipPutArgType();
@@ -1781,11 +1845,11 @@ public:
var_types GetRegTypeByIndex(int regIndex);
// Returns the GTF flag equivalent for the regIndex'th register of a multi-reg node.
- unsigned int GetRegSpillFlagByIdx(int regIndex) const;
+ GenTreeFlags GetRegSpillFlagByIdx(int regIndex) const;
// Last-use information for either GenTreeLclVar or GenTreeCopyOrReload nodes.
private:
- unsigned int GetLastUseBit(int regIndex);
+ GenTreeFlags GetLastUseBit(int regIndex);
public:
bool IsLastUse(int regIndex);
@@ -2065,7 +2129,7 @@ public:
return (gtFlags & GTF_ICON_HDL_MASK) ? true : false;
}
- bool IsIconHandle(unsigned handleType) const
+ bool IsIconHandle(GenTreeFlags handleType) const
{
assert(gtOper == GT_CNS_INT);
assert((handleType & GTF_ICON_HDL_MASK) != 0); // check that handleType is one of the valid GTF_ICON_* values
@@ -2076,7 +2140,7 @@ public:
// Return just the part of the flags corresponding to the GTF_ICON_*_HDL flag. For example,
// GTF_ICON_SCOPE_HDL. The tree node must be a const int, but it might not be a handle, in which
// case we'll return zero.
- unsigned GetIconHandleFlag() const
+ GenTreeFlags GetIconHandleFlag() const
{
assert(gtOper == GT_CNS_INT);
return (gtFlags & GTF_ICON_HDL_MASK);
@@ -2112,7 +2176,7 @@ public:
#ifdef DEBUG
bool gtIsValid64RsltMul();
- static int gtDispFlags(unsigned flags, unsigned debugFlags);
+ static int gtDispFlags(GenTreeFlags flags, GenTreeDebugFlags debugFlags);
#endif
// cast operations
@@ -3251,13 +3315,13 @@ static const unsigned PACKED_GTF_SPILLED = 2;
// Return Value:
// Returns GTF_* flags associated with the register. Only GTF_SPILL and GTF_SPILLED are considered.
//
-inline unsigned GetMultiRegSpillFlagsByIdx(MultiRegSpillFlags flags, unsigned idx)
+inline GenTreeFlags GetMultiRegSpillFlagsByIdx(MultiRegSpillFlags flags, unsigned idx)
{
static_assert_no_msg(MAX_RET_REG_COUNT * 2 <= sizeof(unsigned char) * BITS_PER_BYTE);
assert(idx < MAX_RET_REG_COUNT);
- unsigned bits = flags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here.
- unsigned spillFlags = 0;
+ unsigned bits = flags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here.
+ GenTreeFlags spillFlags = GTF_EMPTY;
if (bits & PACKED_GTF_SPILL)
{
spillFlags |= GTF_SPILL;
@@ -3283,7 +3347,7 @@ inline unsigned GetMultiRegSpillFlagsByIdx(MultiRegSpillFlags flags, unsigned id
// Return Value:
// The new value for the node's MultiRegSpillFlags.
//
-inline MultiRegSpillFlags SetMultiRegSpillFlagsByIdx(MultiRegSpillFlags oldFlags, unsigned flagsToSet, unsigned idx)
+inline MultiRegSpillFlags SetMultiRegSpillFlagsByIdx(MultiRegSpillFlags oldFlags, GenTreeFlags flagsToSet, unsigned idx)
{
static_assert_no_msg(MAX_RET_REG_COUNT * 2 <= sizeof(unsigned char) * BITS_PER_BYTE);
assert(idx < MAX_RET_REG_COUNT);
@@ -3351,12 +3415,12 @@ public:
}
}
- unsigned GetRegSpillFlagByIdx(unsigned idx) const
+ GenTreeFlags GetRegSpillFlagByIdx(unsigned idx) const
{
return GetMultiRegSpillFlagsByIdx(gtSpillFlags, idx);
}
- void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
+ void SetRegSpillFlagByIdx(GenTreeFlags flags, unsigned idx)
{
gtSpillFlags = SetMultiRegSpillFlagsByIdx(gtSpillFlags, flags, idx);
}
@@ -3467,7 +3531,7 @@ struct GenTreeCast : public GenTreeOp
GenTreeCast(var_types type, GenTree* op, bool fromUnsigned, var_types castType DEBUGARG(bool largeNode = false))
: GenTreeOp(GT_CAST, type, op, nullptr DEBUGARG(largeNode)), gtCastType(castType)
{
- gtFlags |= fromUnsigned ? GTF_UNSIGNED : 0;
+ gtFlags |= fromUnsigned ? GTF_UNSIGNED : GTF_EMPTY;
}
#if DEBUGGABLE_GENTREE
GenTreeCast() : GenTreeOp()
@@ -3625,6 +3689,89 @@ struct GenTreeColon : public GenTreeOp
// gtCall -- method call (GT_CALL)
enum class InlineObservation;
+//------------------------------------------------------------------------
+// GenTreeCallFlags: a bitmask of flags for GenTreeCall stored in gtCallMoreFlags.
+//
+// clang-format off
+enum GenTreeCallFlags : unsigned int
+{
+ GTF_CALL_M_EMPTY = 0,
+
+ GTF_CALL_M_EXPLICIT_TAILCALL = 0x00000001, // the call is "tail" prefixed and importer has performed tail call checks
+ GTF_CALL_M_TAILCALL = 0x00000002, // the call is a tailcall
+ GTF_CALL_M_VARARGS = 0x00000004, // the call uses varargs ABI
+ GTF_CALL_M_RETBUFFARG = 0x00000008, // call has a return buffer argument
+ GTF_CALL_M_DELEGATE_INV = 0x00000010, // call to Delegate.Invoke
+ GTF_CALL_M_NOGCCHECK = 0x00000020, // not a call for computing full interruptibility and therefore no GC check is required.
+ GTF_CALL_M_SPECIAL_INTRINSIC = 0x00000040, // function that could be optimized as an intrinsic
+ // in special cases. Used to optimize fast way out in morphing
+ GTF_CALL_M_UNMGD_THISCALL = 0x00000080, // "this" pointer (first argument) should be enregistered (only for GTF_CALL_UNMANAGED)
+ GTF_CALL_M_VIRTSTUB_REL_INDIRECT = 0x00000080, // the virtstub is indirected through a relative address (only for GTF_CALL_VIRT_STUB)
+ GTF_CALL_M_NONVIRT_SAME_THIS = 0x00000080, // callee "this" pointer is equal to caller this pointer (only for GTF_CALL_NONVIRT)
+ GTF_CALL_M_FRAME_VAR_DEATH = 0x00000100, // the compLvFrameListRoot variable dies here (last use)
+ GTF_CALL_M_TAILCALL_VIA_JIT_HELPER = 0x00000200, // call is a tail call dispatched via tail call JIT helper.
+
+#if FEATURE_TAILCALL_OPT
+ GTF_CALL_M_IMPLICIT_TAILCALL = 0x00000400, // call is an opportunistic tail call and importer has performed tail call checks
+ GTF_CALL_M_TAILCALL_TO_LOOP = 0x00000800, // call is a fast recursive tail call that can be converted into a loop
+#endif
+
+ GTF_CALL_M_PINVOKE = 0x00001000, // call is a pinvoke. This mirrors VM flag CORINFO_FLG_PINVOKE.
+ // A call marked as a pinvoke is not necessarily GTF_CALL_UNMANAGED. For example,
+ // an IL stub dynamically generated for a PInvoke declaration is flagged as
+ // a pinvoke but not as an unmanaged call. See impCheckForPInvokeCall() to
+ // see when these flags are set.
+
+ GTF_CALL_M_R2R_REL_INDIRECT = 0x00002000, // ready to run call is indirected through a relative address
+ GTF_CALL_M_DOES_NOT_RETURN = 0x00004000, // call does not return
+ GTF_CALL_M_WRAPPER_DELEGATE_INV = 0x00008000, // call is in wrapper delegate
+ GTF_CALL_M_FAT_POINTER_CHECK = 0x00010000, // CoreRT managed calli needs a transformation that checks a
+ // special bit in the calli address. If it is set, then it is necessary
+ // to restore the real function address and load the hidden argument
+ // as the first argument for the calli. This is the CoreRT replacement for
+ // instantiating stubs, because executable code cannot be generated at runtime.
+ GTF_CALL_M_HELPER_SPECIAL_DCE = 0x00020000, // this helper call can be removed if it is part of a comma and
+ // the comma result is unused.
+ GTF_CALL_M_DEVIRTUALIZED = 0x00040000, // this call was devirtualized
+ GTF_CALL_M_UNBOXED = 0x00080000, // this call was optimized to use the unboxed entry point
+ GTF_CALL_M_GUARDED_DEVIRT = 0x00100000, // this call is a candidate for guarded devirtualization
+ GTF_CALL_M_GUARDED_DEVIRT_CHAIN = 0x00200000, // this call is a candidate for chained guarded devirtualization
+ GTF_CALL_M_GUARDED = 0x00400000, // this call was transformed by guarded devirtualization
+ GTF_CALL_M_ALLOC_SIDE_EFFECTS = 0x00800000, // this is a call to an allocator with side effects
+ GTF_CALL_M_SUPPRESS_GC_TRANSITION = 0x01000000, // suppress the GC transition (i.e. during a pinvoke) but a separate GC safe point is required.
+ GTF_CALL_M_EXP_RUNTIME_LOOKUP = 0x02000000, // this call needs to be transformed into CFG for the dynamic dictionary expansion feature.
+ GTF_CALL_M_STRESS_TAILCALL = 0x04000000, // the call is NOT "tail" prefixed but GTF_CALL_M_EXPLICIT_TAILCALL was added because of tail call stress mode
+ GTF_CALL_M_EXPANDED_EARLY = 0x08000000, // the Virtual Call target address is expanded and placed in gtControlExpr in Morph rather than in Lower
+
+};
+
+inline constexpr GenTreeCallFlags operator ~(GenTreeCallFlags a)
+{
+ return (GenTreeCallFlags)(~(unsigned int)a);
+}
+
+inline constexpr GenTreeCallFlags operator |(GenTreeCallFlags a, GenTreeCallFlags b)
+{
+ return (GenTreeCallFlags)((unsigned int)a | (unsigned int)b);
+}
+
+inline constexpr GenTreeCallFlags operator &(GenTreeCallFlags a, GenTreeCallFlags b)
+{
+ return (GenTreeCallFlags)((unsigned int)a & (unsigned int)b);
+}
+
+inline GenTreeCallFlags& operator |=(GenTreeCallFlags& a, GenTreeCallFlags b)
+{
+ return a = (GenTreeCallFlags)((unsigned int)a | (unsigned int)b);
+}
+
+inline GenTreeCallFlags& operator &=(GenTreeCallFlags& a, GenTreeCallFlags b)
+{
+ return a = (GenTreeCallFlags)((unsigned int)a & (unsigned int)b);
+}
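+
+// Illustration (ours): gtCallMoreFlags (declared further below on GenTreeCall)
+// composes the same way, e.g. for a hypothetical `call`:
+//
+//     call->gtCallMoreFlags |= GTF_CALL_M_TAILCALL;
+//     if ((call->gtCallMoreFlags & GTF_CALL_M_EXPLICIT_TAILCALL) != 0) { /* ... */ }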
+
+// clang-format on
+
// Return type descriptor of a GT_CALL node.
// x64 Unix, Arm64, Arm32 and x86 allow a value to be returned in multiple
// registers. For such calls this struct provides the following info
@@ -4131,17 +4278,17 @@ struct GenTreeCall final : public GenTree
// Get reg mask of all the valid registers of gtOtherRegs array
regMaskTP GetOtherRegMask() const;
- unsigned GetRegSpillFlagByIdx(unsigned idx) const
+ GenTreeFlags GetRegSpillFlagByIdx(unsigned idx) const
{
#if FEATURE_MULTIREG_RET
return GetMultiRegSpillFlagsByIdx(gtSpillFlags, idx);
#else
assert(!"unreached");
- return 0;
+ return GTF_EMPTY;
#endif
}
- void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
+ void SetRegSpillFlagByIdx(GenTreeFlags flags, unsigned idx)
{
#if FEATURE_MULTIREG_RET
gtSpillFlags = SetMultiRegSpillFlagsByIdx(gtSpillFlags, flags, idx);
@@ -4180,62 +4327,6 @@ struct GenTreeCall final : public GenTree
#endif
}
-// clang-format off
-
-#define GTF_CALL_M_EXPLICIT_TAILCALL 0x00000001 // GT_CALL -- the call is "tail" prefixed and
- // importer has performed tail call checks
-#define GTF_CALL_M_TAILCALL 0x00000002 // GT_CALL -- the call is a tailcall
-#define GTF_CALL_M_VARARGS 0x00000004 // GT_CALL -- the call uses varargs ABI
-#define GTF_CALL_M_RETBUFFARG 0x00000008 // GT_CALL -- call has a return buffer argument
-#define GTF_CALL_M_DELEGATE_INV 0x00000010 // GT_CALL -- call to Delegate.Invoke
-#define GTF_CALL_M_NOGCCHECK 0x00000020 // GT_CALL -- not a call for computing full interruptability and therefore no GC check is required.
-#define GTF_CALL_M_SPECIAL_INTRINSIC 0x00000040 // GT_CALL -- function that could be optimized as an intrinsic
- // in special cases. Used to optimize fast way out in morphing
-#define GTF_CALL_M_UNMGD_THISCALL 0x00000080 // GT_CALL -- "this" pointer (first argument)
- // should be enregistered (only for GTF_CALL_UNMANAGED)
-#define GTF_CALL_M_VIRTSTUB_REL_INDIRECT 0x00000080 // the virtstub is indirected through
- // a relative address (only for GTF_CALL_VIRT_STUB)
-#define GTF_CALL_M_NONVIRT_SAME_THIS 0x00000080 // GT_CALL -- callee "this" pointer is
- // equal to caller this pointer (only for GTF_CALL_NONVIRT)
-#define GTF_CALL_M_FRAME_VAR_DEATH 0x00000100 // GT_CALL -- the compLvFrameListRoot variable dies here (last use)
-#define GTF_CALL_M_TAILCALL_VIA_JIT_HELPER 0x00000200 // GT_CALL -- call is a tail call dispatched via tail call JIT helper.
-
-#if FEATURE_TAILCALL_OPT
-#define GTF_CALL_M_IMPLICIT_TAILCALL 0x00000400 // GT_CALL -- call is an opportunistic
- // tail call and importer has performed tail call checks
-#define GTF_CALL_M_TAILCALL_TO_LOOP 0x00000800 // GT_CALL -- call is a fast recursive tail call
- // that can be converted into a loop
-#endif
-
-#define GTF_CALL_M_PINVOKE 0x00001000 // GT_CALL -- call is a pinvoke. This mirrors VM flag CORINFO_FLG_PINVOKE.
- // A call marked as Pinvoke is not necessarily a GT_CALL_UNMANAGED. For e.g.
- // an IL Stub dynamically generated for a PInvoke declaration is flagged as
- // a Pinvoke but not as an unmanaged call. See impCheckForPInvokeCall() to
- // know when these flags are set.
-
-#define GTF_CALL_M_R2R_REL_INDIRECT 0x00002000 // GT_CALL -- ready to run call is indirected through a relative address
-#define GTF_CALL_M_DOES_NOT_RETURN 0x00004000 // GT_CALL -- call does not return
-#define GTF_CALL_M_WRAPPER_DELEGATE_INV 0x00008000 // GT_CALL -- call is in wrapper delegate
-#define GTF_CALL_M_FAT_POINTER_CHECK 0x00010000 // GT_CALL -- CoreRT managed calli needs transformation, that checks
- // special bit in calli address. If it is set, then it is necessary
- // to restore real function address and load hidden argument
- // as the first argument for calli. It is CoreRT replacement for instantiating
- // stubs, because executable code cannot be generated at runtime.
-#define GTF_CALL_M_HELPER_SPECIAL_DCE 0x00020000 // GT_CALL -- this helper call can be removed if it is part of a comma and
- // the comma result is unused.
-#define GTF_CALL_M_DEVIRTUALIZED 0x00040000 // GT_CALL -- this call was devirtualized
-#define GTF_CALL_M_UNBOXED 0x00080000 // GT_CALL -- this call was optimized to use the unboxed entry point
-#define GTF_CALL_M_GUARDED_DEVIRT 0x00100000 // GT_CALL -- this call is a candidate for guarded devirtualization
-#define GTF_CALL_M_GUARDED_DEVIRT_CHAIN 0x00200000 // GT_CALL -- this call is a candidate for chained guarded devirtualization
-#define GTF_CALL_M_GUARDED 0x00400000 // GT_CALL -- this call was transformed by guarded devirtualization
-#define GTF_CALL_M_ALLOC_SIDE_EFFECTS 0x00800000 // GT_CALL -- this is a call to an allocator with side effects
-#define GTF_CALL_M_SUPPRESS_GC_TRANSITION 0x01000000 // GT_CALL -- suppress the GC transition (i.e. during a pinvoke) but a separate GC safe point is required.
-#define GTF_CALL_M_EXP_RUNTIME_LOOKUP 0x02000000 // GT_CALL -- this call needs to be tranformed into CFG for the dynamic dictionary expansion feature.
-#define GTF_CALL_M_STRESS_TAILCALL 0x04000000 // GT_CALL -- the call is NOT "tail" prefixed but GTF_CALL_M_EXPLICIT_TAILCALL was added because of tail call stress mode
-#define GTF_CALL_M_EXPANDED_EARLY 0x08000000 // GT_CALL -- the Virtual Call target address is expanded and placed in gtControlExpr in Morph rather than in Lower
-
- // clang-format on
-
bool IsUnmanaged() const
{
return (gtFlags & GTF_CALL_UNMANAGED) != 0;
@@ -4556,7 +4647,7 @@ struct GenTreeCall final : public GenTree
void ResetArgInfo();
- unsigned gtCallMoreFlags; // in addition to gtFlags
+ GenTreeCallFlags gtCallMoreFlags; // in addition to gtFlags
unsigned char gtCallType : 3; // value from the gtCallTypes enumeration
unsigned char gtReturnType : 5; // exact return type
@@ -4700,12 +4791,12 @@ struct GenTreeMultiRegOp : public GenTreeOp
return gtOtherReg;
}
- unsigned GetRegSpillFlagByIdx(unsigned idx) const
+ GenTreeFlags GetRegSpillFlagByIdx(unsigned idx) const
{
return GetMultiRegSpillFlagsByIdx(gtSpillFlags, idx);
}
- void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
+ void SetRegSpillFlagByIdx(GenTreeFlags flags, unsigned idx)
{
#if FEATURE_MULTIREG_RET
gtSpillFlags = SetMultiRegSpillFlagsByIdx(gtSpillFlags, flags, idx);
@@ -5782,7 +5873,7 @@ struct GenTreeRetExpr : public GenTree
{
GenTree* gtInlineCandidate;
- unsigned __int64 bbFlags;
+ BasicBlockFlags bbFlags;
CORINFO_CLASS_HANDLE gtRetClsHnd;
@@ -6353,12 +6444,12 @@ struct GenTreePutArgSplit : public GenTreePutArgStk
}
}
- unsigned GetRegSpillFlagByIdx(unsigned idx) const
+ GenTreeFlags GetRegSpillFlagByIdx(unsigned idx) const
{
return GetMultiRegSpillFlagsByIdx(gtSpillFlags, idx);
}
- void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
+ void SetRegSpillFlagByIdx(GenTreeFlags flags, unsigned idx)
{
#if FEATURE_MULTIREG_RET
gtSpillFlags = SetMultiRegSpillFlagsByIdx(gtSpillFlags, flags, idx);
@@ -7268,44 +7359,6 @@ inline GenTree* GenTree::gtCommaAssignVal()
}
//-------------------------------------------------------------------------
-// gtRetExprVal - walk back through GT_RET_EXPRs
-//
-// Arguments:
-// pbbFlags - out-parameter that is set to the flags of the basic block
-// containing the inlinee return value. The value is 0
-// for unsuccessful inlines.
-//
-// Returns:
-// tree representing return value from a successful inline,
-// or original call for failed or yet to be determined inline.
-//
-// Notes:
-// Multi-level inlines can form chains of GT_RET_EXPRs.
-// This method walks back to the root of the chain.
-
-inline GenTree* GenTree::gtRetExprVal(unsigned __int64* pbbFlags /* = nullptr */)
-{
- GenTree* retExprVal = this;
- unsigned __int64 bbFlags = 0;
-
- assert(!retExprVal->OperIs(GT_PUTARG_TYPE));
-
- while (retExprVal->OperIs(GT_RET_EXPR))
- {
- const GenTreeRetExpr* retExpr = retExprVal->AsRetExpr();
- bbFlags = retExpr->bbFlags;
- retExprVal = retExpr->gtInlineCandidate;
- }
-
- if (pbbFlags != nullptr)
- {
- *pbbFlags = bbFlags;
- }
-
- return retExprVal;
-}
-
-//-------------------------------------------------------------------------
// gtSkipPutArgType - skip PUTARG_TYPE if it is presented.
//
// Returns:
@@ -7620,12 +7673,12 @@ inline var_types GenTree::GetRegTypeByIndex(int regIndex)
// This must be a multireg node and 'regIndex' must be a valid index for this node.
// This method returns the GTF "equivalent" flags based on the packed flags on the multireg node.
//
-inline unsigned int GenTree::GetRegSpillFlagByIdx(int regIndex) const
+inline GenTreeFlags GenTree::GetRegSpillFlagByIdx(int regIndex) const
{
#if FEATURE_MULTIREG_RET
if (IsMultiRegCall())
{
- return AsCall()->AsCall()->GetRegSpillFlagByIdx(regIndex);
+ return AsCall()->GetRegSpillFlagByIdx(regIndex);
}
#if FEATURE_ARG_SPLIT
@@ -7634,6 +7687,7 @@ inline unsigned int GenTree::GetRegSpillFlagByIdx(int regIndex) const
return AsPutArgSplit()->GetRegSpillFlagByIdx(regIndex);
}
#endif
+
#if !defined(TARGET_64BIT)
if (OperIsMultiRegOp())
{
@@ -7643,22 +7697,13 @@ inline unsigned int GenTree::GetRegSpillFlagByIdx(int regIndex) const
#endif // FEATURE_MULTIREG_RET
-#if defined(TARGET_XARCH) && defined(FEATURE_HW_INTRINSICS)
- if (OperIs(GT_HWINTRINSIC))
- {
- // At this time, the only multi-reg HW intrinsics all return the type of their
- // arguments. If this changes, we will need a way to record or determine this.
- assert(TypeGet() == TYP_STRUCT);
- return gtGetOp1()->TypeGet();
- }
-#endif
if (OperIs(GT_LCL_VAR, GT_STORE_LCL_VAR))
{
return AsLclVar()->GetRegSpillFlagByIdx(regIndex);
}
assert(!"Invalid node type for GetRegSpillFlagByIdx");
- return TYP_UNDEF;
+ return GTF_EMPTY;
}
//-----------------------------------------------------------------------------------
@@ -7673,12 +7718,12 @@ inline unsigned int GenTree::GetRegSpillFlagByIdx(int regIndex) const
// Notes:
// This must be a GenTreeLclVar or GenTreeCopyOrReload node.
//
-inline unsigned int GenTree::GetLastUseBit(int regIndex)
+inline GenTreeFlags GenTree::GetLastUseBit(int regIndex)
{
assert(regIndex < 4);
assert(OperIs(GT_LCL_VAR, GT_STORE_LCL_VAR, GT_COPY, GT_RELOAD));
static_assert_no_msg((1 << MULTIREG_LAST_USE_SHIFT) == GTF_VAR_MULTIREG_DEATH0);
- return (1 << (MULTIREG_LAST_USE_SHIFT + regIndex));
+ return (GenTreeFlags)(1 << (MULTIREG_LAST_USE_SHIFT + regIndex));
}
//-----------------------------------------------------------------------------------
diff --git a/src/coreclr/jit/importer.cpp b/src/coreclr/jit/importer.cpp
index 636ee3e91b2..09ee9f21aaf 100644
--- a/src/coreclr/jit/importer.cpp
+++ b/src/coreclr/jit/importer.cpp
@@ -1279,8 +1279,8 @@ GenTree* Compiler::impAssignStructPtr(GenTree* destAddr,
BasicBlock* block /* = NULL */
)
{
- GenTree* dest = nullptr;
- unsigned destFlags = 0;
+ GenTree* dest = nullptr;
+ GenTreeFlags destFlags = GTF_EMPTY;
if (ilOffset == BAD_IL_OFFSET)
{
@@ -2025,7 +2025,7 @@ GenTree* Compiler::impTokenToHandle(CORINFO_RESOLVED_TOKEN* pResolvedToken,
GenTree* Compiler::impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_LOOKUP* pLookup,
- unsigned handleFlags,
+ GenTreeFlags handleFlags,
void* compileTimeHandle)
{
if (!pLookup->lookupKind.needsRuntimeLookup)
@@ -2086,7 +2086,7 @@ GenTree* Compiler::impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
#ifdef FEATURE_READYTORUN_COMPILER
GenTree* Compiler::impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup,
- unsigned handleFlags,
+ GenTreeFlags handleFlags,
void* compileTimeHandle)
{
CORINFO_GENERIC_HANDLE handle = nullptr;
@@ -7566,7 +7566,7 @@ GenTree* Compiler::impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolvedT
#ifdef FEATURE_READYTORUN_COMPILER
if (opts.IsReadyToRun())
{
- unsigned callFlags = 0;
+ GenTreeFlags callFlags = GTF_EMPTY;
if (info.compCompHnd->getClassAttribs(pResolvedToken->hClass) & CORINFO_FLG_BEFOREFIELDINIT)
{
@@ -7604,7 +7604,7 @@ GenTree* Compiler::impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolvedT
GenTree* ctxTree = getRuntimeContextTree(kind.runtimeLookupKind);
GenTreeCall::Use* args = gtNewCallArgs(ctxTree);
- unsigned callFlags = 0;
+ GenTreeFlags callFlags = GTF_EMPTY;
if (info.compCompHnd->getClassAttribs(pResolvedToken->hClass) & CORINFO_FLG_BEFOREFIELDINIT)
{
diff --git a/src/coreclr/jit/lclmorph.cpp b/src/coreclr/jit/lclmorph.cpp
index 2da24ba75ba..db3e134b607 100644
--- a/src/coreclr/jit/lclmorph.cpp
+++ b/src/coreclr/jit/lclmorph.cpp
@@ -872,7 +872,7 @@ private:
}
// Local address nodes never have side effects (nor any other flags, at least at this point).
- addr->gtFlags = 0;
+ addr->gtFlags = GTF_EMPTY;
INDEBUG(m_stmtModified = true;)
}
@@ -1044,7 +1044,7 @@ private:
return;
}
- unsigned flags = 0;
+ GenTreeFlags flags = GTF_EMPTY;
if ((user != nullptr) && user->OperIs(GT_ASG) && (user->AsOp()->gtGetOp1() == indir))
{
diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp
index 1a486007226..66d5b2abfe4 100644
--- a/src/coreclr/jit/lower.cpp
+++ b/src/coreclr/jit/lower.cpp
@@ -2800,19 +2800,19 @@ GenTree* Lowering::LowerJTrue(GenTreeOp* jtrue)
if ((relop->gtNext == jtrue) && relopOp2->IsCnsIntOrI())
{
- bool useJCMP = false;
- unsigned flags = 0;
+ bool useJCMP = false;
+ GenTreeFlags flags = GTF_EMPTY;
if (relop->OperIs(GT_EQ, GT_NE) && relopOp2->IsIntegralConst(0))
{
// Codegen will use cbz or cbnz in codegen which do not affect the flag register
- flags = relop->OperIs(GT_EQ) ? GTF_JCMP_EQ : 0;
+ flags = relop->OperIs(GT_EQ) ? GTF_JCMP_EQ : GTF_EMPTY;
useJCMP = true;
}
else if (relop->OperIs(GT_TEST_EQ, GT_TEST_NE) && isPow2(relopOp2->AsIntCon()->IconValue()))
{
// Codegen will use tbz or tbnz in codegen which do not affect the flag register
- flags = GTF_JCMP_TST | (relop->OperIs(GT_TEST_EQ) ? GTF_JCMP_EQ : 0);
+ flags = GTF_JCMP_TST | (relop->OperIs(GT_TEST_EQ) ? GTF_JCMP_EQ : GTF_EMPTY);
useJCMP = true;
}
diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp
index a241aac2464..4e6fe4cdac5 100644
--- a/src/coreclr/jit/morph.cpp
+++ b/src/coreclr/jit/morph.cpp
@@ -71,7 +71,7 @@ GenTree* Compiler::fgMorphIntoHelperCall(GenTree* tree, int helper, GenTreeCall:
call->gtCallLateArgs = nullptr;
call->fgArgInfo = nullptr;
call->gtRetClsHnd = nullptr;
- call->gtCallMoreFlags = 0;
+ call->gtCallMoreFlags = GTF_CALL_M_EMPTY;
call->gtInlineCandidateInfo = nullptr;
call->gtControlExpr = nullptr;
@@ -3663,7 +3663,7 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
GenTreeCall::Use* args;
GenTree* argx;
- unsigned flagsSummary = 0;
+ GenTreeFlags flagsSummary = GTF_EMPTY;
unsigned argIndex = 0;
@@ -4267,8 +4267,8 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
//
void Compiler::fgMorphMultiregStructArgs(GenTreeCall* call)
{
- bool foundStructArg = false;
- unsigned flagsSummary = 0;
+ bool foundStructArg = false;
+ GenTreeFlags flagsSummary = GTF_EMPTY;
#ifdef TARGET_X86
assert(!"Logic error: no MultiregStructArgs for X86");
@@ -8358,7 +8358,7 @@ GenTree* Compiler::fgCreateCallDispatcherAndGetResult(GenTreeCall* orig
//
GenTree* Compiler::getLookupTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_LOOKUP* pLookup,
- unsigned handleFlags,
+ GenTreeFlags handleFlags,
void* compileTimeHandle)
{
if (!pLookup->lookupKind.needsRuntimeLookup)
@@ -14790,12 +14790,12 @@ DONE_MORPHING_CHILDREN:
// TBD: this transformation is currently necessary for correctness -- it might
// be good to analyze the failures that result if we don't do this, and fix them
// in other ways. Ideally, this should be optional.
- GenTree* commaNode = op1;
- unsigned treeFlags = tree->gtFlags;
- commaNode->gtType = typ;
- commaNode->gtFlags = (treeFlags & ~GTF_REVERSE_OPS); // Bashing the GT_COMMA flags here is
- // dangerous, clear the GTF_REVERSE_OPS at
- // least.
+ GenTree* commaNode = op1;
+ GenTreeFlags treeFlags = tree->gtFlags;
+ commaNode->gtType = typ;
+ commaNode->gtFlags = (treeFlags & ~GTF_REVERSE_OPS); // Bashing the GT_COMMA flags here is
+ // dangerous, clear the GTF_REVERSE_OPS at
+ // least.
#ifdef DEBUG
commaNode->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED;
#endif
@@ -17747,8 +17747,8 @@ void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, Statement* stmt)
// if they are going to be cleared by fgSplitBlockAfterStatement(). We currently do this only
// for the GC safe point bit, the logic being that if 'block' was marked gcsafe, then surely
// remainderBlock will still be GC safe.
- unsigned propagateFlags = block->bbFlags & BBF_GC_SAFE_POINT;
- BasicBlock* remainderBlock = fgSplitBlockAfterStatement(block, stmt);
+ BasicBlockFlags propagateFlags = block->bbFlags & BBF_GC_SAFE_POINT;
+ BasicBlock* remainderBlock = fgSplitBlockAfterStatement(block, stmt);
fgRemoveRefPred(remainderBlock, block); // We're going to put more blocks between block and remainderBlock.
BasicBlock* helperBlock = fgNewBBafter(BBJ_NONE, block, true);
@@ -17925,8 +17925,8 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, Statement* stmt)
// if they are going to be cleared by fgSplitBlockAfterStatement(). We currently do this only
// for the GC safe point bit, the logic being that if 'block' was marked gcsafe, then surely
// remainderBlock will still be GC safe.
- unsigned propagateFlags = block->bbFlags & BBF_GC_SAFE_POINT;
- BasicBlock* remainderBlock = fgSplitBlockAfterStatement(block, stmt);
+ BasicBlockFlags propagateFlags = block->bbFlags & BBF_GC_SAFE_POINT;
+ BasicBlock* remainderBlock = fgSplitBlockAfterStatement(block, stmt);
fgRemoveRefPred(remainderBlock, block); // We're going to put more blocks between block and remainderBlock.
BasicBlock* condBlock = fgNewBBafter(BBJ_COND, block, true);
@@ -19426,9 +19426,9 @@ bool Compiler::fgCanTailCallViaJitHelper()
#endif
}
-static const int numberOfTrackedFlags = 5;
-static const unsigned trackedFlags[numberOfTrackedFlags] = {GTF_ASG, GTF_CALL, GTF_EXCEPT, GTF_GLOB_REF,
- GTF_ORDER_SIDEEFF};
+static const int numberOfTrackedFlags = 5;
+static const GenTreeFlags trackedFlags[numberOfTrackedFlags] = {GTF_ASG, GTF_CALL, GTF_EXCEPT, GTF_GLOB_REF,
+ GTF_ORDER_SIDEEFF};
//------------------------------------------------------------------------
// fgMorphArgList: morph argument list tree without recursion.
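
The flagsSummary locals in fgMorphArgs and fgMorphMultiregStructArgs follow a standard JIT idiom: OR every argument's flags into an accumulator, then merge only the side-effect bits back into the parent node. A hedged sketch of that accumulation, with simplified stand-in types; GTF_ALL_EFFECT mirrors the real mask's role, but the values are invented.

enum GenTreeFlags : unsigned int
{
    GTF_EMPTY      = 0x0,
    GTF_ASG        = 0x1,
    GTF_CALL       = 0x2,
    GTF_EXCEPT     = 0x4,
    GTF_ALL_EFFECT = 0x7, // union of the effect bits above (toy layout)
};

inline GenTreeFlags operator&(GenTreeFlags a, GenTreeFlags b)
{
    return (GenTreeFlags)((unsigned int)a & (unsigned int)b);
}

inline GenTreeFlags& operator|=(GenTreeFlags& a, GenTreeFlags b)
{
    return a = (GenTreeFlags)((unsigned int)a | (unsigned int)b);
}

struct GenTree
{
    GenTreeFlags gtFlags;
};

// Accumulate each argument's flags, then return just the effect bits
// that must propagate to the call node.
GenTreeFlags summarizeArgFlags(GenTree* const* args, int count)
{
    GenTreeFlags flagsSummary = GTF_EMPTY;
    for (int i = 0; i < count; i++)
    {
        flagsSummary |= args[i]->gtFlags;
    }
    return flagsSummary & GTF_ALL_EFFECT;
}
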
diff --git a/src/coreclr/jit/optimizer.cpp b/src/coreclr/jit/optimizer.cpp
index 90aaf9e2e94..cc324bfd8cf 100644
--- a/src/coreclr/jit/optimizer.cpp
+++ b/src/coreclr/jit/optimizer.cpp
@@ -1157,7 +1157,7 @@ bool Compiler::optRecordLoop(BasicBlock* head,
optLoopTable[loopInd].lpAsgVars = AllVarSetOps::UninitVal();
- optLoopTable[loopInd].lpFlags = 0;
+ optLoopTable[loopInd].lpFlags = LPFLG_EMPTY;
// We haven't yet recorded any side effects.
for (MemoryKind memoryKind : allMemoryKinds())
diff --git a/src/coreclr/jit/rangecheck.cpp b/src/coreclr/jit/rangecheck.cpp
index b0a29684202..a8beca89376 100644
--- a/src/coreclr/jit/rangecheck.cpp
+++ b/src/coreclr/jit/rangecheck.cpp
@@ -213,8 +213,8 @@ void RangeCheck::OptimizeRangeCheck(BasicBlock* block, Statement* stmt, GenTree*
if (m_pCompiler->vnStore->IsVNConstant(arrLenVn))
{
- ssize_t constVal = -1;
- unsigned iconFlags = 0;
+ ssize_t constVal = -1;
+ GenTreeFlags iconFlags = GTF_EMPTY;
if (m_pCompiler->optIsTreeKnownIntValue(true, bndsChk->gtArrLen, &constVal, &iconFlags))
{
@@ -249,8 +249,8 @@ void RangeCheck::OptimizeRangeCheck(BasicBlock* block, Statement* stmt, GenTree*
JITDUMP("ArrSize for lengthVN:%03X = %d\n", arrLenVn, arrSize);
if (m_pCompiler->vnStore->IsVNConstant(idxVn) && (arrSize > 0))
{
- ssize_t idxVal = -1;
- unsigned iconFlags = 0;
+ ssize_t idxVal = -1;
+ GenTreeFlags iconFlags = GTF_EMPTY;
if (!m_pCompiler->optIsTreeKnownIntValue(true, treeIndex, &idxVal, &iconFlags))
{
return;
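
optIsTreeKnownIntValue returns the constant's handle flags through an out-parameter, so retyping it forces every caller's local to change from unsigned to GenTreeFlags as well, as the two rangecheck.cpp hunks show. A sketch of that out-parameter shape; the signature is simplified relative to the real method.

enum GenTreeFlags : unsigned int
{
    GTF_EMPTY = 0x0,
};

// Simplified stand-in: report a known constant and its handle flags.
bool isKnownIntValue(long long* pValue, GenTreeFlags* pIconFlags)
{
    *pValue     = 42;        // toy constant
    *pIconFlags = GTF_EMPTY; // a plain int carries no handle bits
    return true;
}

int main()
{
    long long    constVal  = -1;
    GenTreeFlags iconFlags = GTF_EMPTY; // must now be typed, not unsigned
    if (isKnownIntValue(&constVal, &iconFlags))
    {
        // use constVal / iconFlags ...
    }
    return 0;
}
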
diff --git a/src/coreclr/jit/rationalize.cpp b/src/coreclr/jit/rationalize.cpp
index 52f1cd4a69a..b8dd797f3a0 100644
--- a/src/coreclr/jit/rationalize.cpp
+++ b/src/coreclr/jit/rationalize.cpp
@@ -37,7 +37,7 @@ genTreeOps addrForm(genTreeOps loadForm)
}
// copy the flags determined by mask from src to dst
-void copyFlags(GenTree* dst, GenTree* src, unsigned mask)
+void copyFlags(GenTree* dst, GenTree* src, GenTreeFlags mask)
{
dst->gtFlags &= ~mask;
dst->gtFlags |= (src->gtFlags & mask);
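
copyFlags is the masked-transfer primitive used during rationalization: clear dst's bits under the mask, then copy src's bits under the same mask. With the typed enum this also requires operator~, operator&, and the compound forms for GenTreeFlags. A toy demonstration with a simplified node type and invented flag values:

enum GenTreeFlags : unsigned int
{
    GTF_EMPTY       = 0x0,
    GTF_ASG         = 0x1,
    GTF_EXCEPT      = 0x2,
    GTF_REVERSE_OPS = 0x4,
};

inline GenTreeFlags operator~(GenTreeFlags a)
{
    return (GenTreeFlags)(~(unsigned int)a);
}
inline GenTreeFlags operator&(GenTreeFlags a, GenTreeFlags b)
{
    return (GenTreeFlags)((unsigned int)a & (unsigned int)b);
}
inline GenTreeFlags& operator&=(GenTreeFlags& a, GenTreeFlags b)
{
    return a = a & b;
}
inline GenTreeFlags& operator|=(GenTreeFlags& a, GenTreeFlags b)
{
    return a = (GenTreeFlags)((unsigned int)a | (unsigned int)b);
}

struct GenTree
{
    GenTreeFlags gtFlags;
};

// Same shape as the rationalize.cpp helper above.
void copyFlags(GenTree* dst, GenTree* src, GenTreeFlags mask)
{
    dst->gtFlags &= ~mask;                 // drop dst's bits under the mask
    dst->gtFlags |= (src->gtFlags & mask); // take src's bits under the mask
}

int main()
{
    GenTree src = {GTF_ASG};
    GenTree dst = {GTF_REVERSE_OPS};
    copyFlags(&dst, &src, GTF_ASG);
    // dst now carries GTF_REVERSE_OPS and GTF_ASG: only the masked bit moved.
    return 0;
}
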
diff --git a/src/coreclr/jit/regset.cpp b/src/coreclr/jit/regset.cpp
index 1a7816d6ca2..00c8a35c74f 100644
--- a/src/coreclr/jit/regset.cpp
+++ b/src/coreclr/jit/regset.cpp
@@ -353,7 +353,7 @@ void RegSet::rsSpillTree(regNumber reg, GenTree* tree, unsigned regIdx /* =0 */)
// The spill flag on the node should be cleared by the caller of this method.
assert((tree->gtFlags & GTF_SPILL) != 0);
- unsigned regFlags = 0;
+ GenTreeFlags regFlags = GTF_EMPTY;
if (call != nullptr)
{
regFlags = call->GetRegSpillFlagByIdx(regIdx);
@@ -559,7 +559,7 @@ TempDsc* RegSet::rsUnspillInPlace(GenTree* tree, regNumber oldReg, unsigned regI
if (tree->IsMultiRegCall())
{
GenTreeCall* call = tree->AsCall();
- unsigned flags = call->GetRegSpillFlagByIdx(regIdx);
+ GenTreeFlags flags = call->GetRegSpillFlagByIdx(regIdx);
flags &= ~GTF_SPILLED;
call->SetRegSpillFlagByIdx(flags, regIdx);
}
@@ -567,14 +567,14 @@ TempDsc* RegSet::rsUnspillInPlace(GenTree* tree, regNumber oldReg, unsigned regI
else if (tree->OperIsPutArgSplit())
{
GenTreePutArgSplit* splitArg = tree->AsPutArgSplit();
- unsigned flags = splitArg->GetRegSpillFlagByIdx(regIdx);
+ GenTreeFlags flags = splitArg->GetRegSpillFlagByIdx(regIdx);
flags &= ~GTF_SPILLED;
splitArg->SetRegSpillFlagByIdx(flags, regIdx);
}
else if (tree->OperIsMultiRegOp())
{
GenTreeMultiRegOp* multiReg = tree->AsMultiRegOp();
- unsigned flags = multiReg->GetRegSpillFlagByIdx(regIdx);
+ GenTreeFlags flags = multiReg->GetRegSpillFlagByIdx(regIdx);
flags &= ~GTF_SPILLED;
multiReg->SetRegSpillFlagByIdx(flags, regIdx);
}
@@ -582,7 +582,7 @@ TempDsc* RegSet::rsUnspillInPlace(GenTree* tree, regNumber oldReg, unsigned regI
else if (tree->IsMultiRegLclVar())
{
GenTreeLclVar* lcl = tree->AsLclVar();
- unsigned flags = lcl->GetRegSpillFlagByIdx(regIdx);
+ GenTreeFlags flags = lcl->GetRegSpillFlagByIdx(regIdx);
flags &= ~GTF_SPILLED;
lcl->SetRegSpillFlagByIdx(flags, regIdx);
}
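
Every regset.cpp hunk above is the same read-modify-write on per-register spill flags: fetch the flags at regIdx, clear GTF_SPILLED, store them back; only the node type (call, split arg, multi-reg op, local) varies. A simplified sketch of the accessors and the pattern; the storage here is a plain array, unlike the packed encoding the JIT actually uses.

#include <cassert>

enum GenTreeFlags : unsigned int
{
    GTF_EMPTY   = 0x0,
    GTF_SPILL   = 0x1, // value must be spilled
    GTF_SPILLED = 0x2, // value lives in a spill temp
};

inline GenTreeFlags operator~(GenTreeFlags a)
{
    return (GenTreeFlags)(~(unsigned int)a);
}
inline GenTreeFlags& operator&=(GenTreeFlags& a, GenTreeFlags b)
{
    return a = (GenTreeFlags)((unsigned int)a & (unsigned int)b);
}

struct MultiRegNode
{
    static const unsigned MaxRegs = 4;
    GenTreeFlags spillFlags[MaxRegs] = {}; // zero-init == GTF_EMPTY

    GenTreeFlags GetRegSpillFlagByIdx(unsigned i) const
    {
        assert(i < MaxRegs);
        return spillFlags[i];
    }
    void SetRegSpillFlagByIdx(GenTreeFlags f, unsigned i)
    {
        assert(i < MaxRegs);
        spillFlags[i] = f;
    }
};

// The unspill pattern from rsUnspillInPlace, one register at a time.
void markUnspilled(MultiRegNode* node, unsigned regIdx)
{
    GenTreeFlags flags = node->GetRegSpillFlagByIdx(regIdx);
    flags &= ~GTF_SPILLED; // the value is back in a register
    node->SetRegSpillFlagByIdx(flags, regIdx);
}
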
diff --git a/src/coreclr/jit/treelifeupdater.cpp b/src/coreclr/jit/treelifeupdater.cpp
index d81ce887218..20a9745362b 100644
--- a/src/coreclr/jit/treelifeupdater.cpp
+++ b/src/coreclr/jit/treelifeupdater.cpp
@@ -50,9 +50,9 @@ bool TreeLifeUpdater<ForCodeGen>::UpdateLifeFieldVar(GenTreeLclVar* lclNode, uns
bool isBorn = ((lclNode->gtFlags & GTF_VAR_DEF) != 0);
bool isDying = !isBorn && lclNode->IsLastUse(multiRegIndex);
// GTF_SPILL will be set if any registers need to be spilled.
- unsigned spillFlags = (lclNode->gtFlags & lclNode->GetRegSpillFlagByIdx(multiRegIndex));
- bool spill = ((spillFlags & GTF_SPILL) != 0);
- bool isInMemory = false;
+ GenTreeFlags spillFlags = (lclNode->gtFlags & lclNode->GetRegSpillFlagByIdx(multiRegIndex));
+ bool spill = ((spillFlags & GTF_SPILL) != 0);
+ bool isInMemory = false;
if (isBorn || isDying)
{
diff --git a/src/coreclr/jit/valuenum.cpp b/src/coreclr/jit/valuenum.cpp
index 85dacfc1867..f49d259cdb3 100644
--- a/src/coreclr/jit/valuenum.cpp
+++ b/src/coreclr/jit/valuenum.cpp
@@ -1711,7 +1711,7 @@ ValueNum ValueNumStore::VNForCastOper(var_types castToType, bool srcIsUnsigned /
return result;
}
-ValueNum ValueNumStore::VNForHandle(ssize_t cnsVal, unsigned handleFlags)
+ValueNum ValueNumStore::VNForHandle(ssize_t cnsVal, GenTreeFlags handleFlags)
{
assert((handleFlags & ~GTF_ICON_HDL_MASK) == 0);
@@ -4217,7 +4217,7 @@ bool ValueNumStore::IsVNInt32Constant(ValueNum vn)
return TypeOfVN(vn) == TYP_INT;
}
-unsigned ValueNumStore::GetHandleFlags(ValueNum vn)
+GenTreeFlags ValueNumStore::GetHandleFlags(ValueNum vn)
{
assert(IsVNHandle(vn));
Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
@@ -6358,8 +6358,8 @@ void Compiler::fgValueNumberBlock(BasicBlock* blk)
}
else
{
- newMemoryVN =
- vnStore->VNForFunc(TYP_REF, VNF_PhiMemoryDef, vnStore->VNForHandle(ssize_t(blk), 0), phiAppVN);
+ newMemoryVN = vnStore->VNForFunc(TYP_REF, VNF_PhiMemoryDef,
+ vnStore->VNForHandle(ssize_t(blk), GTF_EMPTY), phiAppVN);
}
}
GetMemoryPerSsaData(blk->bbMemorySsaNumIn[memoryKind])->m_vnPair.SetLiberal(newMemoryVN);
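
With the typed parameter, VNForHandle's precondition reads naturally: the caller may pass only handle-kind bits (those under GTF_ICON_HDL_MASK), or GTF_EMPTY for a flagless handle such as the basic-block pointer in fgValueNumberBlock. A sketch of that precondition check; the mask layout is invented for illustration.

#include <cassert>

enum GenTreeFlags : unsigned int
{
    GTF_EMPTY          = 0x0,
    GTF_ICON_SCOPE_HDL = 0x1000000, // invented bit positions
    GTF_ICON_CLASS_HDL = 0x2000000,
    GTF_ICON_HDL_MASK  = 0xF000000, // all handle-kind bits
};

inline GenTreeFlags operator~(GenTreeFlags a)
{
    return (GenTreeFlags)(~(unsigned int)a);
}
inline GenTreeFlags operator&(GenTreeFlags a, GenTreeFlags b)
{
    return (GenTreeFlags)((unsigned int)a & (unsigned int)b);
}

unsigned vnForHandle(long long cnsVal, GenTreeFlags handleFlags)
{
    // Only handle-kind bits may be set on the incoming flags.
    assert((handleFlags & ~GTF_ICON_HDL_MASK) == 0);
    // ... look up or create the handle VN ...
    return (unsigned)cnsVal; // placeholder result for the sketch
}

int main()
{
    vnForHandle(0x1234, GTF_ICON_SCOPE_HDL); // ok: one handle kind
    vnForHandle(0x5678, GTF_EMPTY);          // ok: flagless handle
    return 0;
}
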
diff --git a/src/coreclr/jit/valuenum.h b/src/coreclr/jit/valuenum.h
index 527d6f83535..d0623a5f204 100644
--- a/src/coreclr/jit/valuenum.h
+++ b/src/coreclr/jit/valuenum.h
@@ -285,7 +285,7 @@ public:
// We keep handle values in a separate pool, so we don't confuse a handle with an int constant
// that happens to be the same...
- ValueNum VNForHandle(ssize_t cnsVal, unsigned iconFlags);
+ ValueNum VNForHandle(ssize_t cnsVal, GenTreeFlags iconFlags);
// And the single constant for an object reference type.
static ValueNum VNForNull()
@@ -710,7 +710,7 @@ public:
void GetCompareCheckedBoundArithInfo(ValueNum vn, CompareCheckedBoundArithInfo* info);
// Returns the flags on the given handle VN, e.g. GTF_ICON_SCOPE_HDL.
- unsigned GetHandleFlags(ValueNum vn);
+ GenTreeFlags GetHandleFlags(ValueNum vn);
// Returns true iff the VN represents a handle constant.
bool IsVNHandle(ValueNum vn);
@@ -981,10 +981,10 @@ private:
struct VNHandle : public JitKeyFuncsDefEquals<VNHandle>
{
- ssize_t m_cnsVal;
- unsigned m_flags;
+ ssize_t m_cnsVal;
+ GenTreeFlags m_flags;
// Don't define a constructor, so that the default copy constructor remains usable for hashtable rehash.
- static void Initialize(VNHandle* handle, ssize_t m_cnsVal, unsigned m_flags)
+ static void Initialize(VNHandle* handle, ssize_t m_cnsVal, GenTreeFlags m_flags)
{
handle->m_cnsVal = m_cnsVal;
handle->m_flags = m_flags;
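
The reason VNHandle uses a static Initialize rather than a constructor: a user-declared constructor would make the type non-trivial, and the hashtable's rehash relies on the default copy behavior when it relocates entries. A sketch of the shape, with a compile-time check of the property it depends on; GenTreeFlags is again a toy stand-in.

#include <type_traits>

enum GenTreeFlags : unsigned int
{
    GTF_EMPTY = 0x0,
};

struct VNHandle
{
    long long    m_cnsVal;
    GenTreeFlags m_flags;

    // No user-declared constructor: the type stays trivially copyable,
    // so rehashing can relocate entries with the default copy.
    static void Initialize(VNHandle* handle, long long cnsVal, GenTreeFlags flags)
    {
        handle->m_cnsVal = cnsVal;
        handle->m_flags  = flags;
    }
};

static_assert(std::is_trivially_copyable<VNHandle>::value,
              "rehash depends on default copy semantics");

int main()
{
    VNHandle h;
    VNHandle::Initialize(&h, 0x1234, GTF_EMPTY);
    return 0;
}
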