github.com/dotnet/runtime.git
author    SingleAccretion <62474226+SingleAccretion@users.noreply.github.com>  2022-11-11 02:11:41 +0300
committer GitHub <noreply@github.com>  2022-11-11 02:11:41 +0300
commit    78322ae89f4e2be735520bcb34f8a6eb8a769295 (patch)
tree      930425781aa3dbf3a5c1551772954db4545dcb80
parent    37ef5fbefcb8f6f33bdb25df8f7a30164fc3700b (diff)
Introduce `FIELD_ADDR` and use it for TLS statics and instance class fields (#77353)
* Introduce GT_FIELD_ADDR
* Add FIELD_ADDR to fgAddrCouldBeNull
* Add gtNewFieldAddrNode; move gtNewFieldRef
* Implement Windows x86 TLS via FIELD_ADDR. Tested manually to work as well as it did before.
* Silence the IR checker
* Minor code cleanup
* Prepare morph for instance FIELD_ADDRs
* Fix ObjectAllocator
* Use FIELD_ADDR in ld[s]flda import
* Work around morphing issues: with NativeAOT, we can have FIELD_ADDR nodes that are effectively NOPs (those for the method table pointer field), so not all operators that the expansion produces will be simple (in fact, they can be more or less arbitrary). This means we cannot simply call "fgMorphSmpOp" as we used to. Unfortunately, we cannot just call "fgMorphTree" either, because it propagates assertions on newly created (or bashed) IND nodes, creating many new cases where GTF_ORDER_SIDEEFF is applied to those nodes and hurting CQ. Work around this by calling "fgMorphTree" for non-simple operators only.
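To illustrate the dispatch described in the last bullet, here is a minimal standalone C++ sketch of re-morphing the expanded field tree: the cheap simple-op path is used when the expansion's root is a simple operator, and the full recursive morph (which may propagate assertions onto new IND nodes) only otherwise. The types and function names below are hypothetical stand-ins, not the JIT's real GenTree/Compiler API.

// Standalone sketch of the "re-morph the expanded field" dispatch described above.
// All names here are toy stand-ins for illustration only.
#include <iostream>

enum class Oper { Add, Comma, Ind, Call };

struct Node {
    Oper oper;
};

// Toy stand-in for GenTree::OperIsSimple(): treat calls as the only non-simple operator.
static bool OperIsSimple(const Node& node)
{
    return node.oper != Oper::Call;
}

static Node* MorphSimpleOp(Node* node)
{
    std::cout << "simple-op morph (no assertion propagation on new INDs)\n";
    return node;
}

static Node* MorphTree(Node* node)
{
    std::cout << "full recursive morph (may add order-side-effect flags to new INDs)\n";
    return node;
}

// After expanding a FIELD/FIELD_ADDR, pick the morph entry point based on the
// shape of the expansion's root, mirroring the workaround described in the bullet above.
static Node* RemorphExpandedField(Node* expanded)
{
    return OperIsSimple(*expanded) ? MorphSimpleOp(expanded) : MorphTree(expanded);
}

int main()
{
    Node add{Oper::Add};
    Node call{Oper::Call};
    RemorphExpandedField(&add);  // takes the simple-op path
    RemorphExpandedField(&call); // falls back to the full morph
    return 0;
}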
-rw-r--r--  src/coreclr/jit/compiler.cpp        4
-rw-r--r--  src/coreclr/jit/compiler.h         11
-rw-r--r--  src/coreclr/jit/compiler.hpp       62
-rw-r--r--  src/coreclr/jit/flowgraph.cpp     139
-rw-r--r--  src/coreclr/jit/gentree.cpp       107
-rw-r--r--  src/coreclr/jit/gentree.h          55
-rw-r--r--  src/coreclr/jit/gtlist.h            3
-rw-r--r--  src/coreclr/jit/gtstructs.h         2
-rw-r--r--  src/coreclr/jit/importer.cpp      118
-rw-r--r--  src/coreclr/jit/morph.cpp         672
-rw-r--r--  src/coreclr/jit/objectalloc.cpp     2
-rw-r--r--  src/coreclr/jit/rationalize.cpp     3
12 files changed, 633 insertions, 545 deletions
diff --git a/src/coreclr/jit/compiler.cpp b/src/coreclr/jit/compiler.cpp
index d921aa458f6..e14f3439ee6 100644
--- a/src/coreclr/jit/compiler.cpp
+++ b/src/coreclr/jit/compiler.cpp
@@ -9365,9 +9365,9 @@ void cTreeFlags(Compiler* comp, GenTree* tree)
{
chars += printf("[IND_TGT_HEAP]");
}
- if (tree->gtFlags & GTF_IND_TLS_REF)
+ if (tree->gtFlags & GTF_IND_REQ_ADDR_IN_REG)
{
- chars += printf("[IND_TLS_REF]");
+ chars += printf("[IND_REQ_ADDR_IN_REG]");
}
if (tree->gtFlags & GTF_IND_ASG_LHS)
{
diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h
index cbe3f52f29e..068dd098cc4 100644
--- a/src/coreclr/jit/compiler.h
+++ b/src/coreclr/jit/compiler.h
@@ -2641,6 +2641,11 @@ public:
GenTreeField* gtNewFieldRef(var_types type, CORINFO_FIELD_HANDLE fldHnd, GenTree* obj = nullptr, DWORD offset = 0);
+ GenTreeField* gtNewFieldAddrNode(var_types type,
+ CORINFO_FIELD_HANDLE fldHnd,
+ GenTree* obj = nullptr,
+ DWORD offset = 0);
+
GenTreeIndexAddr* gtNewIndexAddr(GenTree* arrayOp,
GenTree* indexOp,
var_types elemType,
@@ -2663,7 +2668,7 @@ public:
GenTreeMDArr* gtNewMDArrLowerBound(GenTree* arrayOp, unsigned dim, unsigned rank, BasicBlock* block);
- GenTreeIndir* gtNewIndir(var_types typ, GenTree* addr);
+ GenTreeIndir* gtNewIndir(var_types typ, GenTree* addr, GenTreeFlags indirFlags = GTF_EMPTY);
GenTree* gtNewNullCheck(GenTree* addr, BasicBlock* basicBlock);
@@ -5724,6 +5729,9 @@ public:
private:
GenTree* fgMorphField(GenTree* tree, MorphAddrContext* mac);
+ GenTree* fgMorphExpandInstanceField(GenTree* tree, MorphAddrContext* mac);
+ GenTree* fgMorphExpandTlsFieldAddr(GenTree* tree);
+ GenTree* fgMorphExpandStaticField(GenTree* tree);
bool fgCanFastTailCall(GenTreeCall* call, const char** failReason);
#if FEATURE_FASTTAILCALL
bool fgCallHasMustCopyByrefParameter(GenTreeCall* callee);
@@ -10846,6 +10854,7 @@ public:
case GT_RETURNTRAP:
case GT_NOP:
case GT_FIELD:
+ case GT_FIELD_ADDR:
case GT_RETURN:
case GT_RETFILT:
case GT_RUNTIMELOOKUP:
diff --git a/src/coreclr/jit/compiler.hpp b/src/coreclr/jit/compiler.hpp
index 274514bbf98..b89ebaf0871 100644
--- a/src/coreclr/jit/compiler.hpp
+++ b/src/coreclr/jit/compiler.hpp
@@ -1069,61 +1069,6 @@ inline GenTree* Compiler::gtNewRuntimeLookup(CORINFO_GENERIC_HANDLE hnd, CorInfo
return node;
}
-//------------------------------------------------------------------------
-// gtNewFieldRef: a helper for creating GT_FIELD nodes.
-//
-// Normalizes struct types (for SIMD vectors). Sets GTF_GLOB_REF for fields
-// that may be pointing into globally visible memory.
-//
-// Arguments:
-// type - type for the field node
-// fldHnd - the field handle
-// obj - the instance, an address
-// offset - the field offset
-//
-// Return Value:
-// The created node.
-//
-inline GenTreeField* Compiler::gtNewFieldRef(var_types type, CORINFO_FIELD_HANDLE fldHnd, GenTree* obj, DWORD offset)
-{
- // GT_FIELD nodes are transformed into GT_IND nodes.
- assert(GenTree::s_gtNodeSizes[GT_IND] <= GenTree::s_gtNodeSizes[GT_FIELD]);
-
- if (type == TYP_STRUCT)
- {
- CORINFO_CLASS_HANDLE structHnd;
- eeGetFieldType(fldHnd, &structHnd);
- type = impNormStructType(structHnd);
- }
-
- GenTreeField* fieldNode = new (this, GT_FIELD) GenTreeField(type, obj, fldHnd, offset);
-
- // If "obj" is the address of a local, note that a field of that struct local has been accessed.
- if ((obj != nullptr) && obj->OperIs(GT_ADDR) && varTypeIsStruct(obj->AsUnOp()->gtOp1) &&
- obj->AsUnOp()->gtOp1->OperIs(GT_LCL_VAR))
- {
- LclVarDsc* varDsc = lvaGetDesc(obj->AsUnOp()->gtOp1->AsLclVarCommon());
-
- varDsc->lvFieldAccessed = 1;
-
- if (lvaIsImplicitByRefLocal(lvaGetLclNum(varDsc)))
- {
- // These structs are passed by reference and can easily become global references if those
- // references are exposed. We clear out address-exposure information for these parameters
- // when they are converted into references in fgRetypeImplicitByRefArgs() so we do not have
- // the necessary information in morph to know if these indirections are actually global
- // references, so we have to be conservative here.
- fieldNode->gtFlags |= GTF_GLOB_REF;
- }
- }
- else
- {
- fieldNode->gtFlags |= GTF_GLOB_REF;
- }
-
- return fieldNode;
-}
-
inline GenTreeIndexAddr* Compiler::gtNewIndexAddr(GenTree* arrayOp,
GenTree* indexOp,
var_types elemType,
@@ -1267,10 +1212,14 @@ inline GenTreeMDArr* Compiler::gtNewMDArrLowerBound(GenTree* arrayOp, unsigned d
// Return Value:
// New GT_IND node
-inline GenTreeIndir* Compiler::gtNewIndir(var_types typ, GenTree* addr)
+inline GenTreeIndir* Compiler::gtNewIndir(var_types typ, GenTree* addr, GenTreeFlags indirFlags)
{
+ assert((indirFlags & ~GTF_IND_FLAGS) == GTF_EMPTY);
+
GenTree* indir = gtNewOperNode(GT_IND, typ, addr);
+ indir->gtFlags |= indirFlags;
indir->SetIndirExceptionFlags(this);
+
return indir->AsIndir();
}
@@ -4130,6 +4079,7 @@ void GenTree::VisitOperands(TVisitor visitor)
// Unary operators with an optional operand
case GT_NOP:
case GT_FIELD:
+ case GT_FIELD_ADDR:
case GT_RETURN:
case GT_RETFILT:
if (this->AsUnOp()->gtOp1 == nullptr)
diff --git a/src/coreclr/jit/flowgraph.cpp b/src/coreclr/jit/flowgraph.cpp
index 605b06ea863..4fc2b9b0d9d 100644
--- a/src/coreclr/jit/flowgraph.cpp
+++ b/src/coreclr/jit/flowgraph.cpp
@@ -936,102 +936,89 @@ GenTreeCall* Compiler::fgGetSharedCCtor(CORINFO_CLASS_HANDLE cls)
//
bool Compiler::fgAddrCouldBeNull(GenTree* addr)
{
- addr = addr->gtEffectiveVal();
- if (addr->IsIconHandle())
- {
- return false;
- }
- else if (addr->OperIs(GT_CNS_STR, GT_CLS_VAR_ADDR))
- {
- return false;
- }
- else if (addr->OperIs(GT_INDEX_ADDR))
- {
- return !addr->AsIndexAddr()->IsNotNull();
- }
- else if (addr->OperIs(GT_ARR_ADDR))
- {
- return (addr->gtFlags & GTF_ARR_ADDR_NONNULL) == 0;
- }
- else if (addr->OperIs(GT_IND))
- {
- return (addr->gtFlags & GTF_IND_NONNULL) == 0;
- }
- else if (addr->gtOper == GT_LCL_VAR)
- {
- unsigned varNum = addr->AsLclVarCommon()->GetLclNum();
-
- if (lvaIsImplicitByRefLocal(varNum))
- {
+ switch (addr->OperGet())
+ {
+ case GT_CNS_INT:
+ return !addr->IsIconHandle();
+
+ case GT_CNS_STR:
+ case GT_ADDR:
+ case GT_FIELD_ADDR:
+ case GT_CLS_VAR_ADDR:
+ // A GT_ADDR node, by itself, never requires null checking. The expression whose address is being
+ // taken is either a local or static variable, whose address is necessarily non-null, or else it is
+ // a field dereference, which will do its own bounds checking if necessary.
return false;
- }
- }
- else if (addr->gtOper == GT_ADDR)
- {
- if (addr->AsOp()->gtOp1->gtOper == GT_CNS_INT)
- {
- GenTree* cns1Tree = addr->AsOp()->gtOp1;
- if (!cns1Tree->IsIconHandle())
- {
- // Indirection of some random constant...
- // It is safest just to return true
- return true;
- }
- }
- return false; // we can't have a null address
- }
- else if (addr->gtOper == GT_ADD)
- {
- if (addr->AsOp()->gtOp1->gtOper == GT_CNS_INT)
- {
- GenTree* cns1Tree = addr->AsOp()->gtOp1;
- if (!cns1Tree->IsIconHandle())
+ case GT_IND:
+ return (addr->gtFlags & GTF_IND_NONNULL) == 0;
+
+ case GT_INDEX_ADDR:
+ return !addr->AsIndexAddr()->IsNotNull();
+
+ case GT_ARR_ADDR:
+ return (addr->gtFlags & GTF_ARR_ADDR_NONNULL) == 0;
+
+ case GT_LCL_VAR:
+ return !lvaIsImplicitByRefLocal(addr->AsLclVar()->GetLclNum());
+
+ case GT_COMMA:
+ return fgAddrCouldBeNull(addr->AsOp()->gtOp2);
+
+ case GT_ADD:
+ if (addr->AsOp()->gtOp1->gtOper == GT_CNS_INT)
{
- if (!fgIsBigOffset(cns1Tree->AsIntCon()->gtIconVal))
+ GenTree* cns1Tree = addr->AsOp()->gtOp1;
+ if (!cns1Tree->IsIconHandle())
+ {
+ if (!fgIsBigOffset(cns1Tree->AsIntCon()->gtIconVal))
+ {
+ // Op1 was an ordinary small constant
+ return fgAddrCouldBeNull(addr->AsOp()->gtOp2);
+ }
+ }
+ else // Op1 was a handle represented as a constant
{
- // Op1 was an ordinary small constant
- return fgAddrCouldBeNull(addr->AsOp()->gtOp2);
+ // Is Op2 also a constant?
+ if (addr->AsOp()->gtOp2->gtOper == GT_CNS_INT)
+ {
+ GenTree* cns2Tree = addr->AsOp()->gtOp2;
+ // Is this an addition of a handle and constant
+ if (!cns2Tree->IsIconHandle())
+ {
+ if (!fgIsBigOffset(cns2Tree->AsIntCon()->gtIconVal))
+ {
+ // Op2 was an ordinary small constant
+ return false; // we can't have a null address
+ }
+ }
+ }
}
}
- else // Op1 was a handle represented as a constant
+ else
{
- // Is Op2 also a constant?
+ // Op1 is not a constant. What about Op2?
if (addr->AsOp()->gtOp2->gtOper == GT_CNS_INT)
{
GenTree* cns2Tree = addr->AsOp()->gtOp2;
- // Is this an addition of a handle and constant
+ // Is this an addition of a small constant
if (!cns2Tree->IsIconHandle())
{
if (!fgIsBigOffset(cns2Tree->AsIntCon()->gtIconVal))
{
// Op2 was an ordinary small constant
- return false; // we can't have a null address
+ return fgAddrCouldBeNull(addr->AsOp()->gtOp1);
}
}
}
}
- }
- else
- {
- // Op1 is not a constant
- // What about Op2?
- if (addr->AsOp()->gtOp2->gtOper == GT_CNS_INT)
- {
- GenTree* cns2Tree = addr->AsOp()->gtOp2;
- // Is this an addition of a small constant
- if (!cns2Tree->IsIconHandle())
- {
- if (!fgIsBigOffset(cns2Tree->AsIntCon()->gtIconVal))
- {
- // Op2 was an ordinary small constant
- return fgAddrCouldBeNull(addr->AsOp()->gtOp1);
- }
- }
- }
- }
+ break;
+
+ default:
+ break;
}
- return true; // default result: addr could be null
+
+ return true; // default result: addr could be null.
}
//------------------------------------------------------------------------------
diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp
index 28f5a7c1ce2..8535f0e90c5 100644
--- a/src/coreclr/jit/gentree.cpp
+++ b/src/coreclr/jit/gentree.cpp
@@ -265,6 +265,7 @@ void GenTree::InitNodeSize()
GenTree::s_gtNodeSizes[GT_ARR_OFFSET] = TREE_NODE_SZ_LARGE;
GenTree::s_gtNodeSizes[GT_RET_EXPR] = TREE_NODE_SZ_LARGE;
GenTree::s_gtNodeSizes[GT_FIELD] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_FIELD_ADDR] = TREE_NODE_SZ_LARGE;
GenTree::s_gtNodeSizes[GT_CMPXCHG] = TREE_NODE_SZ_LARGE;
GenTree::s_gtNodeSizes[GT_QMARK] = TREE_NODE_SZ_LARGE;
GenTree::s_gtNodeSizes[GT_STORE_DYN_BLK] = TREE_NODE_SZ_LARGE;
@@ -2345,9 +2346,6 @@ bool GenTree::Compare(GenTree* op1, GenTree* op2, bool swapOK)
genTreeOps oper;
unsigned kind;
-// printf("tree1:\n"); gtDispTree(op1);
-// printf("tree2:\n"); gtDispTree(op2);
-
AGAIN:
if (op1 == nullptr)
@@ -2526,6 +2524,7 @@ AGAIN:
break;
case GT_FIELD:
+ case GT_FIELD_ADDR:
if (op1->AsField()->gtFldHnd != op2->AsField()->gtFldHnd)
{
return false;
@@ -3011,6 +3010,7 @@ AGAIN:
break;
case GT_FIELD:
+ case GT_FIELD_ADDR:
hash = genTreeHashAdd(hash, tree->AsField()->gtFldHnd);
break;
@@ -6578,19 +6578,13 @@ ExceptionSetFlags GenTree::OperExceptions(Compiler* comp)
return ExceptionSetFlags::IndexOutOfRangeException;
case GT_FIELD:
- {
- GenTree* fldObj = this->AsField()->GetFldObj();
-
- if (fldObj != nullptr)
+ case GT_FIELD_ADDR:
+ if (AsField()->IsInstance() && comp->fgAddrCouldBeNull(AsField()->GetFldObj()))
{
- if (comp->fgAddrCouldBeNull(fldObj))
- {
- return ExceptionSetFlags::NullReferenceException;
- }
+ return ExceptionSetFlags::NullReferenceException;
}
return ExceptionSetFlags::None;
- }
case GT_BOUNDS_CHECK:
case GT_INDEX_ADDR:
@@ -7517,6 +7511,86 @@ GenTreeRetExpr* Compiler::gtNewInlineCandidateReturnExpr(GenTreeCall* inlineCand
return node;
}
+//------------------------------------------------------------------------
+// gtNewFieldRef: Create a new GT_FIELD node.
+//
+// Normalizes struct types (for SIMD vectors). Sets GTF_GLOB_REF for fields
+// that may be pointing into globally visible memory.
+//
+// Arguments:
+// type - type for the field node
+// fldHnd - the field handle
+// obj - the instance, an address
+// offset - the field offset
+//
+// Return Value:
+// The created node.
+//
+GenTreeField* Compiler::gtNewFieldRef(var_types type, CORINFO_FIELD_HANDLE fldHnd, GenTree* obj, DWORD offset)
+{
+ // GT_FIELD nodes are transformed into GT_IND nodes.
+ assert(GenTree::s_gtNodeSizes[GT_IND] <= GenTree::s_gtNodeSizes[GT_FIELD]);
+
+ if (type == TYP_STRUCT)
+ {
+ CORINFO_CLASS_HANDLE structHnd;
+ eeGetFieldType(fldHnd, &structHnd);
+ type = impNormStructType(structHnd);
+ }
+
+ GenTreeField* fieldNode = new (this, GT_FIELD) GenTreeField(GT_FIELD, type, obj, fldHnd, offset);
+
+ // If "obj" is the address of a local, note that a field of that struct local has been accessed.
+ if ((obj != nullptr) && obj->OperIs(GT_ADDR) && varTypeIsStruct(obj->AsUnOp()->gtOp1) &&
+ obj->AsUnOp()->gtOp1->OperIs(GT_LCL_VAR))
+ {
+ LclVarDsc* varDsc = lvaGetDesc(obj->AsUnOp()->gtOp1->AsLclVarCommon());
+
+ varDsc->lvFieldAccessed = 1;
+
+ if (lvaIsImplicitByRefLocal(lvaGetLclNum(varDsc)))
+ {
+ // These structs are passed by reference and can easily become global references if those
+ // references are exposed. We clear out address-exposure information for these parameters
+ // when they are converted into references in fgRetypeImplicitByRefArgs() so we do not have
+ // the necessary information in morph to know if these indirections are actually global
+ // references, so we have to be conservative here.
+ fieldNode->gtFlags |= GTF_GLOB_REF;
+ }
+ }
+ else
+ {
+ fieldNode->gtFlags |= GTF_GLOB_REF;
+ }
+
+ return fieldNode;
+}
+
+//------------------------------------------------------------------------
+// gtNewFieldRef: Create a new GT_FIELD_ADDR node.
+//
+// Arguments:
+// type - type for the address node
+// fldHnd - the field handle
+// obj - the instance, an address
+// offset - the field offset
+//
+// Return Value:
+// The created node.
+//
+GenTreeField* Compiler::gtNewFieldAddrNode(var_types type, CORINFO_FIELD_HANDLE fldHnd, GenTree* obj, DWORD offset)
+{
+ assert(varTypeIsI(genActualType(type)));
+
+ GenTreeField* fieldNode = new (this, GT_FIELD_ADDR) GenTreeField(GT_FIELD_ADDR, type, obj, fldHnd, offset);
+
+ // TODO-ADDR: add GTF_EXCEPT handling here and delete it from callers.
+ // TODO-ADDR: delete this zero-diff quirk.
+ fieldNode->gtFlags |= GTF_GLOB_REF;
+
+ return fieldNode;
+}
+
/*****************************************************************************
*
* Create a node that will assign 'src' to 'dst'.
@@ -8627,8 +8701,10 @@ GenTree* Compiler::gtCloneExpr(
break;
case GT_FIELD:
- copy = new (this, GT_FIELD) GenTreeField(tree->TypeGet(), tree->AsField()->GetFldObj(),
- tree->AsField()->gtFldHnd, tree->AsField()->gtFldOffset);
+ case GT_FIELD_ADDR:
+ copy = new (this, tree->OperGet())
+ GenTreeField(tree->OperGet(), tree->TypeGet(), tree->AsField()->GetFldObj(),
+ tree->AsField()->gtFldHnd, tree->AsField()->gtFldOffset);
copy->AsField()->gtFldMayOverlap = tree->AsField()->gtFldMayOverlap;
#ifdef FEATURE_READYTORUN
copy->AsField()->gtFieldLookup = tree->AsField()->gtFieldLookup;
@@ -9429,6 +9505,7 @@ GenTreeUseEdgeIterator::GenTreeUseEdgeIterator(GenTree* node)
// Unary operators with an optional operand
case GT_NOP:
case GT_FIELD:
+ case GT_FIELD_ADDR:
case GT_RETURN:
case GT_RETFILT:
if (m_node->AsUnOp()->gtOp1 == nullptr)
@@ -11863,7 +11940,7 @@ void Compiler::gtDispTree(GenTree* tree,
#endif // FEATURE_ARG_SPLIT
#endif // FEATURE_PUT_STRUCT_ARG_STK
- if (tree->OperIs(GT_FIELD))
+ if (tree->OperIs(GT_FIELD, GT_FIELD_ADDR))
{
printf(" %s", eeGetFieldName(tree->AsField()->gtFldHnd), 0);
}
diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h
index d0f40d9ad81..b78096914eb 100644
--- a/src/coreclr/jit/gentree.h
+++ b/src/coreclr/jit/gentree.h
@@ -489,33 +489,29 @@ enum GenTreeFlags : unsigned int
GTF_MEMORYBARRIER_LOAD = 0x40000000, // GT_MEMORYBARRIER -- Load barrier
+ GTF_FLD_TLS = 0x80000000, // GT_FIELD_ADDR -- field address is a Windows x86 TLS reference
GTF_FLD_VOLATILE = 0x40000000, // GT_FIELD -- same as GTF_IND_VOLATILE
- GTF_FLD_INITCLASS = 0x20000000, // GT_FIELD -- field access requires preceding class/static init helper
+ GTF_FLD_INITCLASS = 0x20000000, // GT_FIELD/GT_FIELD_ADDR -- field access requires preceding class/static init helper
GTF_FLD_TGT_HEAP = 0x10000000, // GT_FIELD -- same as GTF_IND_TGT_HEAP
GTF_INX_RNGCHK = 0x80000000, // GT_INDEX_ADDR -- this array address should be range-checked
GTF_INX_ADDR_NONNULL = 0x40000000, // GT_INDEX_ADDR -- this array address is not null
- GTF_IND_TGT_NOT_HEAP = 0x80000000, // GT_IND -- the target is not on the heap
- GTF_IND_VOLATILE = 0x40000000, // GT_IND -- the load or store must use volatile semantics (this is a nop on X86)
+ GTF_IND_TGT_NOT_HEAP = 0x80000000, // GT_IND -- the target is not on the heap
+ GTF_IND_VOLATILE = 0x40000000, // GT_IND -- the load or store must use volatile semantics (this is a nop on X86)
GTF_IND_NONFAULTING = 0x20000000, // Operations for which OperIsIndir() is true -- An indir that cannot fault.
- // Same as GTF_ARRLEN_NONFAULTING.
- GTF_IND_TGT_HEAP = 0x10000000, // GT_IND -- the target is on the heap
- GTF_IND_TLS_REF = 0x08000000, // GT_IND -- the target is accessed via TLS
- GTF_IND_ASG_LHS = 0x04000000, // GT_IND -- this GT_IND node is (the effective val) of the LHS of an
- // assignment; don't evaluate it independently.
- GTF_IND_REQ_ADDR_IN_REG = GTF_IND_ASG_LHS, // GT_IND -- requires its addr operand to be evaluated
- // into a register. This flag is useful in cases where it
- // is required to generate register indirect addressing mode.
- // One such case is virtual stub calls on xarch. This is only
- // valid in the backend, where GTF_IND_ASG_LHS is not necessary
- // (all such indirections will be lowered to GT_STOREIND).
- GTF_IND_UNALIGNED = 0x02000000, // GT_IND -- the load or store is unaligned (we assume worst case
- // alignment of 1 byte)
- GTF_IND_INVARIANT = 0x01000000, // GT_IND -- the target is invariant (a prejit indirection)
- GTF_IND_NONNULL = 0x00400000, // GT_IND -- the indirection never returns null (zero)
-
- GTF_IND_FLAGS = GTF_IND_VOLATILE | GTF_IND_NONFAULTING | GTF_IND_TLS_REF | GTF_IND_UNALIGNED | GTF_IND_INVARIANT |
+ GTF_IND_TGT_HEAP = 0x10000000, // GT_IND -- the target is on the heap
+ GTF_IND_REQ_ADDR_IN_REG = 0x08000000, // GT_IND -- requires its addr operand to be evaluated into a register.
+ // This flag is useful in cases where it is required to generate register
+ // indirect addressing mode. One such case is virtual stub calls on xarch.
+ GTF_IND_ASG_LHS = 0x04000000, // GT_IND -- this GT_IND node is (the effective val) of the LHS of an
+ // assignment; don't evaluate it independently.
+ GTF_IND_UNALIGNED = 0x02000000, // GT_IND -- the load or store is unaligned (we assume worst case
+ // alignment of 1 byte)
+ GTF_IND_INVARIANT = 0x01000000, // GT_IND -- the target is invariant (a prejit indirection)
+ GTF_IND_NONNULL = 0x00400000, // GT_IND -- the indirection never returns null (zero)
+
+ GTF_IND_FLAGS = GTF_IND_VOLATILE | GTF_IND_NONFAULTING | GTF_IND_UNALIGNED | GTF_IND_INVARIANT |
GTF_IND_NONNULL | GTF_IND_TGT_NOT_HEAP | GTF_IND_TGT_HEAP,
GTF_ADDRMODE_NO_CSE = 0x80000000, // GT_ADD/GT_MUL/GT_LSH -- Do not CSE this node only, forms complex
@@ -1743,6 +1739,7 @@ public:
case GT_RETFILT:
case GT_NOP:
case GT_FIELD:
+ case GT_FIELD_ADDR:
return true;
case GT_RETURN:
return gtType == TYP_VOID;
@@ -4032,8 +4029,8 @@ struct GenTreeField : public GenTreeUnOp
CORINFO_CONST_LOOKUP gtFieldLookup;
#endif
- GenTreeField(var_types type, GenTree* obj, CORINFO_FIELD_HANDLE fldHnd, DWORD offs)
- : GenTreeUnOp(GT_FIELD, type, obj), gtFldHnd(fldHnd), gtFldOffset(offs), gtFldMayOverlap(false)
+ GenTreeField(genTreeOps oper, var_types type, GenTree* obj, CORINFO_FIELD_HANDLE fldHnd, DWORD offs)
+ : GenTreeUnOp(oper, type, obj), gtFldHnd(fldHnd), gtFldOffset(offs), gtFldMayOverlap(false)
{
#ifdef FEATURE_READYTORUN
gtFieldLookup.addr = nullptr;
@@ -4064,6 +4061,17 @@ struct GenTreeField : public GenTreeUnOp
return GetFldObj() != nullptr;
}
+ bool IsStatic() const
+ {
+ return !IsInstance();
+ }
+
+ bool IsTlsStatic() const
+ {
+ assert(((gtFlags & GTF_FLD_TLS) == 0) || IsStatic());
+ return (gtFlags & GTF_FLD_TLS) != 0;
+ }
+
bool IsOffsetKnown() const
{
#ifdef FEATURE_READYTORUN
@@ -7023,8 +7031,7 @@ struct GenTreeIndir : public GenTreeOp
void SetAddr(GenTree* addr)
{
- assert(addr != nullptr);
- assert(addr->TypeIs(TYP_I_IMPL, TYP_BYREF));
+ assert(varTypeIsI(addr));
gtOp1 = addr;
}
diff --git a/src/coreclr/jit/gtlist.h b/src/coreclr/jit/gtlist.h
index 65a77bf6a3e..cde991961ab 100644
--- a/src/coreclr/jit/gtlist.h
+++ b/src/coreclr/jit/gtlist.h
@@ -93,7 +93,8 @@ GTNODE(NULLCHECK , GenTreeIndir ,0,GTK_UNOP|GTK_NOVALUE)
GTNODE(ARR_LENGTH , GenTreeArrLen ,0,GTK_UNOP|GTK_EXOP) // single-dimension (SZ) array length
GTNODE(MDARR_LENGTH , GenTreeMDArr ,0,GTK_UNOP|GTK_EXOP) // multi-dimension (MD) array length of a specific dimension
GTNODE(MDARR_LOWER_BOUND, GenTreeMDArr ,0,GTK_UNOP|GTK_EXOP) // multi-dimension (MD) array lower bound of a specific dimension
-GTNODE(FIELD , GenTreeField ,0,GTK_UNOP|GTK_EXOP|DBK_NOTLIR) // Member-field
+GTNODE(FIELD , GenTreeField ,0,GTK_UNOP|GTK_EXOP|DBK_NOTLIR) // Field load
+GTNODE(FIELD_ADDR , GenTreeField ,0,GTK_UNOP|GTK_EXOP|DBK_NOTLIR) // Field address
GTNODE(ALLOCOBJ , GenTreeAllocObj ,0,GTK_UNOP|GTK_EXOP|DBK_NOTLIR) // object allocator
GTNODE(INIT_VAL , GenTreeOp ,0,GTK_UNOP) // Initialization value for an initBlk
diff --git a/src/coreclr/jit/gtstructs.h b/src/coreclr/jit/gtstructs.h
index 7d50adbca39..ae62bde014a 100644
--- a/src/coreclr/jit/gtstructs.h
+++ b/src/coreclr/jit/gtstructs.h
@@ -66,7 +66,7 @@ GTSTRUCT_3(LclVar , GT_LCL_VAR, GT_LCL_VAR_ADDR, GT_STORE_LCL_VAR)
GTSTRUCT_3(LclFld , GT_LCL_FLD, GT_STORE_LCL_FLD, GT_LCL_FLD_ADDR)
GTSTRUCT_1(Cast , GT_CAST)
GTSTRUCT_1(Box , GT_BOX)
-GTSTRUCT_1(Field , GT_FIELD)
+GTSTRUCT_2(Field , GT_FIELD, GT_FIELD_ADDR)
GTSTRUCT_1(Call , GT_CALL)
GTSTRUCT_1(FieldList , GT_FIELD_LIST)
GTSTRUCT_1(Colon , GT_COLON)
diff --git a/src/coreclr/jit/importer.cpp b/src/coreclr/jit/importer.cpp
index 8002890e3b8..65fb915f783 100644
--- a/src/coreclr/jit/importer.cpp
+++ b/src/coreclr/jit/importer.cpp
@@ -4528,7 +4528,6 @@ GenTree* Compiler::impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolvedT
}
else // We need the value of a static field
{
- // In future, it may be better to just create the right tree here instead of folding it later.
op1 = gtNewFieldRef(lclTyp, pResolvedToken->hField);
if (pFieldInfo->fieldFlags & CORINFO_FLG_FIELD_INITCLASS)
@@ -4561,9 +4560,7 @@ GenTree* Compiler::impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolvedT
if (isBoxedStatic)
{
- op1 = gtNewOperNode(GT_IND, TYP_REF, op1);
- op1->gtFlags |= (GTF_IND_INVARIANT | GTF_IND_NONFAULTING | GTF_IND_NONNULL);
-
+ op1 = gtNewIndir(TYP_REF, op1, GTF_IND_INVARIANT | GTF_IND_NONFAULTING | GTF_IND_NONNULL);
op1 = gtNewOperNode(GT_ADD, TYP_BYREF, op1, gtNewIconNode(TARGET_POINTER_SIZE, outerFldSeq));
}
@@ -9413,12 +9410,10 @@ void Compiler::impImportBlockCode(BasicBlock* block)
int aflags = isLoadAddress ? CORINFO_ACCESS_ADDRESS : CORINFO_ACCESS_GET;
GenTree* obj = nullptr;
- typeInfo* tiObj = nullptr;
CORINFO_CLASS_HANDLE objType = nullptr; // used for fields
- if (opcode == CEE_LDFLD || opcode == CEE_LDFLDA)
+ if ((opcode == CEE_LDFLD) || (opcode == CEE_LDFLDA))
{
- tiObj = &impStackTop().seTypeInfo;
StackEntry se = impPopStack();
objType = se.seTypeInfo.GetClassHandle();
obj = se.val;
@@ -9540,8 +9535,19 @@ void Compiler::impImportBlockCode(BasicBlock* block)
obj = impGetStructAddr(obj, objType, CHECK_SPILL_ALL, true);
}
- /* Create the data member node */
- op1 = gtNewFieldRef(lclTyp, resolvedToken.hField, obj, fieldInfo.offset);
+ DWORD typeFlags = info.compCompHnd->getClassAttribs(resolvedToken.hClass);
+
+ // TODO-ADDR: use FIELD_ADDR for all fields, not just those of classes.
+ //
+ if (isLoadAddress && ((typeFlags & CORINFO_FLG_VALUECLASS) == 0))
+ {
+ op1 = gtNewFieldAddrNode(varTypeIsGC(obj) ? TYP_BYREF : TYP_I_IMPL, resolvedToken.hField,
+ obj, fieldInfo.offset);
+ }
+ else
+ {
+ op1 = gtNewFieldRef(lclTyp, resolvedToken.hField, obj, fieldInfo.offset);
+ }
#ifdef FEATURE_READYTORUN
if (fieldInfo.fieldAccessor == CORINFO_FIELD_INSTANCE_WITH_BASE)
@@ -9555,26 +9561,22 @@ void Compiler::impImportBlockCode(BasicBlock* block)
op1->gtFlags |= GTF_EXCEPT;
}
- DWORD typeFlags = info.compCompHnd->getClassAttribs(resolvedToken.hClass);
if (StructHasOverlappingFields(typeFlags))
{
op1->AsField()->gtFldMayOverlap = true;
}
- // wrap it in a address of operator if necessary
- if (isLoadAddress)
+ // Wrap it in an address-of operator if necessary.
+ if (isLoadAddress && op1->OperIs(GT_FIELD))
{
- op1 = gtNewOperNode(GT_ADDR,
- (var_types)(varTypeIsGC(obj->TypeGet()) ? TYP_BYREF : TYP_I_IMPL), op1);
+ op1 = gtNewOperNode(GT_ADDR, varTypeIsGC(obj) ? TYP_BYREF : TYP_I_IMPL, op1);
}
- else
+
+ if (!isLoadAddress && compIsForInlining() &&
+ impInlineIsGuaranteedThisDerefBeforeAnySideEffects(nullptr, nullptr, obj,
+ impInlineInfo->inlArgInfo))
{
- if (compIsForInlining() &&
- impInlineIsGuaranteedThisDerefBeforeAnySideEffects(nullptr, nullptr, obj,
- impInlineInfo->inlArgInfo))
- {
- impInlineInfo->thisDereferencedFirst = true;
- }
+ impInlineInfo->thisDereferencedFirst = true;
}
}
break;
@@ -9582,22 +9584,27 @@ void Compiler::impImportBlockCode(BasicBlock* block)
case CORINFO_FIELD_STATIC_TLS:
#ifdef TARGET_X86
// Legacy TLS access is implemented as intrinsic on x86 only
+ op1 = gtNewFieldAddrNode(TYP_I_IMPL, resolvedToken.hField, nullptr, fieldInfo.offset);
+ op1->gtFlags |= GTF_FLD_TLS; // fgMorphExpandTlsFieldAddr will handle the transformation.
- /* Create the data member node */
- op1 = gtNewFieldRef(lclTyp, resolvedToken.hField, NULL, fieldInfo.offset);
- op1->gtFlags |= GTF_IND_TLS_REF; // fgMorphField will handle the transformation
-
- if (isLoadAddress)
+ if (!isLoadAddress)
{
- op1 = gtNewOperNode(GT_ADDR, (var_types)TYP_I_IMPL, op1);
+ if (varTypeIsStruct(lclTyp))
+ {
+ op1 = gtNewObjNode(fieldInfo.structType, op1);
+ op1->gtFlags |= GTF_IND_NONFAULTING;
+ }
+ else
+ {
+ op1 = gtNewIndir(lclTyp, op1, GTF_IND_NONFAULTING);
+ op1->gtFlags |= GTF_GLOB_REF;
+ }
}
break;
#else
fieldInfo.fieldAccessor = CORINFO_FIELD_STATIC_ADDR_HELPER;
-
FALLTHROUGH;
#endif
-
case CORINFO_FIELD_STATIC_ADDR_HELPER:
case CORINFO_FIELD_INSTANCE_HELPER:
case CORINFO_FIELD_INSTANCE_ADDR_HELPER:
@@ -9675,8 +9682,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
if (!usesHelper)
{
- assert((op1->OperGet() == GT_FIELD) || (op1->OperGet() == GT_IND) ||
- (op1->OperGet() == GT_OBJ));
+ assert(op1->OperIs(GT_FIELD, GT_IND, GT_OBJ));
op1->gtFlags |= GTF_IND_VOLATILE;
}
}
@@ -9685,15 +9691,13 @@ void Compiler::impImportBlockCode(BasicBlock* block)
{
if (!usesHelper)
{
- assert((op1->OperGet() == GT_FIELD) || (op1->OperGet() == GT_IND) ||
- (op1->OperGet() == GT_OBJ));
+ assert(op1->OperIs(GT_FIELD, GT_IND, GT_OBJ));
op1->gtFlags |= GTF_IND_UNALIGNED;
}
}
}
- /* Check if the class needs explicit initialization */
-
+ // Check if the class needs explicit initialization.
if (fieldInfo.fieldFlags & CORINFO_FLG_FIELD_INITCLASS)
{
GenTree* helperNode = impInitClass(&resolvedToken);
@@ -9728,21 +9732,17 @@ void Compiler::impImportBlockCode(BasicBlock* block)
JITDUMP(" %08X", resolvedToken.token);
- int aflags = CORINFO_ACCESS_SET;
- GenTree* obj = nullptr;
- typeInfo* tiObj = nullptr;
- typeInfo tiVal;
+ int aflags = CORINFO_ACCESS_SET;
+ GenTree* obj = nullptr;
- /* Pull the value from the stack */
+ // Pull the value from the stack.
StackEntry se = impPopStack();
op2 = se.val;
- tiVal = se.seTypeInfo;
- clsHnd = tiVal.GetClassHandle();
+ clsHnd = se.seTypeInfo.GetClassHandle();
if (opcode == CEE_STFLD)
{
- tiObj = &impStackTop().seTypeInfo;
- obj = impPopStack().val;
+ obj = impPopStack().val;
if (impIsThis(obj))
{
@@ -9850,19 +9850,25 @@ void Compiler::impImportBlockCode(BasicBlock* block)
case CORINFO_FIELD_STATIC_TLS:
#ifdef TARGET_X86
- // Legacy TLS access is implemented as intrinsic on x86 only
-
- /* Create the data member node */
- op1 = gtNewFieldRef(lclTyp, resolvedToken.hField, NULL, fieldInfo.offset);
- op1->gtFlags |= GTF_IND_TLS_REF; // fgMorphField will handle the transformation
+ // Legacy TLS access is implemented as intrinsic on x86 only.
+ op1 = gtNewFieldAddrNode(TYP_I_IMPL, resolvedToken.hField, nullptr, fieldInfo.offset);
+ op1->gtFlags |= GTF_FLD_TLS; // fgMorphExpandTlsFieldAddr will handle the transformation.
+ if (varTypeIsStruct(lclTyp))
+ {
+ op1 = gtNewObjNode(fieldInfo.structType, op1);
+ op1->gtFlags |= GTF_IND_NONFAULTING;
+ }
+ else
+ {
+ op1 = gtNewIndir(lclTyp, op1, GTF_IND_NONFAULTING);
+ op1->gtFlags |= GTF_GLOB_REF;
+ }
break;
#else
fieldInfo.fieldAccessor = CORINFO_FIELD_STATIC_ADDR_HELPER;
-
FALLTHROUGH;
#endif
-
case CORINFO_FIELD_STATIC_ADDR_HELPER:
case CORINFO_FIELD_INSTANCE_HELPER:
case CORINFO_FIELD_INSTANCE_ADDR_HELPER:
@@ -9960,14 +9966,8 @@ void Compiler::impImportBlockCode(BasicBlock* block)
}
#endif
- // We can generate an assignment to a TYP_FLOAT from a TYP_DOUBLE
- // We insert a cast to the dest 'op1' type
- //
- if ((op1->TypeGet() != op2->TypeGet()) && varTypeIsFloating(op1->gtType) &&
- varTypeIsFloating(op2->gtType))
- {
- op2 = gtNewCastNode(op1->TypeGet(), op2, false, op1->TypeGet());
- }
+ // Insert an implicit FLOAT<->DOUBLE cast if needed.
+ op2 = impImplicitR4orR8Cast(op2, op1->TypeGet());
op1 = gtNewAssignNode(op1, op2);
}
diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp
index 5a0812c4e64..ea291fd21c3 100644
--- a/src/coreclr/jit/morph.cpp
+++ b/src/coreclr/jit/morph.cpp
@@ -4963,30 +4963,34 @@ unsigned Compiler::fgGetFieldMorphingTemp(GenTreeField* fieldNode)
return lclNum;
}
-/*****************************************************************************
- *
- * Transform the given GT_FIELD tree for code generation.
- */
-
+//------------------------------------------------------------------------
+// fgMorphField: Fully morph a FIELD/FIELD_ADDR tree.
+//
+// Expands the field node into explicit additions and indirections.
+//
+// Arguments:
+// tree - The FIELD/FIELD_ADDR tree
+// mac - The morphing context, used to elide adding null checks
+//
+// Return Value:
+// The fully morphed "tree".
+//
GenTree* Compiler::fgMorphField(GenTree* tree, MorphAddrContext* mac)
{
- assert(tree->gtOper == GT_FIELD);
+ assert(tree->OperIs(GT_FIELD, GT_FIELD_ADDR));
- CORINFO_FIELD_HANDLE symHnd = tree->AsField()->gtFldHnd;
- unsigned fldOffset = tree->AsField()->gtFldOffset;
- GenTree* objRef = tree->AsField()->GetFldObj();
- bool fldMayOverlap = tree->AsField()->gtFldMayOverlap;
- FieldSeq* fieldSeq = nullptr;
+ GenTreeField* fieldNode = tree->AsField();
+ GenTree* objRef = fieldNode->GetFldObj();
- // Reset the flag because we may reuse the node.
- tree->AsField()->gtFldMayOverlap = false;
-
- noway_assert(((objRef != nullptr) && (objRef->IsLocalAddrExpr() != nullptr)) ||
- ((tree->gtFlags & GTF_GLOB_REF) != 0));
+ if (tree->OperIs(GT_FIELD))
+ {
+ noway_assert(((objRef != nullptr) && (objRef->IsLocalAddrExpr() != nullptr)) ||
+ ((tree->gtFlags & GTF_GLOB_REF) != 0));
+ }
#ifdef FEATURE_SIMD
// if this field belongs to simd struct, translate it to simd intrinsic.
- if (mac == nullptr)
+ if ((mac == nullptr) && tree->OperIs(GT_FIELD))
{
if (IsBaselineSimdIsaSupported())
{
@@ -5008,38 +5012,92 @@ GenTree* Compiler::fgMorphField(GenTree* tree, MorphAddrContext* mac)
}
#endif
- // Create a default MorphAddrContext early so it doesn't go out of scope
- // before it is used.
- MorphAddrContext defMAC(MACK_Ind);
+ MorphAddrContext indMAC(MACK_Ind);
+ MorphAddrContext addrMAC(MACK_Addr);
+ bool isAddr = tree->OperIs(GT_FIELD_ADDR);
- // Is this an instance data member?
- if (objRef != nullptr)
+ if (fieldNode->IsInstance())
{
- if (tree->gtFlags & GTF_IND_TLS_REF)
+ // NULL mac means we encounter the GT_FIELD/GT_FIELD_ADDR first (and don't know our parent).
+ if (mac == nullptr)
{
- NO_WAY("instance field can not be a TLS ref.");
+ // FIELD denotes a dereference of the field, equivalent to a MACK_Ind with zero offset.
+ mac = tree->OperIs(GT_FIELD) ? &indMAC : &addrMAC;
}
- /* We'll create the expression "*(objRef + mem_offs)" */
+ tree = fgMorphExpandInstanceField(tree, mac);
+ }
+ else if (fieldNode->IsTlsStatic())
+ {
+ tree = fgMorphExpandTlsFieldAddr(tree);
+ }
+ else
+ {
+ tree = fgMorphExpandStaticField(tree);
+ }
- noway_assert(varTypeIsGC(objRef->TypeGet()) || objRef->TypeGet() == TYP_I_IMPL);
+ // Pass down the current mac; if non null we are computing an address
+ GenTree* result;
+ if (tree->OperIsSimple())
+ {
+ result = fgMorphSmpOp(tree, mac);
+ DBEXEC(result != fieldNode, result->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED);
- /*
- Now we have a tree like this:
+ // Quirk: preserve previous behavior with this NO_CSE.
+ if (isAddr && result->OperIs(GT_COMMA))
+ {
+ result->SetDoNotCSE();
+ }
+ }
+ else
+ {
+ result = fgMorphTree(tree, mac);
+ DBEXEC(result == fieldNode, result->gtDebugFlags &= ~GTF_DEBUG_NODE_MORPHED);
+ }
+
+ JITDUMP("\nFinal value of Compiler::fgMorphField after morphing:\n");
+ DISPTREE(result);
+
+ return result;
+}
+
+//------------------------------------------------------------------------
+// fgMorphExpandInstanceField: Expand an instance field reference.
+//
+// Expands the field node into explicit additions and indirections, adding
+// explicit null checks if necessary.
+//
+// Arguments:
+// tree - The FIELD/FIELD_ADDR tree
+// mac - The morphing context, used to elide adding null checks
+//
+// Return Value:
+// The expanded "tree" of an arbitrary shape.
+//
+GenTree* Compiler::fgMorphExpandInstanceField(GenTree* tree, MorphAddrContext* mac)
+{
+ assert(tree->OperIs(GT_FIELD, GT_FIELD_ADDR) && tree->AsField()->IsInstance());
+
+ GenTree* objRef = tree->AsField()->GetFldObj();
+ CORINFO_FIELD_HANDLE fieldHandle = tree->AsField()->gtFldHnd;
+ unsigned fieldOffset = tree->AsField()->gtFldOffset;
+
+ noway_assert(varTypeIsI(genActualType(objRef)));
+
+ /* Now we have a tree like this:
+--------------------+
- | GT_FIELD | tree
+ | GT_FIELD[_ADDR] | tree
+----------+---------+
|
+--------------+-------------+
|tree->AsField()->GetFldObj()|
+--------------+-------------+
-
We want to make it like this (when fldOffset is <= MAX_UNCHECKED_OFFSET_FOR_NULL_OBJECT):
+--------------------+
- | GT_IND/GT_OBJ | tree
+ | GT_IND/GT_OBJ | tree (for FIELD)
+---------+----------+
|
|
@@ -5050,37 +5108,37 @@ GenTree* Compiler::fgMorphField(GenTree* tree, MorphAddrContext* mac)
/ \
/ \
/ \
- +-------------------+ +----------------------+
- | objRef | | fldOffset |
- | | | (when fldOffset !=0) |
- +-------------------+ +----------------------+
+ +-------------------+ +----------------------+
+ | objRef | | fldOffset |
+ | | | (when fldOffset !=0) |
+ +-------------------+ +----------------------+
or this (when fldOffset is > MAX_UNCHECKED_OFFSET_FOR_NULL_OBJECT):
+--------------------+
- | GT_IND/GT_OBJ | tree
+ | GT_IND/GT_OBJ | tree (for FIELD)
+----------+---------+
|
+----------+---------+
- | GT_COMMA | comma2
+ | GT_COMMA | comma2
+----------+---------+
|
/ \
/ \
/ \
/ \
- +---------+----------+ +---------+----------+
- comma | GT_COMMA | | "+" (i.e. GT_ADD) | addr
- +---------+----------+ +---------+----------+
- | |
- / \ / \
- / \ / \
- / \ / \
- +-----+-----+ +-----+-----+ +---------+ +-----------+
- asg | GT_ASG | ind | GT_IND | | tmpLcl | | fldOffset |
- +-----+-----+ +-----+-----+ +---------+ +-----------+
+ +---------+----------+ +---------+----------+
+ comma | GT_COMMA | | "+" (i.e. GT_ADD) | addr
+ +---------+----------+ +---------+----------+
+ | |
+ / \ / \
+ / \ / \
+ / \ / \
+ +-----+-----+ +-----+-----+ +---------+ +-----------+
+ asg | GT_ASG | ind | GT_IND | | tmpLcl | | fldOffset |
+ +-----+-----+ +-----+-----+ +---------+ +-----------+
| |
/ \ |
/ \ |
@@ -5089,325 +5147,324 @@ GenTree* Compiler::fgMorphField(GenTree* tree, MorphAddrContext* mac)
| tmpLcl | | objRef | | tmpLcl |
+-----------+ +-----------+ +-----------+
+ */
- */
-
- var_types objRefType = objRef->TypeGet();
- GenTree* addr = nullptr;
- GenTree* comma = nullptr;
+ var_types objRefType = objRef->TypeGet();
+ GenTree* addr = nullptr;
+ GenTree* comma = nullptr;
+ bool addExplicitNullCheck = false;
- // NULL mac means we encounter the GT_FIELD first. This denotes a dereference of the field,
- // and thus is equivalent to a MACK_Ind with zero offset.
- if (mac == nullptr)
+ if (fgAddrCouldBeNull(objRef))
+ {
+ if (!mac->m_allConstantOffsets || fgIsBigOffset(mac->m_totalOffset + fieldOffset))
{
- mac = &defMAC;
+ addExplicitNullCheck = true;
}
-
- bool addExplicitNullCheck = false;
-
- // Implicit byref locals and string literals are never null.
- if (fgAddrCouldBeNull(objRef))
+ else
{
- if (!mac->m_allConstantOffsets || fgIsBigOffset(mac->m_totalOffset + fldOffset))
- {
- addExplicitNullCheck = true;
- }
- else
- {
- addExplicitNullCheck = mac->m_kind == MACK_Addr;
- }
+ addExplicitNullCheck = mac->m_kind == MACK_Addr;
}
+ }
- if (addExplicitNullCheck)
- {
-#ifdef DEBUG
- if (verbose)
- {
- printf("Before explicit null check morphing:\n");
- gtDispTree(tree);
- }
-#endif
-
- //
- // Create the "comma" subtree
- //
- GenTree* asg = nullptr;
-
- unsigned lclNum;
+ if (addExplicitNullCheck)
+ {
+ JITDUMP("Before explicit null check morphing:\n");
+ DISPTREE(tree);
- if (!objRef->OperIs(GT_LCL_VAR) || lvaIsLocalImplicitlyAccessedByRef(objRef->AsLclVar()->GetLclNum()))
- {
- lclNum = fgGetFieldMorphingTemp(tree->AsField());
+ // Create the "comma" subtree.
+ GenTree* asg = nullptr;
+ unsigned lclNum;
- // Create the "asg" node
- asg = gtNewTempAssign(lclNum, objRef);
- }
- else
- {
- lclNum = objRef->AsLclVarCommon()->GetLclNum();
- }
+ if (!objRef->OperIs(GT_LCL_VAR) || lvaIsLocalImplicitlyAccessedByRef(objRef->AsLclVar()->GetLclNum()))
+ {
+ lclNum = fgGetFieldMorphingTemp(tree->AsField());
- GenTree* lclVar = gtNewLclvNode(lclNum, objRefType);
- GenTree* nullchk = gtNewNullCheck(lclVar, compCurBB);
+ // Create the "asg" node
+ asg = gtNewTempAssign(lclNum, objRef);
+ }
+ else
+ {
+ lclNum = objRef->AsLclVarCommon()->GetLclNum();
+ }
- if (asg != nullptr)
- {
- // Create the "comma" node.
- comma = gtNewOperNode(GT_COMMA, TYP_VOID, asg, nullchk);
- }
- else
- {
- comma = nullchk;
- }
+ GenTree* lclVar = gtNewLclvNode(lclNum, objRefType);
+ GenTree* nullchk = gtNewNullCheck(lclVar, compCurBB);
- addr = gtNewLclvNode(lclNum, objRefType); // Use "tmpLcl" to create "addr" node.
+ if (asg != nullptr)
+ {
+ // Create the "comma" node.
+ comma = gtNewOperNode(GT_COMMA, TYP_VOID, asg, nullchk);
}
else
{
- addr = objRef;
+ comma = nullchk;
}
+ addr = gtNewLclvNode(lclNum, objRefType); // Use "tmpLcl" to create "addr" node.
+ }
+ else
+ {
+ addr = objRef;
+ }
+
#ifdef FEATURE_READYTORUN
- if (tree->AsField()->gtFieldLookup.addr != nullptr)
+ if (tree->AsField()->gtFieldLookup.addr != nullptr)
+ {
+ GenTree* offsetNode = nullptr;
+ if (tree->AsField()->gtFieldLookup.accessType == IAT_PVALUE)
{
- GenTree* offsetNode = nullptr;
- if (tree->AsField()->gtFieldLookup.accessType == IAT_PVALUE)
- {
- offsetNode = gtNewIndOfIconHandleNode(TYP_I_IMPL, (size_t)tree->AsField()->gtFieldLookup.addr,
- GTF_ICON_CONST_PTR, true);
+ offsetNode = gtNewIndOfIconHandleNode(TYP_I_IMPL, (size_t)tree->AsField()->gtFieldLookup.addr,
+ GTF_ICON_CONST_PTR, true);
#ifdef DEBUG
- offsetNode->gtGetOp1()->AsIntCon()->gtTargetHandle = (size_t)symHnd;
+ offsetNode->gtGetOp1()->AsIntCon()->gtTargetHandle = (size_t)fieldHandle;
#endif
- }
- else
- {
- noway_assert(!"unexpected accessType for R2R field access");
- }
-
- var_types addType = (objRefType == TYP_I_IMPL) ? TYP_I_IMPL : TYP_BYREF;
- addr = gtNewOperNode(GT_ADD, addType, addr, offsetNode);
}
-#endif
-
- // We only need to attach the field offset information for class fields.
- if ((objRefType == TYP_REF) && !fldMayOverlap)
+ else
{
- fieldSeq = GetFieldSeqStore()->Create(symHnd, fldOffset, FieldSeq::FieldKind::Instance);
+ noway_assert(!"unexpected accessType for R2R field access");
}
- // Add the member offset to the object's address.
- if (fldOffset != 0)
- {
- addr = gtNewOperNode(GT_ADD, (objRefType == TYP_I_IMPL) ? TYP_I_IMPL : TYP_BYREF, addr,
- gtNewIconNode(fldOffset, fieldSeq));
- }
+ addr = gtNewOperNode(GT_ADD, (objRefType == TYP_I_IMPL) ? TYP_I_IMPL : TYP_BYREF, addr, offsetNode);
+ }
+#endif
+
+ // We only need to attach the field offset information for class fields.
+ FieldSeq* fieldSeq = nullptr;
+ if ((objRefType == TYP_REF) && !tree->AsField()->gtFldMayOverlap)
+ {
+ fieldSeq = GetFieldSeqStore()->Create(fieldHandle, fieldOffset, FieldSeq::FieldKind::Instance);
+ }
- // Now let's set the "tree" as a GT_IND tree.
+ // Add the member offset to the object's address.
+ if (fieldOffset != 0)
+ {
+ addr = gtNewOperNode(GT_ADD, (objRefType == TYP_I_IMPL) ? TYP_I_IMPL : TYP_BYREF, addr,
+ gtNewIconNode(fieldOffset, fieldSeq));
+ }
+ if (addExplicitNullCheck)
+ {
+ // Create the "comma2" tree.
+ addr = gtNewOperNode(GT_COMMA, addr->TypeGet(), comma, addr);
+ }
+
+ if (tree->OperIs(GT_FIELD))
+ {
tree->SetOper(GT_IND);
- tree->AsOp()->gtOp1 = addr;
+ tree->AsIndir()->SetAddr(addr);
+ }
+ else // Otherwise, we have a FIELD_ADDR.
+ {
+ tree = addr;
+ }
- if (addExplicitNullCheck)
- {
- //
- // Create "comma2" node and link it to "tree".
- //
- GenTree* comma2 = gtNewOperNode(GT_COMMA, addr->TypeGet(), comma, addr);
- tree->AsOp()->gtOp1 = comma2;
- }
+ if (addExplicitNullCheck)
+ {
+ JITDUMP("After adding explicit null check:\n");
+ DISPTREE(tree);
+ }
-#ifdef DEBUG
- if (verbose)
+ return tree;
+}
+
+//------------------------------------------------------------------------
+// fgMorphExpandTlsFieldAddr: Expand a TLS field address.
+//
+// Expands ".tls"-style statics, produced by the C++/CLI compiler for
+// "__declspec(thread)" variables. An overview of the underlying native
+// mechanism can be found here: http://www.nynaeve.net/?p=180.
+//
+// Arguments:
+// tree - The GT_FIELD_ADDR tree
+//
+// Return Value:
+// The expanded tree - a GT_ADD.
+//
+GenTree* Compiler::fgMorphExpandTlsFieldAddr(GenTree* tree)
+{
+ // Note we do not support "FIELD"s for TLS statics, for simplicity.
+ assert(tree->OperIs(GT_FIELD_ADDR) && tree->AsField()->IsTlsStatic());
+
+ CORINFO_FIELD_HANDLE fieldHandle = tree->AsField()->gtFldHnd;
+ int fieldOffset = tree->AsField()->gtFldOffset;
+
+ // Thread Local Storage static field reference
+ //
+ // Field ref is a TLS 'Thread-Local-Storage' reference
+ //
+ // Build this tree: ADD(I_IMPL) #
+ // / \.
+ // / CNS(fldOffset)
+ // /
+ // /
+ // /
+ // IND(I_IMPL) == [Base of this DLL's TLS]
+ // |
+ // ADD(I_IMPL)
+ // / \.
+ // / CNS(IdValue*4) or MUL
+ // / / \.
+ // IND(I_IMPL) / CNS(4)
+ // | /
+ // CNS(TLS_HDL,0x2C) IND
+ // |
+ // CNS(pIdAddr)
+ //
+ // # Denotes the original node
+ //
+ void** pIdAddr = nullptr;
+ unsigned IdValue = info.compCompHnd->getFieldThreadLocalStoreID(fieldHandle, (void**)&pIdAddr);
+
+ //
+ // If we can we access the TLS DLL index ID value directly
+ // then pIdAddr will be NULL and
+ // IdValue will be the actual TLS DLL index ID
+ //
+ GenTree* dllRef = nullptr;
+ if (pIdAddr == nullptr)
+ {
+ if (IdValue != 0)
{
- if (addExplicitNullCheck)
- {
- printf("After adding explicit null check:\n");
- gtDispTree(tree);
- }
+ dllRef = gtNewIconNode(IdValue * 4, TYP_I_IMPL);
}
-#endif
}
- else /* This is a static data member */
+ else
{
- if (tree->gtFlags & GTF_IND_TLS_REF)
- {
- // Thread Local Storage static field reference
- //
- // Field ref is a TLS 'Thread-Local-Storage' reference
- //
- // Build this tree: IND(*) #
- // |
- // ADD(I_IMPL)
- // / \.
- // / CNS(fldOffset)
- // /
- // /
- // /
- // IND(I_IMPL) == [Base of this DLL's TLS]
- // |
- // ADD(I_IMPL)
- // / \.
- // / CNS(IdValue*4) or MUL
- // / / \.
- // IND(I_IMPL) / CNS(4)
- // | /
- // CNS(TLS_HDL,0x2C) IND
- // |
- // CNS(pIdAddr)
- //
- // # Denotes the original node
- //
- void** pIdAddr = nullptr;
- unsigned IdValue = info.compCompHnd->getFieldThreadLocalStoreID(symHnd, (void**)&pIdAddr);
-
- //
- // If we can we access the TLS DLL index ID value directly
- // then pIdAddr will be NULL and
- // IdValue will be the actual TLS DLL index ID
- //
- GenTree* dllRef = nullptr;
- if (pIdAddr == nullptr)
- {
- if (IdValue != 0)
- {
- dllRef = gtNewIconNode(IdValue * 4, TYP_I_IMPL);
- }
- }
- else
- {
- dllRef = gtNewIndOfIconHandleNode(TYP_I_IMPL, (size_t)pIdAddr, GTF_ICON_CONST_PTR, true);
+ dllRef = gtNewIndOfIconHandleNode(TYP_I_IMPL, (size_t)pIdAddr, GTF_ICON_CONST_PTR, true);
- // Next we multiply by 4
- dllRef = gtNewOperNode(GT_MUL, TYP_I_IMPL, dllRef, gtNewIconNode(4, TYP_I_IMPL));
- }
+ // Next we multiply by 4
+ dllRef = gtNewOperNode(GT_MUL, TYP_I_IMPL, dllRef, gtNewIconNode(4, TYP_I_IMPL));
+ }
#define WIN32_TLS_SLOTS (0x2C) // Offset from fs:[0] where the pointer to the slots resides
- // Mark this ICON as a TLS_HDL, codegen will use FS:[cns]
+ // Mark this ICON as a TLS_HDL, codegen will use FS:[cns]
+ GenTree* tlsRef = gtNewIconHandleNode(WIN32_TLS_SLOTS, GTF_ICON_TLS_HDL);
- GenTree* tlsRef = gtNewIconHandleNode(WIN32_TLS_SLOTS, GTF_ICON_TLS_HDL);
+ // Translate GTF_FLD_INITCLASS to GTF_ICON_INITCLASS
+ if ((tree->gtFlags & GTF_FLD_INITCLASS) != 0)
+ {
+ tree->gtFlags &= ~GTF_FLD_INITCLASS;
+ tlsRef->gtFlags |= GTF_ICON_INITCLASS;
+ }
- // Translate GTF_FLD_INITCLASS to GTF_ICON_INITCLASS
- if ((tree->gtFlags & GTF_FLD_INITCLASS) != 0)
- {
- tree->gtFlags &= ~GTF_FLD_INITCLASS;
- tlsRef->gtFlags |= GTF_ICON_INITCLASS;
- }
+ tlsRef = gtNewIndir(TYP_I_IMPL, tlsRef, GTF_IND_NONFAULTING | GTF_IND_INVARIANT);
- tlsRef = gtNewOperNode(GT_IND, TYP_I_IMPL, tlsRef);
+ if (dllRef != nullptr)
+ {
+ // Add the dllRef.
+ tlsRef = gtNewOperNode(GT_ADD, TYP_I_IMPL, tlsRef, dllRef);
+ }
- if (dllRef != nullptr)
- {
- /* Add the dllRef */
- tlsRef = gtNewOperNode(GT_ADD, TYP_I_IMPL, tlsRef, dllRef);
- }
+ // indirect to have tlsRef point at the base of the DLLs Thread Local Storage.
+ tlsRef = gtNewOperNode(GT_IND, TYP_I_IMPL, tlsRef);
- /* indirect to have tlsRef point at the base of the DLLs Thread Local Storage */
- tlsRef = gtNewOperNode(GT_IND, TYP_I_IMPL, tlsRef);
+ // Add the TLS static field offset to the address.
+ assert(!tree->AsField()->gtFldMayOverlap);
+ FieldSeq* fieldSeq = GetFieldSeqStore()->Create(fieldHandle, fieldOffset, FieldSeq::FieldKind::SimpleStatic);
+ GenTree* offsetNode = gtNewIconNode(fieldOffset, fieldSeq);
- // Add the TLS static field offset to the address.
- assert(!fldMayOverlap);
- fieldSeq = GetFieldSeqStore()->Create(symHnd, fldOffset, FieldSeq::FieldKind::SimpleStatic);
- tlsRef = gtNewOperNode(GT_ADD, TYP_I_IMPL, tlsRef, gtNewIconNode(fldOffset, fieldSeq));
+ tree->ChangeOper(GT_ADD);
+ tree->AsOp()->gtOp1 = tlsRef;
+ tree->AsOp()->gtOp2 = offsetNode;
- // Final indirect to get to actual value of TLS static field
+ return tree;
+}
- tree->SetOper(GT_IND);
- tree->AsOp()->gtOp1 = tlsRef;
+//------------------------------------------------------------------------
+// fgMorphExpandStaticField: Expand a simple static field load.
+//
+// Transforms the field into an explicit indirection off of a constant
+// address.
+//
+// Arguments:
+// tree - The GT_FIELD tree
+//
+// Return Value:
+// The expanded tree - a GT_IND.
+//
+GenTree* Compiler::fgMorphExpandStaticField(GenTree* tree)
+{
+ // Note we do not support "FIELD_ADDR"s for simple statics.
+ assert(tree->OperIs(GT_FIELD) && tree->AsField()->IsStatic());
- noway_assert(tree->gtFlags & GTF_IND_TLS_REF);
- }
- else
- {
- // Normal static field reference
- //
- // If we can we access the static's address directly
- // then pFldAddr will be NULL and
- // fldAddr will be the actual address of the static field
- //
- void** pFldAddr = nullptr;
- void* fldAddr = info.compCompHnd->getFieldAddress(symHnd, (void**)&pFldAddr);
+ // If we can we access the static's address directly
+ // then pFldAddr will be NULL and
+ // fldAddr will be the actual address of the static field
+ //
+ CORINFO_FIELD_HANDLE fieldHandle = tree->AsField()->gtFldHnd;
+ void** pFldAddr = nullptr;
+ void* fldAddr = info.compCompHnd->getFieldAddress(fieldHandle, (void**)&pFldAddr);
- // We should always be able to access this static field address directly
- //
- assert(pFldAddr == nullptr);
+ // We should always be able to access this static field address directly
+ //
+ assert(pFldAddr == nullptr);
- // For boxed statics, this direct address will be for the box. We have already added
- // the indirection for the field itself and attached the sequence, in importation.
- bool isBoxedStatic = gtIsStaticFieldPtrToBoxedStruct(tree->TypeGet(), symHnd);
- if (!isBoxedStatic)
- {
- // Only simple statics get importred as GT_FIELDs.
- fieldSeq = GetFieldSeqStore()->Create(symHnd, reinterpret_cast<size_t>(fldAddr),
- FieldSeq::FieldKind::SimpleStatic);
- }
+ // For boxed statics, this direct address will be for the box. We have already added
+ // the indirection for the field itself and attached the sequence, in importation.
+ FieldSeq* fieldSeq = nullptr;
+ bool isBoxedStatic = gtIsStaticFieldPtrToBoxedStruct(tree->TypeGet(), fieldHandle);
+ if (!isBoxedStatic)
+ {
+ // Only simple statics get imported as GT_FIELDs.
+ fieldSeq = GetFieldSeqStore()->Create(fieldHandle, reinterpret_cast<size_t>(fldAddr),
+ FieldSeq::FieldKind::SimpleStatic);
+ }
- // TODO-CQ: enable this optimization for 32 bit targets.
- bool isStaticReadOnlyInited = false;
+ // TODO-CQ: enable this optimization for 32 bit targets.
+ bool isStaticReadOnlyInited = false;
#ifdef TARGET_64BIT
- if (tree->TypeIs(TYP_REF) && !isBoxedStatic)
- {
- bool pIsSpeculative = true;
- if (info.compCompHnd->getStaticFieldCurrentClass(symHnd, &pIsSpeculative) != NO_CLASS_HANDLE)
- {
- isStaticReadOnlyInited = !pIsSpeculative;
- }
- }
+ if (tree->TypeIs(TYP_REF) && !isBoxedStatic)
+ {
+ bool pIsSpeculative = true;
+ if (info.compCompHnd->getStaticFieldCurrentClass(fieldHandle, &pIsSpeculative) != NO_CLASS_HANDLE)
+ {
+ isStaticReadOnlyInited = !pIsSpeculative;
+ }
+ }
#endif // TARGET_64BIT
- GenTreeFlags handleKind = GTF_EMPTY;
- if (isBoxedStatic)
- {
- handleKind = GTF_ICON_STATIC_BOX_PTR;
- }
- else if (isStaticReadOnlyInited)
- {
- handleKind = GTF_ICON_CONST_PTR;
- }
- else
- {
- handleKind = GTF_ICON_STATIC_HDL;
- }
- GenTreeIntCon* addr = gtNewIconHandleNode((size_t)fldAddr, handleKind, fieldSeq);
- INDEBUG(addr->gtTargetHandle = reinterpret_cast<size_t>(symHnd));
-
- // Translate GTF_FLD_INITCLASS to GTF_ICON_INITCLASS, if we need to.
- if (((tree->gtFlags & GTF_FLD_INITCLASS) != 0) && !isStaticReadOnlyInited)
- {
- tree->gtFlags &= ~GTF_FLD_INITCLASS;
- addr->gtFlags |= GTF_ICON_INITCLASS;
- }
-
- tree->SetOper(GT_IND);
- tree->AsOp()->gtOp1 = addr;
-
- if (isBoxedStatic)
- {
- // The box for the static cannot be null, and is logically invariant, since it
- // represents (a base for) the static's address.
- tree->gtFlags |= (GTF_IND_INVARIANT | GTF_IND_NONFAULTING | GTF_IND_NONNULL);
- }
- else if (isStaticReadOnlyInited)
- {
- JITDUMP("Marking initialized static read-only field '%s' as invariant.\n", eeGetFieldName(symHnd));
-
- // Static readonly field is not null at this point (see getStaticFieldCurrentClass impl).
- tree->gtFlags |= (GTF_IND_INVARIANT | GTF_IND_NONFAULTING | GTF_IND_NONNULL);
- }
+ GenTreeFlags handleKind = GTF_EMPTY;
+ if (isBoxedStatic)
+ {
+ handleKind = GTF_ICON_STATIC_BOX_PTR;
+ }
+ else if (isStaticReadOnlyInited)
+ {
+ handleKind = GTF_ICON_CONST_PTR;
+ }
+ else
+ {
+ handleKind = GTF_ICON_STATIC_HDL;
+ }
+ GenTreeIntCon* addr = gtNewIconHandleNode((size_t)fldAddr, handleKind, fieldSeq);
+ INDEBUG(addr->gtTargetHandle = reinterpret_cast<size_t>(fieldHandle));
- return fgMorphSmpOp(tree, /* mac */ nullptr);
- }
+ // Translate GTF_FLD_INITCLASS to GTF_ICON_INITCLASS, if we need to.
+ if (((tree->gtFlags & GTF_FLD_INITCLASS) != 0) && !isStaticReadOnlyInited)
+ {
+ tree->gtFlags &= ~GTF_FLD_INITCLASS;
+ addr->gtFlags |= GTF_ICON_INITCLASS;
}
- noway_assert(tree->OperIs(GT_IND));
+ tree->SetOper(GT_IND);
+ tree->AsOp()->gtOp1 = addr;
- // Pass down the current mac; if non null we are computing an address
- GenTree* result = fgMorphSmpOp(tree, mac);
+ if (isBoxedStatic)
+ {
+ // The box for the static cannot be null, and is logically invariant, since it
+ // represents (a base for) the static's address.
+ tree->gtFlags |= (GTF_IND_INVARIANT | GTF_IND_NONFAULTING | GTF_IND_NONNULL);
+ }
+ else if (isStaticReadOnlyInited)
+ {
+ JITDUMP("Marking initialized static read-only field '%s' as invariant.\n", eeGetFieldName(fieldHandle));
- JITDUMP("\nFinal value of Compiler::fgMorphField after calling fgMorphSmpOp:\n");
- DISPTREE(result);
+ // Static readonly field is not null at this point (see getStaticFieldCurrentClass impl).
+ tree->gtFlags |= (GTF_IND_INVARIANT | GTF_IND_NONFAULTING | GTF_IND_NONNULL);
+ }
- return result;
+ return tree;
}
//------------------------------------------------------------------------------
@@ -9361,6 +9418,7 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac, bool* optA
break;
case GT_FIELD:
+ case GT_FIELD_ADDR:
return fgMorphField(tree, mac);
case GT_INDEX_ADDR:
diff --git a/src/coreclr/jit/objectalloc.cpp b/src/coreclr/jit/objectalloc.cpp
index 1f251f48d38..94072a00eb7 100644
--- a/src/coreclr/jit/objectalloc.cpp
+++ b/src/coreclr/jit/objectalloc.cpp
@@ -656,6 +656,7 @@ bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parent
case GT_COLON:
case GT_QMARK:
case GT_ADD:
+ case GT_FIELD_ADDR:
// Check whether the local escapes via its grandparent.
++parentIndex;
keepChecking = true;
@@ -761,6 +762,7 @@ void ObjectAllocator::UpdateAncestorTypes(GenTree* tree, ArrayStack<GenTree*>* p
case GT_COLON:
case GT_QMARK:
case GT_ADD:
+ case GT_FIELD_ADDR:
if (parent->TypeGet() == TYP_REF)
{
parent->ChangeType(newType);
diff --git a/src/coreclr/jit/rationalize.cpp b/src/coreclr/jit/rationalize.cpp
index dad87564355..299dfc1d17a 100644
--- a/src/coreclr/jit/rationalize.cpp
+++ b/src/coreclr/jit/rationalize.cpp
@@ -53,9 +53,6 @@ void Rationalizer::RewriteIndir(LIR::Use& use)
GenTreeIndir* indir = use.Def()->AsIndir();
assert(indir->OperIs(GT_IND, GT_BLK, GT_OBJ));
- // Clear the `GTF_IND_ASG_LHS` flag, which overlaps with `GTF_IND_REQ_ADDR_IN_REG`.
- indir->gtFlags &= ~GTF_IND_ASG_LHS;
-
if (varTypeIsSIMD(indir))
{
if (indir->OperIs(GT_BLK, GT_OBJ))