diff --git a/src/coreclr/jit/compiler.cpp b/src/coreclr/jit/compiler.cpp index 5af1161f9f20f..a7b6a29e4728b 100644 --- a/src/coreclr/jit/compiler.cpp +++ b/src/coreclr/jit/compiler.cpp @@ -1947,7 +1947,7 @@ void Compiler::compInit(ArenaAllocator* pAlloc, #endif // DEBUG vnStore = nullptr; - m_opAsgnVarDefSsaNums = nullptr; + m_outlinedCompositeSsaNums = nullptr; m_nodeToLoopMemoryBlockMap = nullptr; fgSsaPassesCompleted = 0; fgVNPassesCompleted = 0; @@ -5320,11 +5320,10 @@ void Compiler::ResetOptAnnotations() assert(opts.optRepeat); assert(JitConfig.JitOptRepeatCount() > 0); fgResetForSsa(); - vnStore = nullptr; - m_opAsgnVarDefSsaNums = nullptr; - m_blockToEHPreds = nullptr; - fgSsaPassesCompleted = 0; - fgVNPassesCompleted = 0; + vnStore = nullptr; + m_blockToEHPreds = nullptr; + fgSsaPassesCompleted = 0; + fgVNPassesCompleted = 0; for (BasicBlock* const block : Blocks()) { diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h index 1db145289ce27..06dda1670c68d 100644 --- a/src/coreclr/jit/compiler.h +++ b/src/coreclr/jit/compiler.h @@ -211,17 +211,19 @@ class LclSsaVarDsc // TODO-Cleanup: In the case of uninitialized variables the block is set to nullptr by // SsaBuilder and changed to fgFirstBB during value numbering. It would be useful to // investigate and perhaps eliminate this rather unexpected behavior. - BasicBlock* m_block; + BasicBlock* m_block = nullptr; // The GT_ASG node that generates the definition, or nullptr for definitions // of uninitialized variables. - GenTreeOp* m_asg; + GenTreeOp* m_asg = nullptr; + // The SSA number associated with the previous definition for partial (GTF_USEASG) defs. + unsigned m_useDefSsaNum = SsaConfig::RESERVED_SSA_NUM; public: - LclSsaVarDsc() : m_block(nullptr), m_asg(nullptr) + LclSsaVarDsc() { } - LclSsaVarDsc(BasicBlock* block) : m_block(block), m_asg(nullptr) + LclSsaVarDsc(BasicBlock* block) : m_block(block) { } @@ -251,6 +253,16 @@ class LclSsaVarDsc m_asg = asg; } + unsigned GetUseDefSsaNum() const + { + return m_useDefSsaNum; + } + + void SetUseDefSsaNum(unsigned ssaNum) + { + m_useDefSsaNum = ssaNum; + } + ValueNumPair m_vnPair; }; @@ -341,7 +353,7 @@ class SsaDefArray } // Get a pointer to the SSA definition at the specified index. - T* GetSsaDefByIndex(unsigned index) + T* GetSsaDefByIndex(unsigned index) const { assert(index < m_count); return &m_array[index]; @@ -354,7 +366,7 @@ class SsaDefArray } // Get a pointer to the SSA definition associated with the specified SSA number. - T* GetSsaDef(unsigned ssaNum) + T* GetSsaDef(unsigned ssaNum) const { assert(ssaNum != SsaConfig::RESERVED_SSA_NUM); return GetSsaDefByIndex(ssaNum - GetMinSsaNum()); @@ -1098,7 +1110,7 @@ class LclVarDsc // Returns the address of the per-Ssa data for the given ssaNum (which is required // not to be the SsaConfig::RESERVED_SSA_NUM, which indicates that the variable is // not an SSA variable). - LclSsaVarDsc* GetPerSsaData(unsigned ssaNum) + LclSsaVarDsc* GetPerSsaData(unsigned ssaNum) const { return lvPerSsaData.GetSsaDef(ssaNum); } @@ -2758,6 +2770,9 @@ class Compiler // the given "fldHnd", is such an object pointer. bool gtIsStaticFieldPtrToBoxedStruct(var_types fieldNodeType, CORINFO_FIELD_HANDLE fldHnd); + bool gtStoreDefinesField( + LclVarDsc* fieldVarDsc, ssize_t offset, unsigned size, ssize_t* pFieldStoreOffset, unsigned* pFileStoreSize); + // Return true if call is a recursive call; return false otherwise. // Note when inlining, this looks for calls back to the root method. 
bool gtIsRecursiveCall(GenTreeCall* call) @@ -2859,6 +2874,7 @@ class Compiler char* gtGetLclVarName(unsigned lclNum); void gtDispLclVar(unsigned lclNum, bool padForBiggestDisp = true); void gtDispLclVarStructType(unsigned lclNum); + void gtDispSsaName(unsigned lclNum, unsigned ssaNum, bool isDef); void gtDispClassLayout(ClassLayout* layout, var_types type); void gtDispILLocation(const ILLocation& loc); void gtDispStmt(Statement* stmt, const char* msg = nullptr); @@ -3427,10 +3443,9 @@ class Compiler bool lvaTempsHaveLargerOffsetThanVars(); // Returns "true" iff local variable "lclNum" is in SSA form. - bool lvaInSsa(unsigned lclNum) + bool lvaInSsa(unsigned lclNum) const { - assert(lclNum < lvaCount); - return lvaTable[lclNum].lvInSsa; + return lvaGetDesc(lclNum)->lvInSsa; } unsigned lvaStubArgumentVar; // variable representing the secret stub argument coming in EAX @@ -4691,20 +4706,9 @@ class Compiler return BasicBlockRangeList(startBlock, endBlock); } - // The presence of a partial definition presents some difficulties for SSA: this is both a use of some SSA name - // of "x", and a def of a new SSA name for "x". The tree only has one local variable for "x", so it has to choose - // whether to treat that as the use or def. It chooses the "use", and thus the old SSA name. This map allows us - // to record/recover the "def" SSA number, given the lcl var node for "x" in such a tree. - typedef JitHashTable, unsigned> NodeToUnsignedMap; - NodeToUnsignedMap* m_opAsgnVarDefSsaNums; - NodeToUnsignedMap* GetOpAsgnVarDefSsaNums() - { - if (m_opAsgnVarDefSsaNums == nullptr) - { - m_opAsgnVarDefSsaNums = new (getAllocator()) NodeToUnsignedMap(getAllocator()); - } - return m_opAsgnVarDefSsaNums; - } + // This array, managed by the SSA numbering infrastructure, keeps "outlined composite SSA numbers". + // See "SsaNumInfo::GetNum" for more details on when this is needed. + JitExpandArrayStack* m_outlinedCompositeSsaNums; // This map tracks nodes whose value numbers explicitly or implicitly depend on memory states. // The map provides the entry block of the most closely enclosing loop that @@ -4733,12 +4737,6 @@ class Compiler void optRecordLoopMemoryDependence(GenTree* tree, BasicBlock* block, ValueNum memoryVN); void optCopyLoopMemoryDependence(GenTree* fromTree, GenTree* toTree); - // Requires that "lcl" has the GTF_VAR_DEF flag set. Returns the SSA number of "lcl". - // Except: assumes that lcl is a def, and if it is - // a partial def (GTF_VAR_USEASG), looks up and returns the SSA number for the "def", - // rather than the "use" SSA number recorded in the tree "lcl". - inline unsigned GetSsaNumForLocalVarDef(GenTree* lcl); - inline bool PreciseRefCountsRequired(); // Performs SSA conversion. 
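// Illustrative sketch, not code from this change: with GetSsaNumForLocalVarDef and the
// m_opAsgnVarDefSsaNums side table removed, a partial (GTF_VAR_USEASG) def now carries the
// SSA number of the new def on the node itself, and the number of the value it reads lives
// in that def's LclSsaVarDsc. "GetPriorSsaNumForPartialDef" is a hypothetical helper shown
// only to illustrate the new lookup.
unsigned GetPriorSsaNumForPartialDef(Compiler* comp, GenTreeLclVarCommon* defNode)
{
    // Assumes "defNode" carries a simple (non-composite) SSA name.
    unsigned      defSsaNum = defNode->GetSsaNum();
    LclSsaVarDsc* defSsaDsc = comp->lvaGetDesc(defNode->GetLclNum())->GetPerSsaData(defSsaNum);
    return defSsaDsc->GetUseDefSsaNum(); // SsaConfig::RESERVED_SSA_NUM for full defs.
}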
@@ -6754,11 +6752,7 @@ class Compiler bool optCopyProp(Statement* stmt, GenTreeLclVarCommon* tree, unsigned lclNum, LclNumToLiveDefsMap* curSsaName); void optBlockCopyPropPopStacks(BasicBlock* block, LclNumToLiveDefsMap* curSsaName); bool optBlockCopyProp(BasicBlock* block, LclNumToLiveDefsMap* curSsaName); - void optCopyPropPushDef(GenTree* defNode, - GenTreeLclVarCommon* lclNode, - unsigned lclNum, - LclNumToLiveDefsMap* curSsaName); - unsigned optIsSsaLocal(GenTreeLclVarCommon* lclNode); + void optCopyPropPushDef(GenTree* defNode, GenTreeLclVarCommon* lclNode, LclNumToLiveDefsMap* curSsaName); int optCopyProp_LclVarScore(const LclVarDsc* lclVarDsc, const LclVarDsc* copyVarDsc, bool preferOp2); PhaseStatus optVnCopyProp(); INDEBUG(void optDumpCopyPropStack(LclNumToLiveDefsMap* curSsaName)); @@ -9483,7 +9477,6 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX STRESS_MODE(DO_WHILE_LOOPS) \ STRESS_MODE(MIN_OPTS) \ STRESS_MODE(REVERSE_FLAG) /* Will set GTF_REVERSE_OPS whenever we can */ \ - STRESS_MODE(REVERSE_COMMA) /* Will reverse commas created with gtNewCommaNode */ \ STRESS_MODE(TAILCALL) /* Will make the call as a tailcall whenever legal */ \ STRESS_MODE(CATCH_ARG) /* Will spill catch arg */ \ STRESS_MODE(UNSAFE_BUFFER_CHECKS) \ @@ -9496,6 +9489,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX STRESS_MODE(BYREF_PROMOTION) /* Change undoPromotion decisions for byrefs */ \ STRESS_MODE(PROMOTE_FEWER_STRUCTS)/* Don't promote some structs that can be promoted */ \ STRESS_MODE(VN_BUDGET)/* Randomize the VN budget */ \ + STRESS_MODE(SSA_INFO) /* Select lower thresholds for "complex" SSA num encoding */ \ \ /* After COUNT_VARN, stress level 2 does all of these all the time */ \ \ @@ -9508,7 +9502,6 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX STRESS_MODE(CHK_FLOW_UPDATE) \ STRESS_MODE(EMITTER) \ STRESS_MODE(CHK_REIMPORT) \ - STRESS_MODE(FLATFP) \ STRESS_MODE(GENERIC_CHECK) \ STRESS_MODE(COUNT) @@ -10465,7 +10458,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX return compRoot->m_fieldSeqStore; } - typedef JitHashTable, FieldSeq*> NodeToFieldSeqMap; + typedef JitHashTable, unsigned> NodeToUnsignedMap; NodeToUnsignedMap* m_memorySsaMap[MemoryKindCount]; diff --git a/src/coreclr/jit/compiler.hpp b/src/coreclr/jit/compiler.hpp index e9b118f001e88..ecce3c1aa0272 100644 --- a/src/coreclr/jit/compiler.hpp +++ b/src/coreclr/jit/compiler.hpp @@ -4082,36 +4082,6 @@ bool Compiler::fgVarNeedsExplicitZeroInit(unsigned varNum, bool bbInALoop, bool return !info.compInitMem || (varDsc->lvIsTemp && !varDsc->HasGCPtr()); } -/*****************************************************************************/ -unsigned Compiler::GetSsaNumForLocalVarDef(GenTree* lcl) -{ - // Address-taken variables don't have SSA numbers. - if (!lvaInSsa(lcl->AsLclVarCommon()->GetLclNum())) - { - return SsaConfig::RESERVED_SSA_NUM; - } - - if (lcl->gtFlags & GTF_VAR_USEASG) - { - // It's partial definition of a struct. "lcl" is both used and defined here; - // we've chosen in this case to annotate "lcl" with the SSA number (and VN) of the use, - // and to store the SSA number of the def in a side table. - unsigned ssaNum; - // In case of a remorph (fgMorph) in CSE/AssertionProp after SSA phase, there - // wouldn't be an entry for the USEASG portion of the indir addr, return - // reserved. 
- if (!GetOpAsgnVarDefSsaNums()->Lookup(lcl, &ssaNum)) - { - return SsaConfig::RESERVED_SSA_NUM; - } - return ssaNum; - } - else - { - return lcl->AsLclVarCommon()->GetSsaNum(); - } -} - inline bool Compiler::PreciseRefCountsRequired() { return opts.OptimizationEnabled(); diff --git a/src/coreclr/jit/copyprop.cpp b/src/coreclr/jit/copyprop.cpp index b673289f2e8b3..d029c8f3dcef0 100644 --- a/src/coreclr/jit/copyprop.cpp +++ b/src/coreclr/jit/copyprop.cpp @@ -30,6 +30,18 @@ // void Compiler::optBlockCopyPropPopStacks(BasicBlock* block, LclNumToLiveDefsMap* curSsaName) { + auto popDef = [=](unsigned defLclNum, unsigned defSsaNum) { + CopyPropSsaDefStack* stack = nullptr; + if ((defSsaNum != SsaConfig::RESERVED_SSA_NUM) && curSsaName->Lookup(defLclNum, &stack)) + { + stack->Pop(); + if (stack->Empty()) + { + curSsaName->Remove(defLclNum); + } + } + }; + for (Statement* const stmt : block->Statements()) { for (GenTree* const tree : stmt->TreeList()) @@ -37,22 +49,20 @@ void Compiler::optBlockCopyPropPopStacks(BasicBlock* block, LclNumToLiveDefsMap* GenTreeLclVarCommon* lclDefNode = nullptr; if (tree->OperIsSsaDef() && tree->DefinesLocal(this, &lclDefNode)) { - const unsigned lclNum = optIsSsaLocal(lclDefNode); - - if (lclNum == BAD_VAR_NUM) + if (lclDefNode->HasCompositeSsaName()) { - continue; - } + LclVarDsc* varDsc = lvaGetDesc(lclDefNode); + assert(varDsc->lvPromoted); - CopyPropSsaDefStack* stack = nullptr; - if (curSsaName->Lookup(lclNum, &stack)) - { - stack->Pop(); - if (stack->Empty()) + for (unsigned index = 0; index < varDsc->lvFieldCnt; index++) { - curSsaName->Remove(lclNum); + popDef(varDsc->lvFieldLclStart + index, lclDefNode->GetSsaNum(this, index)); } } + else + { + popDef(lclDefNode->GetLclNum(), lclDefNode->GetSsaNum()); + } } } } @@ -67,19 +77,11 @@ void Compiler::optDumpCopyPropStack(LclNumToLiveDefsMap* curSsaName) JITDUMP("{ "); for (LclNumToLiveDefsMap::KeyIterator iter = curSsaName->Begin(); !iter.Equal(curSsaName->End()); ++iter) { - GenTreeLclVarCommon* lclVar = iter.GetValue()->Top().GetDefNode()->AsLclVarCommon(); - unsigned ssaLclNum = optIsSsaLocal(lclVar); - assert(ssaLclNum != BAD_VAR_NUM); + GenTreeLclVarCommon* lclDefNode = iter.GetValue()->Top().GetDefNode()->AsLclVarCommon(); + unsigned defLclNum = iter.Get(); + unsigned defSsaNum = lvaGetDesc(defLclNum)->GetSsaNumForSsaDef(iter.GetValue()->Top().GetSsaDef()); - if (ssaLclNum == lclVar->GetLclNum()) - { - JITDUMP("%d-[%06d]:V%02u ", iter.Get(), dspTreeID(lclVar), ssaLclNum); - } - else - { - // A promoted field was assigned using the parent struct, print `ssa field lclNum(parent lclNum)`. - JITDUMP("%d-[%06d]:V%02u(V%02u) ", iter.Get(), dspTreeID(lclVar), ssaLclNum, lclVar->GetLclNum()); - } + JITDUMP("[%06d]:V%02u/%u ", dspTreeID(lclDefNode), defLclNum, defSsaNum); } JITDUMP("}\n\n"); } @@ -138,16 +140,15 @@ int Compiler::optCopyProp_LclVarScore(const LclVarDsc* lclVarDsc, const LclVarDs // Arguments: // stmt - Statement the tree belongs to // tree - The local tree to perform copy propagation on -// lclNum - The local number of said tree +// lclNum - Number of the local "tree" refers to // curSsaName - The map from lclNum to its recently live definitions as a stack // // Returns: -// true if any changes were made +// Whether any changes were made. 
// bool Compiler::optCopyProp(Statement* stmt, GenTreeLclVarCommon* tree, unsigned lclNum, LclNumToLiveDefsMap* curSsaName) { - assert((lclNum != BAD_VAR_NUM) && (optIsSsaLocal(tree) == lclNum) && ((tree->gtFlags & GTF_VAR_DEF) == 0)); - assert(tree->gtVNPair.BothDefined()); + assert(((tree->gtFlags & GTF_VAR_DEF) == 0) && (tree->GetLclNum() == lclNum) && tree->gtVNPair.BothDefined()); bool madeChanges = false; LclVarDsc* varDsc = lvaGetDesc(lclNum); @@ -269,50 +270,17 @@ bool Compiler::optCopyProp(Statement* stmt, GenTreeLclVarCommon* tree, unsigned return madeChanges; } -//------------------------------------------------------------------------------ -// optIsSsaLocal : helper to check if the tree is a local that participates in SSA numbering. -// -// Arguments: -// lclNode - The local tree to perform the check on; -// -// Returns: -// - lclNum if the local is participating in SSA; -// - fieldLclNum if the parent local can be replaced by its only field; -// - BAD_VAR_NUM otherwise. -// -unsigned Compiler::optIsSsaLocal(GenTreeLclVarCommon* lclNode) -{ - unsigned lclNum = lclNode->GetLclNum(); - LclVarDsc* varDsc = lvaGetDesc(lclNum); - - if (!lvaInSsa(lclNum) && varDsc->CanBeReplacedWithItsField(this)) - { - lclNum = varDsc->lvFieldLclStart; - } - - if (!lvaInSsa(lclNum)) - { - return BAD_VAR_NUM; - } - - return lclNum; -} - //------------------------------------------------------------------------------ // optCopyPropPushDef: Push the new live SSA def on the stack for "lclNode". // // Arguments: // defNode - The definition node for this def (GT_ASG/GT_CALL) (will be "nullptr" for "use" defs) // lclNode - The local tree representing "the def" (that can actually be a use) -// lclNum - The local's number (see "optIsSsaLocal") // curSsaName - The map of local numbers to stacks of their defs // -void Compiler::optCopyPropPushDef(GenTree* defNode, - GenTreeLclVarCommon* lclNode, - unsigned lclNum, - LclNumToLiveDefsMap* curSsaName) +void Compiler::optCopyPropPushDef(GenTree* defNode, GenTreeLclVarCommon* lclNode, LclNumToLiveDefsMap* curSsaName) { - assert((lclNum != BAD_VAR_NUM) && (lclNum == optIsSsaLocal(lclNode))); + unsigned lclNum = lclNode->GetLclNum(); // Shadowed parameters are special: they will (at most) have one use, that is one on the RHS of an // assignment to their shadow, and we must not substitute them anywhere. So we'll not push any defs. @@ -323,45 +291,58 @@ void Compiler::optCopyPropPushDef(GenTree* defNode, return; } - unsigned ssaDefNum = SsaConfig::RESERVED_SSA_NUM; - if (defNode == nullptr) - { - // Parameters, this pointer etc. - assert((lclNode->gtFlags & GTF_VAR_DEF) == 0); - assert(lclNode->GetSsaNum() == SsaConfig::FIRST_SSA_NUM); - ssaDefNum = lclNode->GetSsaNum(); - } - else - { - assert((lclNode->gtFlags & GTF_VAR_DEF) != 0); + auto pushDef = [=](unsigned defLclNum, unsigned defSsaNum) { + // The default is "not available". + LclSsaVarDsc* ssaDef = nullptr; - // TODO-CQ: design better heuristics for propagation and remove this condition. - if (!defNode->IsPhiDefn()) + if (defSsaNum != SsaConfig::RESERVED_SSA_NUM) { - ssaDefNum = GetSsaNumForLocalVarDef(lclNode); + ssaDef = lvaGetDesc(defLclNum)->GetPerSsaData(defSsaNum); + } - // This will be "RESERVED_SSA_NUM" for promoted struct fields assigned using the parent struct. - // TODO-CQ: fix this. 
- assert((ssaDefNum != SsaConfig::RESERVED_SSA_NUM) || lvaGetDesc(lclNode)->CanBeReplacedWithItsField(this)); + CopyPropSsaDefStack* defStack; + if (!curSsaName->Lookup(defLclNum, &defStack)) + { + defStack = new (curSsaName->GetAllocator()) CopyPropSsaDefStack(curSsaName->GetAllocator()); + curSsaName->Set(defLclNum, defStack); } - } - // The default is "not available". - LclSsaVarDsc* ssaDef = nullptr; + defStack->Push(CopyPropSsaDef(ssaDef, lclNode)); + }; - if (ssaDefNum != SsaConfig::RESERVED_SSA_NUM) + if (lclNode->HasCompositeSsaName()) { - ssaDef = lvaGetDesc(lclNum)->GetPerSsaData(ssaDefNum); - } + LclVarDsc* varDsc = lvaGetDesc(lclNum); + assert(varDsc->lvPromoted); - CopyPropSsaDefStack* defStack; - if (!curSsaName->Lookup(lclNum, &defStack)) - { - defStack = new (curSsaName->GetAllocator()) CopyPropSsaDefStack(curSsaName->GetAllocator()); - curSsaName->Set(lclNum, defStack); + if (varDsc->CanBeReplacedWithItsField(this)) + { + // TODO-CQ: remove this zero-diff quirk. + pushDef(varDsc->lvFieldLclStart, SsaConfig::RESERVED_SSA_NUM); + } + else + { + for (unsigned index = 0; index < varDsc->lvFieldCnt; index++) + { + unsigned ssaNum = lclNode->GetSsaNum(this, index); + if (ssaNum != SsaConfig::RESERVED_SSA_NUM) + { + pushDef(varDsc->lvFieldLclStart + index, ssaNum); + } + } + } } + else if (lclNode->HasSsaName()) + { + unsigned ssaNum = lclNode->GetSsaNum(); + if ((defNode != nullptr) && defNode->IsPhiDefn()) + { + // TODO-CQ: design better heuristics for propagation and remove this. + ssaNum = SsaConfig::RESERVED_SSA_NUM; + } - defStack->Push(CopyPropSsaDef(ssaDef, lclNode)); + pushDef(lclNum, ssaNum); + } } //------------------------------------------------------------------------------ @@ -406,29 +387,18 @@ bool Compiler::optBlockCopyProp(BasicBlock* block, LclNumToLiveDefsMap* curSsaNa GenTreeLclVarCommon* lclDefNode = nullptr; if (tree->OperIsSsaDef() && tree->DefinesLocal(this, &lclDefNode)) { - const unsigned lclNum = optIsSsaLocal(lclDefNode); - - if (lclNum == BAD_VAR_NUM) - { - continue; - } - - optCopyPropPushDef(tree, lclDefNode, lclNum, curSsaName); + optCopyPropPushDef(tree, lclDefNode, curSsaName); } - else if (tree->OperIs(GT_LCL_VAR, GT_LCL_FLD) && ((tree->gtFlags & GTF_VAR_DEF) == 0)) + else if (tree->OperIs(GT_LCL_VAR, GT_LCL_FLD) && ((tree->gtFlags & GTF_VAR_DEF) == 0) && + tree->AsLclVarCommon()->HasSsaName()) { - const unsigned lclNum = optIsSsaLocal(tree->AsLclVarCommon()); - - if (lclNum == BAD_VAR_NUM) - { - continue; - } + unsigned lclNum = tree->AsLclVarCommon()->GetLclNum(); // If we encounter first use of a param or this pointer add it as a // live definition. Since they are always live, we'll do it only once. if ((lvaGetDesc(lclNum)->lvIsParam || (lclNum == info.compThisArg)) && !curSsaName->Lookup(lclNum)) { - optCopyPropPushDef(nullptr, tree->AsLclVarCommon(), lclNum, curSsaName); + optCopyPropPushDef(nullptr, tree->AsLclVarCommon(), curSsaName); } // TODO-Review: EH successor/predecessor iteration seems broken. diff --git a/src/coreclr/jit/earlyprop.cpp b/src/coreclr/jit/earlyprop.cpp index 3adaf9e5fd0f2..5c30adcdb7cde 100644 --- a/src/coreclr/jit/earlyprop.cpp +++ b/src/coreclr/jit/earlyprop.cpp @@ -349,7 +349,6 @@ GenTree* Compiler::optPropGetValueRec(unsigned lclNum, unsigned ssaNum, optPropK return nullptr; } - SSAName ssaName(lclNum, ssaNum); GenTree* value = nullptr; // Bound the recursion with a hard limit. 
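// Illustrative sketch, not code from this change: the copy propagation updates above
// (optBlockCopyPropPopStacks and optCopyPropPushDef) follow the same pattern for walking
// the SSA number(s) carried by a def node. A generic walker would look roughly like this;
// "func" is a hypothetical callback taking a local number and an SSA number, which may be
// SsaConfig::RESERVED_SSA_NUM for untracked fields.
template <typename TFunc>
void ForEachSsaDef(Compiler* comp, GenTreeLclVarCommon* defNode, TFunc func)
{
    if (defNode->HasCompositeSsaName())
    {
        // A def of a promoted struct: one SSA number per field.
        LclVarDsc* varDsc = comp->lvaGetDesc(defNode->GetLclNum());
        assert(varDsc->lvPromoted);
        for (unsigned index = 0; index < varDsc->lvFieldCnt; index++)
        {
            func(varDsc->lvFieldLclStart + index, defNode->GetSsaNum(comp, index));
        }
    }
    else
    {
        // The common 1-to-1 case.
        func(defNode->GetLclNum(), defNode->GetSsaNum());
    }
}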
@@ -372,12 +371,12 @@ GenTree* Compiler::optPropGetValueRec(unsigned lclNum, unsigned ssaNum, optPropK { assert(ssaDefAsg->OperIs(GT_ASG)); + GenTree* treeLhs = ssaDefAsg->gtGetOp1(); GenTree* treeRhs = ssaDefAsg->gtGetOp2(); - if (treeRhs->OperIsScalarLocal() && lvaInSsa(treeRhs->AsLclVarCommon()->GetLclNum()) && - treeRhs->AsLclVarCommon()->HasSsaName()) + // Recursively track the Rhs for "entire" stores. + if (treeLhs->OperIs(GT_LCL_VAR) && (treeLhs->AsLclVar()->GetLclNum() == lclNum) && treeRhs->OperIs(GT_LCL_VAR)) { - // Recursively track the Rhs unsigned rhsLclNum = treeRhs->AsLclVarCommon()->GetLclNum(); unsigned rhsSsaNum = treeRhs->AsLclVarCommon()->GetSsaNum(); diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp index a24607cb0887d..2f511fa880761 100644 --- a/src/coreclr/jit/gentree.cpp +++ b/src/coreclr/jit/gentree.cpp @@ -11041,6 +11041,32 @@ void Compiler::gtDispLclVarStructType(unsigned lclNum) } } +//------------------------------------------------------------------------ +// gtDispSsaName: Display the SSA use/def for a given local. +// +// Arguments: +// lclNum - The local's number. +// ssaNum - The SSA number. +// isDef - Whether this is a def. +// +void Compiler::gtDispSsaName(unsigned lclNum, unsigned ssaNum, bool isDef) +{ + if (ssaNum != SsaConfig::RESERVED_SSA_NUM) + { + if (isDef) + { + unsigned oldDefSsaNum = lvaGetDesc(lclNum)->GetPerSsaData(ssaNum)->GetUseDefSsaNum(); + if (oldDefSsaNum != SsaConfig::RESERVED_SSA_NUM) + { + printf("ud:%d->%d", oldDefSsaNum, ssaNum); + return; + } + } + + printf("%s:%d", isDef ? "d" : "u", ssaNum); + } +} + //------------------------------------------------------------------------ // gtDispClassLayout: Print size and type information about a layout. // @@ -11349,19 +11375,10 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack) printf(" "); const unsigned varNum = tree->AsLclVarCommon()->GetLclNum(); const LclVarDsc* varDsc = lvaGetDesc(varNum); + const bool isDef = (tree->gtFlags & GTF_VAR_DEF) != 0; + gtDispLclVar(varNum); - if (tree->AsLclVarCommon()->HasSsaName()) - { - if (tree->gtFlags & GTF_VAR_USEASG) - { - assert(tree->gtFlags & GTF_VAR_DEF); - printf("ud:%d->%d", tree->AsLclVarCommon()->GetSsaNum(), GetSsaNumForLocalVarDef(tree)); - } - else - { - printf("%s:%d", (tree->gtFlags & GTF_VAR_DEF) ? "d" : "u", tree->AsLclVarCommon()->GetSsaNum()); - } - } + gtDispSsaName(varNum, tree->AsLclVarCommon()->GetSsaNum(), isDef); if (isLclFld) { @@ -11387,15 +11404,15 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack) } else { - - for (unsigned i = varDsc->lvFieldLclStart; i < varDsc->lvFieldLclStart + varDsc->lvFieldCnt; ++i) + for (unsigned index = 0; index < varDsc->lvFieldCnt; index++) { - LclVarDsc* fieldVarDsc = lvaGetDesc(i); + unsigned fieldLclNum = varDsc->lvFieldLclStart + index; + LclVarDsc* fieldVarDsc = lvaGetDesc(fieldLclNum); const char* fieldName; #if !defined(TARGET_64BIT) if (varTypeIsLong(varDsc)) { - fieldName = (i == 0) ? "lo" : "hi"; + fieldName = (index == 0) ? 
"lo" : "hi"; } else #endif // !defined(TARGET_64BIT) @@ -11411,7 +11428,8 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack) printIndent(indentStack); printf(" %-6s V%02u.%s (offs=0x%02x) -> ", varTypeName(fieldVarDsc->TypeGet()), tree->AsLclVarCommon()->GetLclNum(), fieldName, fieldVarDsc->lvFldOffset); - gtDispLclVar(i); + gtDispLclVar(fieldLclNum); + gtDispSsaName(fieldLclNum, tree->AsLclVarCommon()->GetSsaNum(this, index), isDef); if (fieldVarDsc->lvRegister) { @@ -11420,7 +11438,7 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack) } if (fieldVarDsc->lvTracked && fgLocalVarLivenessDone && tree->IsMultiRegLclVar() && - tree->AsLclVar()->IsLastUse(i - varDsc->lvFieldLclStart)) + tree->AsLclVar()->IsLastUse(index)) { printf(" (last use)"); } @@ -16366,9 +16384,8 @@ bool GenTree::IsPhiNode() bool GenTree::IsPhiDefn() { - bool res = ((OperGet() == GT_ASG) && (AsOp()->gtOp2 != nullptr) && (AsOp()->gtOp2->OperGet() == GT_PHI)) || - ((OperGet() == GT_STORE_LCL_VAR) && (AsOp()->gtOp1 != nullptr) && (AsOp()->gtOp1->OperGet() == GT_PHI)); - assert(!res || OperGet() == GT_STORE_LCL_VAR || AsOp()->gtOp1->OperGet() == GT_LCL_VAR); + bool res = OperIs(GT_ASG) && AsOp()->gtOp2->OperIs(GT_PHI); + assert(!res || AsOp()->gtOp1->OperIs(GT_LCL_VAR)); return res; } @@ -16407,7 +16424,8 @@ bool GenTree::IsPartialLclFld(Compiler* comp) // that "LocalAddressVisitor" recognizes, as it is used to detect which // trees can define tracked locals. // -bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire, ssize_t* pOffset) +bool GenTree::DefinesLocal( + Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire, ssize_t* pOffset, unsigned* pSize) { assert((pOffset == nullptr) || (*pOffset == 0)); @@ -16421,17 +16439,39 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo // Return early for the common case. // - if (lhs->OperIs(GT_LCL_VAR, GT_LCL_FLD)) + if (lhs->OperIs(GT_LCL_VAR)) { *pLclVarTree = lhs->AsLclVarCommon(); if (pIsEntire != nullptr) { - *pIsEntire = !lhs->IsPartialLclFld(comp); + *pIsEntire = true; + } + if (pOffset != nullptr) + { + *pOffset = 0; } + if (pSize != nullptr) + { + *pSize = comp->lvaLclExactSize(lhs->AsLclVarCommon()->GetLclNum()); + } + + return true; + } + if (lhs->OperIs(GT_LCL_FLD)) + { + *pLclVarTree = lhs->AsLclVarCommon(); + if (pIsEntire != nullptr) + { + *pIsEntire = !lhs->AsLclFld()->IsPartialLclFld(comp); + } if (pOffset != nullptr) { - *pOffset = lhs->AsLclVarCommon()->GetLclOffs(); + *pOffset = lhs->AsLclFld()->GetLclOffs(); + } + if (pSize != nullptr) + { + *pSize = lhs->AsLclFld()->GetSize(); } return true; @@ -16498,6 +16538,11 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo *pOffset = offset; } + if (pSize != nullptr) + { + *pSize = storeSize; + } + return true; } @@ -17345,6 +17390,41 @@ bool Compiler::gtIsStaticFieldPtrToBoxedStruct(var_types fieldNodeType, CORINFO_ return fieldTyp != TYP_REF; } +//------------------------------------------------------------------------ +// gtStoreDefinesField: Does the given parent store modify the given field? 
+// +// Arguments: +// fieldVarDsc - The field local +// offset - Offset of the store, relative to the parent +// size - Size of the store in bytes +// pFieldStoreOffset - [out] parameter for the store's offset relative +// to the field local itself +// pFileStoreSize - [out] parameter for the amount of the field's +// local's bytes affected by the store +// +// Return Value: +// If the given store affects the given field local, "true, "false" +// otherwise. +// +bool Compiler::gtStoreDefinesField( + LclVarDsc* fieldVarDsc, ssize_t offset, unsigned size, ssize_t* pFieldStoreOffset, unsigned* pFileStoreSize) +{ + ssize_t fieldOffset = fieldVarDsc->lvFldOffset; + unsigned fieldSize = genTypeSize(fieldVarDsc); // No TYP_STRUCT field locals. + + ssize_t storeEndOffset = offset + static_cast(size); + ssize_t fieldEndOffset = fieldOffset + static_cast(fieldSize); + if ((fieldOffset < storeEndOffset) && (offset < fieldEndOffset)) + { + *pFieldStoreOffset = (offset < fieldOffset) ? 0 : (offset - fieldOffset); + *pFileStoreSize = static_cast(min(storeEndOffset, fieldEndOffset) - max(offset, fieldOffset)); + + return true; + } + + return false; +} + CORINFO_CLASS_HANDLE Compiler::gtGetStructHandleIfPresent(GenTree* tree) { CORINFO_CLASS_HANDLE structHnd = NO_CLASS_HANDLE; @@ -23300,6 +23380,150 @@ regNumber GenTree::ExtractTempReg(regMaskTP mask /* = (regMaskTP)-1 */) return genRegNumFromMask(tempRegMask); } +//------------------------------------------------------------------------ +// GetNum: Get the SSA number for a given field. +// +// Arguments: +// compiler - The Compiler instance +// index - The field index +// +// Return Value: +// The SSA number corresponding to the field at "index". +// +unsigned SsaNumInfo::GetNum(Compiler* compiler, unsigned index) const +{ + assert(IsComposite()); + if (HasCompactFormat()) + { + return (m_value >> (index * BITS_PER_SIMPLE_NUM)) & SIMPLE_NUM_MASK; + } + + // We expect this case to be very rare (outside stress). + return *GetOutlinedNumSlot(compiler, index); +} + +//------------------------------------------------------------------------ +// GetOutlinedNumSlot: Get a pointer the "outlined" SSA number for a field. +// +// Arguments: +// compiler - The Compiler instance +// index - The field index +// +// Return Value: +// Pointer to the SSA number corresponding to the field at "index". +// +unsigned* SsaNumInfo::GetOutlinedNumSlot(Compiler* compiler, unsigned index) const +{ + assert(IsComposite() && !HasCompactFormat()); + + // The "outlined" format for a composite number encodes a 30-bit-sized index. + // First, extract it: this will need "bit stitching" from the two parts. + unsigned outIndexLow = m_value & OUTLINED_INDEX_LOW_MASK; + unsigned outIndexHigh = (m_value & OUTLINED_INDEX_HIGH_MASK) >> 1; + unsigned outIndex = outIndexLow | outIndexHigh; + + return &compiler->m_outlinedCompositeSsaNums->GetRefNoExpand(outIndex + index); +} + +//------------------------------------------------------------------------ +// NumCanBeEncodedCompactly: Can the given field ref be encoded compactly? +// +// Arguments: +// ssaNum - The SSA number +// index - The field index +// +// Return Value: +// Whether the ref of the field at "index" can be encoded through the +// "compact" encoding scheme. +// +// Notes: +// Under stress, we randomly reduce the number of refs that can be +// encoded compactly, to stress the outlined encoding logic. 
+// +/* static */ bool SsaNumInfo::NumCanBeEncodedCompactly(unsigned index, unsigned ssaNum) +{ +#ifdef DEBUG + if (JitTls::GetCompiler()->compStressCompile(Compiler::STRESS_SSA_INFO, 20)) + { + return (ssaNum - 2) < index; + } +#endif // DEBUG + + assert(index < MAX_NumOfFieldsInPromotableStruct); + + return (ssaNum <= MAX_SIMPLE_NUM) && + ((index < SIMPLE_NUM_COUNT) || (SIMPLE_NUM_COUNT <= MAX_NumOfFieldsInPromotableStruct)); +} + +//------------------------------------------------------------------------ +// Composite: Form a composite SSA number, one capable of representing refs +// to more than one SSA local. +// +// Arguments: +// baseNum - The SSA number to base the new one on (composite/invalid) +// compiler - The Compiler instance +// parentLclNum - The promoted local representing a "whole" ref +// index - The field index +// ssaNum - The SSA number +// +// Return Value: +// A new, always composite, SSA number that represents all of the refs +// in "baseNum", with the field at "index" set to "ssaNum". +// +// Notes: +// It is assumed that the new number represents the same "whole" ref as +// the old one (the same parent local). If the SSA number needs to be +// reset fully, a new, RESERVED one should be created, and composed from +// with the appropriate parent reference. +// +/* static */ SsaNumInfo SsaNumInfo::Composite( + SsaNumInfo baseNum, Compiler* compiler, unsigned parentLclNum, unsigned index, unsigned ssaNum) +{ + assert(baseNum.IsInvalid() || baseNum.IsComposite()); + assert(compiler->lvaGetDesc(parentLclNum)->lvPromoted); + + if (NumCanBeEncodedCompactly(index, ssaNum) && (baseNum.IsInvalid() || baseNum.HasCompactFormat())) + { + unsigned ssaNumEncoded = ssaNum << (index * BITS_PER_SIMPLE_NUM); + if (baseNum.IsInvalid()) + { + return SsaNumInfo(COMPOSITE_ENCODING_BIT | ssaNumEncoded); + } + + return SsaNumInfo(ssaNumEncoded | baseNum.m_value); + } + + if (!baseNum.IsInvalid()) + { + *baseNum.GetOutlinedNumSlot(compiler, index) = ssaNum; + return baseNum; + } + + // This is the only path where we can encounter a null table. + if (compiler->m_outlinedCompositeSsaNums == nullptr) + { + CompAllocator alloc = compiler->getAllocator(CMK_SSA); + compiler->m_outlinedCompositeSsaNums = new (alloc) JitExpandArrayStack(alloc); + } + + // Allocate a new chunk for the field numbers. Once allocated, it cannot be expanded. + int count = compiler->lvaGetDesc(parentLclNum)->lvFieldCnt; + JitExpandArrayStack* table = compiler->m_outlinedCompositeSsaNums; + int outIdx = table->Size(); + unsigned* pLastSlot = &table->GetRef(outIdx + count - 1); // This will grow the table. + pLastSlot[-(count - 1) + static_cast(index)] = ssaNum; + + // Split the index if it does not fit into a small encoding. + if ((outIdx & ~OUTLINED_INDEX_LOW_MASK) != 0) + { + int outIdxLow = outIdx & OUTLINED_INDEX_LOW_MASK; + int outIdxHigh = (outIdx << 1) & OUTLINED_INDEX_HIGH_MASK; + outIdx = outIdxLow | outIdxHigh; + } + + return SsaNumInfo(COMPOSITE_ENCODING_BIT | OUTLINED_ENCODING_BIT | outIdx); +} + //------------------------------------------------------------------------ // GetLclOffs: if `this` is a field or a field address it returns offset // of the field inside the struct, for not a field it returns 0. 
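// Illustrative sketch with example-only names: gtStoreDefinesField above is an interval
// intersection between the store, covering [offset, offset + size) of the parent struct,
// and the field, covering [lvFldOffset, lvFldOffset + genTypeSize(field)). The out-params
// describe the overlapped piece relative to the field itself.
bool StoreOverlapsField(ssize_t storeOffset, unsigned storeSize, ssize_t fieldOffset, unsigned fieldSize,
                        ssize_t* pOffsetInField, unsigned* pSizeInField)
{
    ssize_t storeEndOffset = storeOffset + static_cast<ssize_t>(storeSize);
    ssize_t fieldEndOffset = fieldOffset + static_cast<ssize_t>(fieldSize);
    if ((fieldOffset < storeEndOffset) && (storeOffset < fieldEndOffset))
    {
        *pOffsetInField = (storeOffset < fieldOffset) ? 0 : (storeOffset - fieldOffset);
        *pSizeInField   = static_cast<unsigned>(min(storeEndOffset, fieldEndOffset) - max(storeOffset, fieldOffset));
        return true;
    }
    return false;
}
// For example, an 8-byte store at offset 4 against a 4-byte field at offset 8 overlaps the
// field entirely: *pOffsetInField == 0 and *pSizeInField == 4, so that field's def is "entire".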
diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h index 85e27fe3dce1b..240a9bf732961 100644 --- a/src/coreclr/jit/gentree.h +++ b/src/coreclr/jit/gentree.h @@ -1986,7 +1986,8 @@ struct GenTree bool DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire = nullptr, - ssize_t* pOffset = nullptr); + ssize_t* pOffset = nullptr, + unsigned* pSize = nullptr); bool DefinesLocalAddr(GenTreeLclVarCommon** pLclVarTree, ssize_t* pOffset = nullptr); @@ -3510,13 +3511,109 @@ struct GenTreeVecCon : public GenTree #endif }; -// Common supertype of LCL_VAR, LCL_FLD, REG_VAR, PHI_ARG -// This inherits from UnOp because lclvar stores are Unops +// Encapsulates the SSA info carried by local nodes. Most local nodes have simple 1-to-1 +// relationships with their SSA refs. However, defs of promoted structs can represent +// many SSA defs at the same time, and we need to efficiently encode that. +// +class SsaNumInfo final +{ + // This can be in one of four states: + // 1. Single SSA name: > RESERVED_SSA_NUM (0). + // 2. RESERVED_SSA_NUM (0) + // 3. "Inline composite name": packed SSA numbers of field locals (each could be RESERVED): + // [byte 3]: [top bit][ssa num 3] (7 bits) + // [byte 2]: [ssa num 2] (8 bits) + // [byte 1]: [compact encoding bit][ssa num 1] (7 bits) + // [byte 0]: [ssa num 0] (8 bits) + // We expect this encoding to cover the 99%+ case of composite names: locals with more + // than 127 defs, maximum for this encoding, are rare, and the current limit on the count + // of promoted fields is 4. + // 4. "Outlined composite name": index into the "composite SSA nums" table. The table itself + // will have the very simple format of N (the total number of fields / simple names) slots + // with full SSA numbers, starting at the encoded index. Notably, the table entries will + // include "empty" slots (for untracked fields), as we don't expect to use the table in + // the common case, and in the pathological cases, the space overhead should be mitigated + // by the cap on the number of tracked locals. + // + static const int BITS_PER_SIMPLE_NUM = 8; + static const int MAX_SIMPLE_NUM = (1 << (BITS_PER_SIMPLE_NUM - 1)) - 1; + static const int SIMPLE_NUM_MASK = MAX_SIMPLE_NUM; + static const int SIMPLE_NUM_COUNT = (sizeof(int) * BITS_PER_BYTE) / BITS_PER_SIMPLE_NUM; + static const int COMPOSITE_ENCODING_BIT = 1 << 31; + static const int OUTLINED_ENCODING_BIT = 1 << 15; + static const int OUTLINED_INDEX_LOW_MASK = OUTLINED_ENCODING_BIT - 1; + static const int OUTLINED_INDEX_HIGH_MASK = + ~(COMPOSITE_ENCODING_BIT | OUTLINED_ENCODING_BIT | OUTLINED_INDEX_LOW_MASK); + static_assert_no_msg(SsaConfig::RESERVED_SSA_NUM == 0); // A lot in the encoding relies on this.
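// Worked example of the compact format (illustrative only): if a promoted struct's four
// fields currently have defs with SSA numbers 5, 2, RESERVED (0) and 7, Composite() packs
//
//     m_value = COMPOSITE_ENCODING_BIT | (7 << 24) | (0 << 16) | (2 << 8) | 5
//
// and GetNum(compiler, index) recovers each one as
// (m_value >> (index * BITS_PER_SIMPLE_NUM)) & SIMPLE_NUM_MASK. Once any field needs an SSA
// number above MAX_SIMPLE_NUM (127), the name is "outlined": the remaining bits encode an
// index into Compiler::m_outlinedCompositeSsaNums, where the per-field numbers are kept in full.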
+ + int m_value; + + SsaNumInfo(int value) : m_value(value) + { + } + +public: + SsaNumInfo() : m_value(SsaConfig::RESERVED_SSA_NUM) + { + } + + bool IsSimple() const + { + return IsInvalid() || IsSsaNum(m_value); + } + + bool IsComposite() const + { + return !IsSimple(); + } + + bool IsInvalid() const + { + return m_value == SsaConfig::RESERVED_SSA_NUM; + } + + unsigned GetNum() const + { + assert(IsSimple()); + return m_value; + } + + unsigned GetNum(Compiler* compiler, unsigned index) const; + + static SsaNumInfo Simple(unsigned ssaNum) + { + assert(IsSsaNum(ssaNum) || (ssaNum == SsaConfig::RESERVED_SSA_NUM)); + return SsaNumInfo(ssaNum); + } + + static SsaNumInfo Composite( + SsaNumInfo baseNum, Compiler* compiler, unsigned parentLclNum, unsigned index, unsigned ssaNum); + +private: + bool HasCompactFormat() const + { + assert(IsComposite()); + return (m_value & OUTLINED_ENCODING_BIT) == 0; + } + + unsigned* GetOutlinedNumSlot(Compiler* compiler, unsigned index) const; + + static bool NumCanBeEncodedCompactly(unsigned index, unsigned ssaNum); + + static bool IsSsaNum(int value) + { + return value > SsaConfig::RESERVED_SSA_NUM; + } +}; + +// Common supertype of [STORE_]LCL_VAR, [STORE_]LCL_FLD, PHI_ARG, LCL_VAR_ADDR, LCL_FLD_ADDR. +// This inherits from UnOp because lclvar stores are unary. +// struct GenTreeLclVarCommon : public GenTreeUnOp { private: - unsigned _gtLclNum; // The local number. An index into the Compiler::lvaTable array. - unsigned _gtSsaNum; // The SSA number. + unsigned m_lclNum; // The local number. An index into the Compiler::lvaTable array. + SsaNumInfo m_ssaNum; // The SSA info. public: GenTreeLclVarCommon(genTreeOps oper, var_types type, unsigned lclNum DEBUGARG(bool largeNode = false)) @@ -3533,13 +3630,13 @@ struct GenTreeLclVarCommon : public GenTreeUnOp unsigned GetLclNum() const { - return _gtLclNum; + return m_lclNum; } void SetLclNum(unsigned lclNum) { - _gtLclNum = lclNum; - _gtSsaNum = SsaConfig::RESERVED_SSA_NUM; + m_lclNum = lclNum; + m_ssaNum = SsaNumInfo(); } uint16_t GetLclOffs() const; @@ -3548,17 +3645,32 @@ struct GenTreeLclVarCommon : public GenTreeUnOp unsigned GetSsaNum() const { - return _gtSsaNum; + return m_ssaNum.IsSimple() ? m_ssaNum.GetNum() : SsaConfig::RESERVED_SSA_NUM; + } + + unsigned GetSsaNum(Compiler* compiler, unsigned index) const + { + return m_ssaNum.IsComposite() ? m_ssaNum.GetNum(compiler, index) : SsaConfig::RESERVED_SSA_NUM; } void SetSsaNum(unsigned ssaNum) { - _gtSsaNum = ssaNum; + m_ssaNum = SsaNumInfo::Simple(ssaNum); + } + + void SetSsaNum(Compiler* compiler, unsigned index, unsigned ssaNum) + { + m_ssaNum = SsaNumInfo::Composite(m_ssaNum, compiler, GetLclNum(), index, ssaNum); + } + + bool HasSsaName() const + { + return GetSsaNum() != SsaConfig::RESERVED_SSA_NUM; } - bool HasSsaName() + bool HasCompositeSsaName() const { - return (GetSsaNum() != SsaConfig::RESERVED_SSA_NUM); + return m_ssaNum.IsComposite(); } #if DEBUGGABLE_GENTREE diff --git a/src/coreclr/jit/jitexpandarray.h b/src/coreclr/jit/jitexpandarray.h index d812f85854625..646f9e6747a3b 100644 --- a/src/coreclr/jit/jitexpandarray.h +++ b/src/coreclr/jit/jitexpandarray.h @@ -223,6 +223,26 @@ class JitExpandArrayStack : public JitExpandArray { } + //------------------------------------------------------------------------ + // GetRef: Get a reference to the element at index `idx`. + // + // Arguments: + // idx - the element index + // + // Return Value: + // A reference to the element at index `idx`. 
+ // + // Notes: + // Like `Get`, but returns a reference, so suitable for use as + // the LHS of an assignment. + // + T& GetRef(unsigned idx) + { + T& itemRef = JitExpandArray::GetRef(idx); + m_used = max((idx + 1), m_used); + return itemRef; + } + //------------------------------------------------------------------------ // Set: Assign value a copy of `val` to the element at index `idx`. // @@ -334,6 +354,27 @@ class JitExpandArrayStack : public JitExpandArray return this->m_members[idx]; } + //------------------------------------------------------------------------ + // GetRefNoExpand: Get a reference to the element at index `idx`. + // + // Arguments: + // idx - the element index + // + // Return Value: + // A reference to the element at index `idx`. + // + // Notes: + // Unlike `GetRef` this does not expand the array if the index is not valid. + // + // Assumptions: + // The element index does not exceed the current stack depth. + // + T& GetRefNoExpand(unsigned idx) + { + assert(idx < m_used); + return this->m_members[idx]; + } + //------------------------------------------------------------------------ // Remove: Remove the element at index `idx`. // diff --git a/src/coreclr/jit/optimizer.cpp b/src/coreclr/jit/optimizer.cpp index 99d6271246508..febf028ac088c 100644 --- a/src/coreclr/jit/optimizer.cpp +++ b/src/coreclr/jit/optimizer.cpp @@ -7277,7 +7277,7 @@ void Compiler::optHoistLoopBlocks(unsigned loopNum, ArrayStack* blo // To be invariant a LclVar node must not be the LHS of an assignment ... bool isInvariant = !user->OperIs(GT_ASG) || (user->AsOp()->gtGetOp1() != tree); // and the variable must be in SSA ... - isInvariant = isInvariant && m_compiler->lvaInSsa(lclNum) && lclVar->HasSsaName(); + isInvariant = isInvariant && lclVar->HasSsaName(); // and the SSA definition must be outside the loop we're hoisting from ... isInvariant = isInvariant && !m_compiler->optLoopTable[m_loopNum].lpContains( @@ -8468,7 +8468,7 @@ bool Compiler::optComputeLoopSideEffectsOfBlock(BasicBlock* blk) { // If it's a local byref for which we recorded a value number, use that... GenTreeLclVar* argLcl = arg->AsLclVar(); - if (lvaInSsa(argLcl->GetLclNum()) && argLcl->HasSsaName()) + if (argLcl->HasSsaName()) { ValueNum argVN = lvaTable[argLcl->GetLclNum()].GetPerSsaData(argLcl->GetSsaNum())->m_vnPair.GetLiberal(); @@ -8546,7 +8546,7 @@ bool Compiler::optComputeLoopSideEffectsOfBlock(BasicBlock* blk) if (rhsVN != ValueNumStore::NoVN) { rhsVN = vnStore->VNNormalValue(rhsVN); - if (lvaInSsa(lhsLcl->GetLclNum()) && lhsLcl->HasSsaName()) + if (lhsLcl->HasSsaName()) { lvaTable[lhsLcl->GetLclNum()] .GetPerSsaData(lhsLcl->GetSsaNum()) diff --git a/src/coreclr/jit/rangecheck.cpp b/src/coreclr/jit/rangecheck.cpp index a228c2ad9fc04..2fac7780007b2 100644 --- a/src/coreclr/jit/rangecheck.cpp +++ b/src/coreclr/jit/rangecheck.cpp @@ -467,11 +467,8 @@ LclSsaVarDsc* RangeCheck::GetSsaDefAsg(GenTreeLclVarCommon* lclUse) return nullptr; } - LclVarDsc* varDsc = m_pCompiler->lvaGetDesc(lclUse); - if (varDsc->CanBeReplacedWithItsField(m_pCompiler)) - { - varDsc = m_pCompiler->lvaGetDesc(varDsc->lvFieldLclStart); - } + unsigned lclNum = lclUse->GetLclNum(); + LclVarDsc* varDsc = m_pCompiler->lvaGetDesc(lclNum); LclSsaVarDsc* ssaDef = varDsc->GetPerSsaData(ssaNum); // RangeCheck does not care about uninitialized variables. 
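// Minimal usage sketch for the JitExpandArrayStack accessors added above; "alloc" is an
// assumed CompAllocator. GetRef expands the array and bumps the used size through "idx",
// while GetRefNoExpand requires "idx" to already be within the used range.
JitExpandArrayStack<unsigned> table(alloc);
table.GetRef(3) = 42;                   // grows the stack; table.Size() is now 4
unsigned num = table.GetRefNoExpand(3); // OK, since 3 < table.Size()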
@@ -486,8 +483,10 @@ LclSsaVarDsc* RangeCheck::GetSsaDefAsg(GenTreeLclVarCommon* lclUse) } // RangeCheck does not understand definitions generated by LCL_FLD nodes - // nor definitions generated by indirect stores to local variables. - if (!ssaDef->GetAssignment()->gtGetOp1()->OperIs(GT_LCL_VAR)) + // nor definitions generated by indirect stores to local variables, nor + // stores through parent structs. + GenTree* defStoreLhs = ssaDef->GetAssignment()->gtGetOp1(); + if (!defStoreLhs->OperIs(GT_LCL_VAR) || !defStoreLhs->AsLclVar()->HasSsaName()) { return nullptr; } @@ -505,11 +504,6 @@ LclSsaVarDsc* RangeCheck::GetSsaDefAsg(GenTreeLclVarCommon* lclUse) #ifdef DEBUG UINT64 RangeCheck::HashCode(unsigned lclNum, unsigned ssaNum) { - LclVarDsc* varDsc = m_pCompiler->lvaGetDesc(lclNum); - if (varDsc->CanBeReplacedWithItsField(m_pCompiler)) - { - lclNum = varDsc->lvFieldLclStart; - } assert(ssaNum != SsaConfig::RESERVED_SSA_NUM); return UINT64(lclNum) << 32 | ssaNum; } @@ -537,8 +531,7 @@ RangeCheck::Location* RangeCheck::GetDef(unsigned lclNum, unsigned ssaNum) RangeCheck::Location* RangeCheck::GetDef(GenTreeLclVarCommon* lcl) { - unsigned lclNum = lcl->GetLclNum(); - return GetDef(lclNum, lcl->GetSsaNum()); + return GetDef(lcl->GetLclNum(), lcl->GetSsaNum()); } // Add the def location to the hash table. @@ -576,12 +569,7 @@ void RangeCheck::MergeEdgeAssertions(GenTreeLclVarCommon* lcl, ASSERT_VALARG_TP return; } - LclVarDsc* varDsc = m_pCompiler->lvaGetDesc(lcl); - if (varDsc->CanBeReplacedWithItsField(m_pCompiler)) - { - varDsc = m_pCompiler->lvaGetDesc(varDsc->lvFieldLclStart); - } - LclSsaVarDsc* ssaData = varDsc->GetPerSsaData(lcl->GetSsaNum()); + LclSsaVarDsc* ssaData = m_pCompiler->lvaGetDesc(lcl)->GetPerSsaData(lcl->GetSsaNum()); ValueNum normalLclVN = m_pCompiler->vnStore->VNConservativeNormalValue(ssaData->m_vnPair); MergeEdgeAssertions(normalLclVN, assertions, pRange); } @@ -1506,33 +1494,9 @@ void RangeCheck::MapStmtDefs(const Location& loc) { GenTreeLclVarCommon* tree = loc.tree; - unsigned lclNum = tree->GetLclNum(); - unsigned ssaNum = tree->GetSsaNum(); - if (ssaNum == SsaConfig::RESERVED_SSA_NUM) + if (tree->HasSsaName() && ((tree->gtFlags & GTF_VAR_DEF) != 0) && loc.parent->OperIs(GT_ASG)) { - return; - } - - // If useasg then get the correct ssaNum to add to the map. - if (tree->gtFlags & GTF_VAR_USEASG) - { - unsigned ssaNum = m_pCompiler->GetSsaNumForLocalVarDef(tree); - if (ssaNum != SsaConfig::RESERVED_SSA_NUM) - { - // To avoid ind(addr) use asgs - if (loc.parent->OperIs(GT_ASG)) - { - SetDef(HashCode(lclNum, ssaNum), new (m_alloc) Location(loc)); - } - } - } - // If def get the location and store it against the variable's ssaNum. - else if (tree->gtFlags & GTF_VAR_DEF) - { - if (loc.parent->OperGet() == GT_ASG) - { - SetDef(HashCode(lclNum, ssaNum), new (m_alloc) Location(loc)); - } + SetDef(HashCode(tree->GetLclNum(), tree->GetSsaNum()), new (m_alloc) Location(loc)); } } diff --git a/src/coreclr/jit/ssabuilder.cpp b/src/coreclr/jit/ssabuilder.cpp index a01845b15f0f7..e57a3423ec6d1 100644 --- a/src/coreclr/jit/ssabuilder.cpp +++ b/src/coreclr/jit/ssabuilder.cpp @@ -83,6 +83,11 @@ void Compiler::fgResetForSsa() m_memorySsaMap[memoryKind] = nullptr; } + if (m_outlinedCompositeSsaNums != nullptr) + { + m_outlinedCompositeSsaNums->Reset(); + } + for (BasicBlock* const blk : Blocks()) { // Eliminate phis. 
@@ -738,62 +743,57 @@ void SsaBuilder::RenameDef(GenTree* defNode, BasicBlock* block) GenTreeLclVarCommon* lclNode; bool isFullDef = false; - bool isLocal = defNode->DefinesLocal(m_pCompiler, &lclNode, &isFullDef); + ssize_t offset = 0; + unsigned storeSize = 0; + bool isLocal = defNode->DefinesLocal(m_pCompiler, &lclNode, &isFullDef, &offset, &storeSize); if (isLocal) { + // This should have been marked as definition. + assert(((lclNode->gtFlags & GTF_VAR_DEF) != 0) && (((lclNode->gtFlags & GTF_VAR_USEASG) != 0) == !isFullDef)); + unsigned lclNum = lclNode->GetLclNum(); LclVarDsc* varDsc = m_pCompiler->lvaGetDesc(lclNum); - if (!m_pCompiler->lvaInSsa(lclNum) && varDsc->CanBeReplacedWithItsField(m_pCompiler)) + if (m_pCompiler->lvaInSsa(lclNum)) { - lclNum = varDsc->lvFieldLclStart; - varDsc = m_pCompiler->lvaGetDesc(lclNum); - assert(isFullDef); + lclNode->SetSsaNum(RenamePushDef(defNode, block, lclNum, isFullDef)); + assert(!varDsc->IsAddressExposed()); // Cannot define SSA memory. + return; } - if (m_pCompiler->lvaInSsa(lclNum)) + if (varDsc->lvPromoted) { - // Promoted variables are not in SSA, only their fields are. - assert(!m_pCompiler->lvaGetDesc(lclNum)->lvPromoted); - // This should have been marked as definition. - assert((lclNode->gtFlags & GTF_VAR_DEF) != 0); - - unsigned ssaNum = varDsc->lvPerSsaData.AllocSsaNum(m_allocator, block, - defNode->OperIs(GT_ASG) ? defNode->AsOp() : nullptr); - - if (!isFullDef) - { - assert((lclNode->gtFlags & GTF_VAR_USEASG) != 0); - - // This is a partial definition of a variable. The node records only the SSA number - // of the use that is implied by this partial definition. The SSA number of the new - // definition will be recorded in the m_opAsgnVarDefSsaNums map. - lclNode->SetSsaNum(m_renameStack.Top(lclNum)); - m_pCompiler->GetOpAsgnVarDefSsaNums()->Set(lclNode, ssaNum); - } - else + for (unsigned index = 0; index < varDsc->lvFieldCnt; index++) { - assert((lclNode->gtFlags & GTF_VAR_USEASG) == 0); - lclNode->SetSsaNum(ssaNum); - } + unsigned fieldLclNum = varDsc->lvFieldLclStart + index; + LclVarDsc* fieldVarDsc = m_pCompiler->lvaGetDesc(fieldLclNum); + if (m_pCompiler->lvaInSsa(fieldLclNum)) + { + ssize_t fieldStoreOffset; + unsigned fieldStoreSize; + unsigned ssaNum = SsaConfig::RESERVED_SSA_NUM; - m_renameStack.Push(block, lclNum, ssaNum); + // Fast-path the common case of an "entire" store. + if (isFullDef) + { + ssaNum = RenamePushDef(defNode, block, fieldLclNum, /* defIsFull */ true); + } + else if (m_pCompiler->gtStoreDefinesField(fieldVarDsc, offset, storeSize, &fieldStoreOffset, + &fieldStoreSize)) + { + ssaNum = RenamePushDef(defNode, block, fieldLclNum, + ValueNumStore::LoadStoreIsEntire(genTypeSize(fieldVarDsc), + fieldStoreOffset, fieldStoreSize)); + } - // If necessary, add "lclNum/ssaNum" to the arg list of a phi def in any - // handlers for try blocks that "block" is within. (But only do this for "real" definitions, - // not phi definitions.) - if (!defNode->IsPhiDefn()) - { - AddDefToHandlerPhis(block, lclNum, ssaNum); + if (ssaNum != SsaConfig::RESERVED_SSA_NUM) + { + lclNode->SetSsaNum(m_pCompiler, index, ssaNum); + } + } } - - // If it's a SSA local then it cannot be address exposed and thus does not define SSA memory. 
- assert(!m_pCompiler->lvaVarAddrExposed(lclNum)); - return; } - - lclNode->SetSsaNum(SsaConfig::RESERVED_SSA_NUM); } else if (defNode->OperIs(GT_CALL)) { @@ -801,7 +801,6 @@ void SsaBuilder::RenameDef(GenTree* defNode, BasicBlock* block) // the memory effect of the call is captured by the live out state from the block and doesn't need special // handling here. If we ever change liveness to more carefully model call effects (from interprecedural // information) we might need to revisit this. - return; } @@ -858,6 +857,47 @@ void SsaBuilder::RenameDef(GenTree* defNode, BasicBlock* block) } } +//------------------------------------------------------------------------ +// RenamePushDef: Create and push a new definition on the renaming stack. +// +// Arguments: +// defNode - The store node for the definition +// block - The block in which it occurs +// lclNum - Number of the local being defined +// isFullDef - Whether the def is "entire" +// +// Return Value: +// The pushed SSA number. +// +unsigned SsaBuilder::RenamePushDef(GenTree* defNode, BasicBlock* block, unsigned lclNum, bool isFullDef) +{ + // Promoted variables are not in SSA, only their fields are. + assert(m_pCompiler->lvaInSsa(lclNum) && !m_pCompiler->lvaGetDesc(lclNum)->lvPromoted); + + LclVarDsc* varDsc = m_pCompiler->lvaGetDesc(lclNum); + unsigned ssaNum = + varDsc->lvPerSsaData.AllocSsaNum(m_allocator, block, defNode->OperIs(GT_ASG) ? defNode->AsOp() : nullptr); + + if (!isFullDef) + { + // This is a partial definition of a variable. The node records only the SSA number + // of the def. The SSA number of the old definition (the "use" portion) will be + // recorded in the SSA descriptor. + varDsc->GetPerSsaData(ssaNum)->SetUseDefSsaNum(m_renameStack.Top(lclNum)); + } + + m_renameStack.Push(block, lclNum, ssaNum); + + // If necessary, add SSA name to the arg list of a phi def in any handlers for try + // blocks that "block" is within. (But only do this for "real" definitions, not phis.) + if (!defNode->IsPhiDefn()) + { + AddDefToHandlerPhis(block, lclNum, ssaNum); + } + + return ssaNum; +} + //------------------------------------------------------------------------ // RenameLclUse: Rename a use of a local variable. // @@ -1633,10 +1673,6 @@ bool SsaBuilder::IncludeInSsa(unsigned lclNum) { LclVarDsc* varDsc = m_pCompiler->lvaGetDesc(lclNum); - if (varDsc->IsAddressExposed()) - { - return false; // We exclude address-exposed variables. - } if (!varDsc->lvTracked) { return false; // SSA is only done for tracked variables diff --git a/src/coreclr/jit/ssabuilder.h b/src/coreclr/jit/ssabuilder.h index 83b81e0c0da45..c84da01f27526 100644 --- a/src/coreclr/jit/ssabuilder.h +++ b/src/coreclr/jit/ssabuilder.h @@ -79,7 +79,8 @@ class SsaBuilder // Rename all definitions and uses within a block. void BlockRenameVariables(BasicBlock* block); // Rename a local or memory definition generated by a GT_ASG/GT_CALL node. - void RenameDef(GenTree* asgNode, BasicBlock* block); + void RenameDef(GenTree* defNode, BasicBlock* block); + unsigned RenamePushDef(GenTree* defNode, BasicBlock* block, unsigned lclNum, bool isFullDef); // Rename a use of a local variable. void RenameLclUse(GenTreeLclVarCommon* lclNode); diff --git a/src/coreclr/jit/valuenum.cpp b/src/coreclr/jit/valuenum.cpp index 435aae46c8111..791305c22d338 100644 --- a/src/coreclr/jit/valuenum.cpp +++ b/src/coreclr/jit/valuenum.cpp @@ -4846,53 +4846,88 @@ void Compiler::fgValueNumberLocalStore(GenTree* storeNode, // Should not have been recorded as updating the GC heap. 
assert(!GetMemorySsaMap(GcHeap)->Lookup(storeNode)); - LclVarDsc* varDsc = lvaGetDesc(lclDefNode); - unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclDefNode); + auto processDef = [=](unsigned defLclNum, unsigned defSsaNum, ssize_t defOffset, unsigned defSize, + ValueNumPair defValue) { - if (lclDefSsaNum != SsaConfig::RESERVED_SSA_NUM) - { - unsigned lclSize = lvaLclExactSize(lclDefNode->GetLclNum()); + LclVarDsc* defVarDsc = lvaGetDesc(defLclNum); - ValueNumPair newLclValue; - if (vnStore->LoadStoreIsEntire(lclSize, offset, storeSize)) + if (defSsaNum != SsaConfig::RESERVED_SSA_NUM) { - newLclValue = value; + unsigned lclSize = lvaLclExactSize(defLclNum); + + ValueNumPair newLclValue; + if (vnStore->LoadStoreIsEntire(lclSize, defOffset, defSize)) + { + newLclValue = defValue; + } + else + { + assert((lclDefNode->gtFlags & GTF_VAR_USEASG) != 0); + unsigned oldDefSsaNum = defVarDsc->GetPerSsaData(defSsaNum)->GetUseDefSsaNum(); + ValueNumPair oldLclValue = defVarDsc->GetPerSsaData(oldDefSsaNum)->m_vnPair; + newLclValue = vnStore->VNPairForStore(oldLclValue, lclSize, defOffset, defSize, defValue); + } + + // Any out-of-bounds stores should have made the local address-exposed. + assert(newLclValue.BothDefined()); + + if (normalize) + { + // We normalize types stored in local locations because things outside VN itself look at them. + newLclValue = vnStore->VNPairForLoadStoreBitCast(newLclValue, defVarDsc->TypeGet(), lclSize); + assert((genActualType(vnStore->TypeOfVN(newLclValue.GetLiberal())) == genActualType(defVarDsc))); + } + + defVarDsc->GetPerSsaData(defSsaNum)->m_vnPair = newLclValue; + + JITDUMP("Tree [%06u] assigned VN to local var V%02u/%d: ", dspTreeID(storeNode), defLclNum, defSsaNum); + JITDUMPEXEC(vnpPrint(newLclValue, 1)); + JITDUMP("\n"); + } + else if (defVarDsc->IsAddressExposed()) + { + ValueNum heapVN = vnStore->VNForExpr(compCurBB, TYP_HEAP); + recordAddressExposedLocalStore(storeNode, heapVN DEBUGARG("local assign")); } else { - assert((lclDefNode->gtFlags & GTF_VAR_USEASG) != 0); - // The "lclDefNode" node will be labeled with the SSA number of its "use" identity - // (we looked in a side table above for its "def" identity). Look up that value. - ValueNumPair oldLclValue = varDsc->GetPerSsaData(lclDefNode->GetSsaNum())->m_vnPair; - newLclValue = vnStore->VNPairForStore(oldLclValue, lclSize, offset, storeSize, value); + JITDUMP("Tree [%06u] assigns to non-address-taken local V%02u; excluded from SSA, so value not tracked\n", + dspTreeID(storeNode), defLclNum); } + }; - // Any out-of-bounds stores should have made the local address-exposed. - assert(newLclValue.BothDefined()); + if (lclDefNode->HasCompositeSsaName()) + { + LclVarDsc* varDsc = lvaGetDesc(lclDefNode); + assert(varDsc->lvPromoted); - if (normalize) + for (unsigned index = 0; index < varDsc->lvFieldCnt; index++) { - // We normalize types stored in local locations because things outside VN itself look at them. - newLclValue = vnStore->VNPairForLoadStoreBitCast(newLclValue, varDsc->TypeGet(), lclSize); - assert((genActualType(vnStore->TypeOfVN(newLclValue.GetLiberal())) == genActualType(varDsc))); - } + unsigned fieldLclNum = varDsc->lvFieldLclStart + index; + LclVarDsc* fieldVarDsc = lvaGetDesc(fieldLclNum); - varDsc->GetPerSsaData(lclDefSsaNum)->m_vnPair = newLclValue; + ssize_t fieldStoreOffset; + unsigned fieldStoreSize; + if (gtStoreDefinesField(fieldVarDsc, offset, storeSize, &fieldStoreOffset, &fieldStoreSize)) + { + // TYP_STRUCT can represent the general case where the value could be of any size. 
+ var_types fieldStoreType = TYP_STRUCT; + if (vnStore->LoadStoreIsEntire(genTypeSize(fieldVarDsc), fieldStoreOffset, fieldStoreSize)) + { + // Avoid redundant bitcasts for the common case of a full definition. + fieldStoreType = fieldVarDsc->TypeGet(); + } + ValueNumPair fieldStoreValue = + vnStore->VNPairForLoad(value, storeSize, fieldStoreType, offset, fieldStoreSize); - JITDUMP("Tree [%06u] assigned VN to local var V%02u/%d: ", dspTreeID(storeNode), lclDefNode->GetLclNum(), - lclDefSsaNum); - JITDUMPEXEC(vnpPrint(newLclValue, 1)); - JITDUMP("\n"); - } - else if (varDsc->IsAddressExposed()) - { - ValueNum heapVN = vnStore->VNForExpr(compCurBB, TYP_HEAP); - recordAddressExposedLocalStore(storeNode, heapVN DEBUGARG("local assign")); + processDef(fieldLclNum, lclDefNode->GetSsaNum(this, index), fieldStoreOffset, fieldStoreSize, + fieldStoreValue); + } + } } else { - JITDUMP("Tree [%06u] assigns to non-address-taken local var V%02u; excluded from SSA, so value not tracked\n", - dspTreeID(storeNode), lclDefNode->GetLclNum()); + processDef(lclDefNode->GetLclNum(), lclDefNode->GetSsaNum(), offset, storeSize, value); } } @@ -8377,75 +8412,45 @@ void Compiler::fgValueNumberBlockAssignment(GenTree* tree) GenTree* rhs = tree->gtGetOp2(); GenTreeLclVarCommon* lclVarTree = nullptr; - bool isEntire = false; ssize_t offset = 0; - if (tree->DefinesLocal(this, &lclVarTree, &isEntire, &offset)) + unsigned storeSize = 0; + if (tree->DefinesLocal(this, &lclVarTree, /* isEntire */ nullptr, &offset, &storeSize)) { assert(lclVarTree->gtFlags & GTF_VAR_DEF); - // Should not have been recorded as updating the GC heap. - assert(!GetMemorySsaMap(GcHeap)->Lookup(tree)); - unsigned lhsLclNum = lclVarTree->GetLclNum(); - unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclVarTree); - LclVarDsc* lhsVarDsc = lvaGetDesc(lhsLclNum); + LclVarDsc* lhsVarDsc = lvaGetDesc(lclVarTree); + ValueNumPair rhsVNPair = ValueNumPair(); - // Ignore vars that we excluded from SSA (for example, because they're address-exposed). They don't have - // SSA names in which to store VN's on defs. We'll yield unique VN's when we read from them. - if (lclDefSsaNum != SsaConfig::RESERVED_SSA_NUM) + if (tree->OperIsInitBlkOp()) { - ClassLayout* const layout = lhs->GetLayout(this); - unsigned storeSize = layout->GetSize(); + ClassLayout* const layout = lhs->GetLayout(this); - ValueNumPair rhsVNPair = ValueNumPair(); - if (tree->OperIsInitBlkOp()) + ValueNum initObjVN = ValueNumStore::NoVN; + if (rhs->IsIntegralConst(0)) { - ValueNum initObjVN = ValueNumStore::NoVN; - if (rhs->IsIntegralConst(0)) - { - initObjVN = (lhs->TypeGet() == TYP_STRUCT) ? vnStore->VNForZeroObj(layout) - : vnStore->VNZeroForType(lhs->TypeGet()); - } - else - { - // Non-zero block init is very rare so we'll use a simple, unique VN here. - initObjVN = vnStore->VNForExpr(compCurBB, lhs->TypeGet()); - } - rhsVNPair.SetBoth(initObjVN); + initObjVN = (lhs->TypeGet() == TYP_STRUCT) ? vnStore->VNForZeroObj(layout) + : vnStore->VNZeroForType(lhs->TypeGet()); } else { - assert(tree->OperIsCopyBlkOp()); - rhsVNPair = vnStore->VNPNormalPair(rhs->gtVNPair); + // Non-zero block init is very rare so we'll use a simple, unique VN here. + initObjVN = vnStore->VNForExpr(compCurBB, lhs->TypeGet()); } - fgValueNumberLocalStore(tree, lclVarTree, offset, storeSize, rhsVNPair); + rhsVNPair.SetBoth(initObjVN); } - else if (lclVarTree->HasSsaName()) + else if (lhs->OperIs(GT_LCL_VAR) && lhsVarDsc->CanBeReplacedWithItsField(this)) { - // The local wasn't in SSA, the tree is still an SSA def. 
There is only one - // case when this can happen - a promoted "CanBeReplacedWithItsField" struct. - assert((lhs == lclVarTree) && rhs->IsCall() && isEntire); - assert(lhsVarDsc->CanBeReplacedWithItsField(this)); - // Give a new, unique, VN to the field. - LclVarDsc* fieldVarDsc = lvaGetDesc(lhsVarDsc->lvFieldLclStart); - LclSsaVarDsc* fieldVarSsaDsc = fieldVarDsc->GetPerSsaData(lclVarTree->GetSsaNum()); - ValueNum newUniqueVN = vnStore->VNForExpr(compCurBB, fieldVarDsc->TypeGet()); - - fieldVarSsaDsc->m_vnPair.SetBoth(newUniqueVN); - - JITDUMP("Tree [%06u] assigned VN to the only field V%02u/%u of promoted struct V%02u: new uniq ", - dspTreeID(tree), lhsVarDsc->lvFieldLclStart, lclVarTree->GetSsaNum(), lhsLclNum); - JITDUMPEXEC(vnPrint(newUniqueVN, 1)); - JITDUMP("\n"); - } - else if (lhsVarDsc->IsAddressExposed()) - { - fgMutateAddressExposedLocal(tree DEBUGARG("INITBLK/COPYBLK - address-exposed local")); + // TODO-CQ: remove this zero-diff quirk. + rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lvaGetDesc(lhsVarDsc->lvFieldLclStart)->TypeGet())); } else { - JITDUMP("LHS V%02u not in ssa at [%06u], so no VN assigned\n", lhsLclNum, dspTreeID(lclVarTree)); + assert(tree->OperIsCopyBlkOp()); + rhsVNPair = vnStore->VNPNormalPair(rhs->gtVNPair); } + + fgValueNumberLocalStore(tree, lclVarTree, offset, storeSize, rhsVNPair); } else { @@ -8497,8 +8502,7 @@ void Compiler::fgValueNumberTree(GenTree* tree) { if (lcl->HasSsaName()) { - // We expect all uses of promoted structs to be replaced with uses of their fields. - assert(lvaInSsa(lclNum) && !varDsc->CanBeReplacedWithItsField(this)); + assert(lvaInSsa(lclNum)); ValueNumPair wholeLclVarVNP = varDsc->GetPerSsaData(lcl->GetSsaNum())->m_vnPair; assert(wholeLclVarVNP.BothDefined()); @@ -8557,7 +8561,7 @@ void Compiler::fgValueNumberTree(GenTree* tree) { unsigned lclNum = lclFld->GetLclNum(); - if (!lvaInSsa(lclFld->GetLclNum()) || !lclFld->HasSsaName()) + if (!lclFld->HasSsaName()) { lclFld->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lclFld->TypeGet())); } @@ -8719,8 +8723,7 @@ void Compiler::fgValueNumberTree(GenTree* tree) { tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, loadType)); } - else if (addr->DefinesLocalAddr(&lclVarTree, &offset) && lvaInSsa(lclVarTree->GetLclNum()) && - lclVarTree->HasSsaName()) + else if (addr->DefinesLocalAddr(&lclVarTree, &offset) && lclVarTree->HasSsaName()) { ValueNumPair lclVNPair = lvaGetDesc(lclVarTree)->GetPerSsaData(lclVarTree->GetSsaNum())->m_vnPair; unsigned lclSize = lvaLclExactSize(lclVarTree->GetLclNum()); @@ -10009,11 +10012,11 @@ void Compiler::fgValueNumberCall(GenTreeCall* call) // as well. 
GenTreeLclVarCommon* lclVarTree = nullptr; ssize_t offset = 0; - if (call->DefinesLocal(this, &lclVarTree, /* pIsEntire */ nullptr, &offset)) + unsigned storeSize = 0; + if (call->DefinesLocal(this, &lclVarTree, /* pIsEntire */ nullptr, &offset, &storeSize)) { ValueNumPair storeValue; storeValue.SetBoth(vnStore->VNForExpr(compCurBB, TYP_STRUCT)); - unsigned storeSize = typGetObjLayout(call->gtRetClsHnd)->GetSize(); fgValueNumberLocalStore(call, lclVarTree, offset, storeSize, storeValue); } diff --git a/src/coreclr/jit/valuenum.h b/src/coreclr/jit/valuenum.h index dad46cb8cd840..483641f703e4a 100644 --- a/src/coreclr/jit/valuenum.h +++ b/src/coreclr/jit/valuenum.h @@ -777,7 +777,7 @@ class ValueNumStore ValueNumPair VNPairForStore( ValueNumPair locationValue, unsigned locationSize, ssize_t offset, unsigned storeSize, ValueNumPair value); - bool LoadStoreIsEntire(unsigned locationSize, ssize_t offset, unsigned indSize) const + static bool LoadStoreIsEntire(unsigned locationSize, ssize_t offset, unsigned indSize) { return (offset == 0) && (locationSize == indSize); }
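// Illustrative end-to-end walkthrough (assumed locals and SSA numbers) of a partial def of
// a promoted struct V01 with two int fields, V02 at offset 0 and V03 at offset 4, e.g. the
// tree "ASG(LCL_FLD int V01 [+4], x)":
//
//   DefinesLocal             -> lclNode = V01 node, isFullDef = false, offset = 4, size = 4
//   gtStoreDefinesField(V02) -> false: the store [4, 8) does not touch the field [0, 4)
//   gtStoreDefinesField(V03) -> true: fieldStoreOffset = 0, fieldStoreSize = 4, an "entire" def
//   RenamePushDef(V03)       -> allocates a fresh SSA number for V03, say 3, and pushes it on
//                               the rename stack (a partial overlap would also record the prior
//                               number via SetUseDefSsaNum)
//   lclNode->SetSsaNum(compiler, /* index */ 1, /* ssaNum */ 3) -> composite name on the V01 node
//
// Value numbering later walks the same composite name with GetSsaNum(this, index) and
// gtStoreDefinesField to assign V03's new def its value number, leaving V02's current def alone.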