diff --git a/src/coreclr/jit/block.cpp b/src/coreclr/jit/block.cpp index 31e16f084cf31..2407afea59571 100644 --- a/src/coreclr/jit/block.cpp +++ b/src/coreclr/jit/block.cpp @@ -28,6 +28,10 @@ size_t BasicBlock::s_Count; // The max # of tree nodes in any BB /* static */ unsigned BasicBlock::s_nMaxTrees; + +// Temporary target to initialize blocks with jumps +/* static */ +BasicBlock BasicBlock::bbTempJumpDest; #endif // DEBUG #ifdef DEBUG @@ -1441,7 +1445,7 @@ bool BasicBlock::endsWithTailCallConvertibleToLoop(Compiler* comp, GenTree** tai * Allocate a basic block but don't append it to the current BB list. */ -BasicBlock* Compiler::bbNewBasicBlock(BBjumpKinds jumpKind) +BasicBlock* Compiler::bbNewBasicBlock() { BasicBlock* block; @@ -1492,15 +1496,6 @@ BasicBlock* Compiler::bbNewBasicBlock(BBjumpKinds jumpKind) block->bbEntryState = nullptr; - /* Record the jump kind in the block */ - - block->SetJumpKind(jumpKind DEBUG_ARG(this)); - - if (jumpKind == BBJ_THROW) - { - block->bbSetRunRarely(); - } - #ifdef DEBUG if (verbose) { @@ -1551,6 +1546,33 @@ BasicBlock* Compiler::bbNewBasicBlock(BBjumpKinds jumpKind) return block; } +BasicBlock* Compiler::bbNewBasicBlock(BBjumpKinds jumpKind, BasicBlock* jumpDest /* = nullptr */) +{ + BasicBlock* block = bbNewBasicBlock(); + block->SetJumpKindAndTarget(jumpKind, jumpDest DEBUG_ARG(this)); + + if (jumpKind == BBJ_THROW) + { + block->bbSetRunRarely(); + } + + return block; +} + +BasicBlock* Compiler::bbNewBasicBlock(BBswtDesc* jumpSwt) +{ + BasicBlock* block = bbNewBasicBlock(); + block->SetSwitchKindAndTarget(jumpSwt); + return block; +} + +BasicBlock* Compiler::bbNewBasicBlock(BBjumpKinds jumpKind, unsigned jumpOffs) +{ + BasicBlock* block = bbNewBasicBlock(); + block->SetJumpKindAndTarget(jumpKind, jumpOffs); + return block; +} + //------------------------------------------------------------------------ // isBBCallAlwaysPair: Determine if this is the first block of a BBJ_CALLFINALLY/BBJ_ALWAYS pair // diff --git 
a/src/coreclr/jit/block.h b/src/coreclr/jit/block.h index b636cbbcded3e..ecdce490528f6 100644 --- a/src/coreclr/jit/block.h +++ b/src/coreclr/jit/block.h @@ -538,20 +538,26 @@ struct BasicBlock : private LIR::Range }; public: +#ifdef DEBUG + // When creating a block with a jump, we require its jump kind and target be initialized simultaneously. + // In a few edge cases (for example, in Compiler::impImportLeave), we don't know the jump target at block creation. + // In these cases, temporarily set the jump target to bbTempJumpDest, and update the jump target later. + // We won't check jump targets against bbTempJumpDest in Release builds. + static BasicBlock bbTempJumpDest; +#endif // DEBUG + BBjumpKinds GetJumpKind() const { return bbJumpKind; } - void SetJumpKind(BBjumpKinds jumpKind DEBUG_ARG(Compiler* compiler)) + void SetJumpKind(BBjumpKinds jumpKind) { -#ifdef DEBUG - // BBJ_NONE should only be assigned when optimizing jumps in Compiler::optOptimizeLayout - // TODO: Change assert to check if compiler is in appropriate optimization phase to use BBJ_NONE - // (right now, this assertion does the null check to avoid unused variable warnings) - assert((jumpKind != BBJ_NONE) || (compiler != nullptr)); -#endif // DEBUG + // If this block's jump kind requires a target, ensure it is already set + assert(HasJump() || !KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_COND, BBJ_EHCATCHRET, BBJ_LEAVE)); bbJumpKind = jumpKind; + // If new jump kind requires a target, ensure a target is already set + assert(HasJump() || !KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_COND, BBJ_EHCATCHRET, BBJ_LEAVE)); } BasicBlock* Prev() const @@ -611,54 +617,84 @@ struct BasicBlock : private LIR::Range return bbJumpOffs; } - void SetJumpOffs(unsigned jumpOffs) + void SetJumpKindAndTarget(BBjumpKinds jumpKind, unsigned jumpOffs) { + bbJumpKind = jumpKind; bbJumpOffs = jumpOffs; + assert(KindIs(BBJ_ALWAYS, BBJ_COND, BBJ_LEAVE)); } BasicBlock* GetJumpDest() const { + // If bbJumpKind indicates this block has 
a jump, bbJumpDest cannot be null + assert(HasJump() || !KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_COND, BBJ_EHCATCHRET, BBJ_LEAVE)); return bbJumpDest; } void SetJumpDest(BasicBlock* jumpDest) { + // If bbJumpKind indicates this block has a jump, + // bbJumpDest should have previously been set in SetJumpKindAndTarget(). + assert(HasJump() || !KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_COND, BBJ_EHCATCHRET, BBJ_LEAVE)); + + // SetJumpKindAndTarget() nulls jumpDest for non-jump kinds, + // so don't use SetJumpDest() to null bbJumpDest without updating bbJumpKind. bbJumpDest = jumpDest; + assert(HasJump()); } - void SetJumpKindAndTarget(BBjumpKinds jumpKind, BasicBlock* jumpDest) + void SetJumpKindAndTarget(BBjumpKinds jumpKind, BasicBlock* jumpDest DEBUG_ARG(Compiler* compiler)) { - assert(jumpDest != nullptr); +#ifdef DEBUG + // BBJ_NONE should only be assigned when optimizing jumps in Compiler::optOptimizeLayout + // TODO: Change assert to check if compiler is in appropriate optimization phase to use BBJ_NONE + // + // (right now, this assertion does the null check to avoid unused variable warnings) + assert((jumpKind != BBJ_NONE) || (compiler != nullptr)); +#endif // DEBUG + bbJumpKind = jumpKind; bbJumpDest = jumpDest; - assert(KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_COND, BBJ_EHCATCHRET, BBJ_LEAVE)); + + // If bbJumpKind indicates this block has a jump, bbJumpDest cannot be null + assert(HasJump() || !KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_COND, BBJ_EHCATCHRET, BBJ_LEAVE)); + } + + void SetJumpKindAndTarget(BBjumpKinds jumpKind DEBUG_ARG(Compiler* compiler)) + { + BasicBlock* jumpDest = nullptr; + SetJumpKindAndTarget(jumpKind, jumpDest DEBUG_ARG(compiler)); + } + + bool HasJump() const + { + return (bbJumpDest != nullptr); } bool HasJumpTo(const BasicBlock* jumpDest) const { + assert(HasJump()); + assert(!KindIs(BBJ_SWITCH, BBJ_EHFINALLYRET)); return (bbJumpDest == jumpDest); } bool JumpsToNext() const { + assert(HasJump()); return (bbJumpDest == bbNext); } 
BBswtDesc* GetJumpSwt() const { + assert(KindIs(BBJ_SWITCH)); + assert(bbJumpSwt != nullptr); return bbJumpSwt; } - void SetJumpSwt(BBswtDesc* jumpSwt) + void SetSwitchKindAndTarget(BBswtDesc* jumpSwt) { - bbJumpSwt = jumpSwt; - } - - void SetJumpKindAndTarget(BBjumpKinds jumpKind, BBswtDesc* jumpSwt) - { - assert(jumpKind == BBJ_SWITCH); assert(jumpSwt != nullptr); - bbJumpKind = jumpKind; + bbJumpKind = BBJ_SWITCH; bbJumpSwt = jumpSwt; } @@ -1739,7 +1775,7 @@ inline BBArrayIterator BBEhfSuccList::end() const inline BasicBlock::BBSuccList::BBSuccList(const BasicBlock* block) { assert(block != nullptr); - switch (block->GetJumpKind()) + switch (block->bbJumpKind) { case BBJ_THROW: case BBJ_RETURN: @@ -1754,19 +1790,19 @@ inline BasicBlock::BBSuccList::BBSuccList(const BasicBlock* block) case BBJ_EHCATCHRET: case BBJ_EHFILTERRET: case BBJ_LEAVE: - m_succs[0] = block->GetJumpDest(); + m_succs[0] = block->bbJumpDest; m_begin = &m_succs[0]; m_end = &m_succs[1]; break; case BBJ_NONE: - m_succs[0] = block->Next(); + m_succs[0] = block->bbNext; m_begin = &m_succs[0]; m_end = &m_succs[1]; break; case BBJ_COND: - m_succs[0] = block->Next(); + m_succs[0] = block->bbNext; m_begin = &m_succs[0]; // If both fall-through and branch successors are identical, then only include @@ -1777,7 +1813,7 @@ inline BasicBlock::BBSuccList::BBSuccList(const BasicBlock* block) } else { - m_succs[1] = block->GetJumpDest(); + m_succs[1] = block->bbJumpDest; m_end = &m_succs[2]; } break; @@ -1800,10 +1836,10 @@ inline BasicBlock::BBSuccList::BBSuccList(const BasicBlock* block) case BBJ_SWITCH: // We don't use the m_succs in-line data for switches; use the existing jump table in the block. 
- assert(block->GetJumpSwt() != nullptr); - assert(block->GetJumpSwt()->bbsDstTab != nullptr); - m_begin = block->GetJumpSwt()->bbsDstTab; - m_end = block->GetJumpSwt()->bbsDstTab + block->GetJumpSwt()->bbsCount; + assert(block->bbJumpSwt != nullptr); + assert(block->bbJumpSwt->bbsDstTab != nullptr); + m_begin = block->bbJumpSwt->bbsDstTab; + m_end = block->bbJumpSwt->bbsDstTab + block->bbJumpSwt->bbsCount; break; default: diff --git a/src/coreclr/jit/codegenarm.cpp b/src/coreclr/jit/codegenarm.cpp index d6df2be573d9c..2015449fc8664 100644 --- a/src/coreclr/jit/codegenarm.cpp +++ b/src/coreclr/jit/codegenarm.cpp @@ -125,7 +125,7 @@ BasicBlock* CodeGen::genCallFinally(BasicBlock* block) assert(!block->IsLast()); assert(block->Next()->KindIs(BBJ_ALWAYS)); - assert(!block->Next()->HasJumpTo(nullptr)); + assert(block->Next()->HasJump()); assert(block->Next()->GetJumpDest()->bbFlags & BBF_FINALLY_TARGET); bbFinallyRet = block->Next()->GetJumpDest(); diff --git a/src/coreclr/jit/codegencommon.cpp b/src/coreclr/jit/codegencommon.cpp index 6d6dc3745e594..83270d95b2564 100644 --- a/src/coreclr/jit/codegencommon.cpp +++ b/src/coreclr/jit/codegencommon.cpp @@ -856,7 +856,8 @@ BasicBlock* CodeGen::genCreateTempLabel() compiler->fgSafeBasicBlockCreation = true; #endif - BasicBlock* block = compiler->bbNewBasicBlock(BBJ_NONE); + // Label doesn't need a jump kind + BasicBlock* block = compiler->bbNewBasicBlock(); #ifdef DEBUG compiler->fgSafeBasicBlockCreation = false; diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h index e2fe9144ed4e8..5bfeb5d7de5e5 100644 --- a/src/coreclr/jit/compiler.h +++ b/src/coreclr/jit/compiler.h @@ -3221,7 +3221,10 @@ class Compiler bool fgSafeBasicBlockCreation; #endif - BasicBlock* bbNewBasicBlock(BBjumpKinds jumpKind); + BasicBlock* bbNewBasicBlock(); + BasicBlock* bbNewBasicBlock(BBjumpKinds jumpKind, BasicBlock* jumpDest = nullptr); + BasicBlock* bbNewBasicBlock(BBswtDesc* jumpSwt); + BasicBlock* bbNewBasicBlock(BBjumpKinds 
jumpKind, unsigned jumpOffs); /* XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX @@ -4582,7 +4585,6 @@ class Compiler } } - BasicBlock* fgNewBasicBlock(BBjumpKinds jumpKind); bool fgEnsureFirstBBisScratch(); bool fgFirstBBisScratch(); bool fgBBisScratch(BasicBlock* block); @@ -4590,31 +4592,34 @@ class Compiler void fgExtendEHRegionBefore(BasicBlock* block); void fgExtendEHRegionAfter(BasicBlock* block); - BasicBlock* fgNewBBbefore(BBjumpKinds jumpKind, BasicBlock* block, bool extendRegion); + BasicBlock* fgNewBBbefore(BBjumpKinds jumpKind, BasicBlock* block, bool extendRegion, BasicBlock* jumpDest = nullptr); - BasicBlock* fgNewBBafter(BBjumpKinds jumpKind, BasicBlock* block, bool extendRegion); + BasicBlock* fgNewBBafter(BBjumpKinds jumpKind, BasicBlock* block, bool extendRegion, BasicBlock* jumpDest = nullptr); - BasicBlock* fgNewBBFromTreeAfter(BBjumpKinds jumpKind, BasicBlock* block, GenTree* tree, DebugInfo& debugInfo, bool updateSideEffects = false); + BasicBlock* fgNewBBFromTreeAfter(BBjumpKinds jumpKind, BasicBlock* block, GenTree* tree, DebugInfo& debugInfo, BasicBlock* jumpDest = nullptr, bool updateSideEffects = false); BasicBlock* fgNewBBinRegion(BBjumpKinds jumpKind, unsigned tryIndex, unsigned hndIndex, BasicBlock* nearBlk, + BasicBlock* jumpDest = nullptr, bool putInFilter = false, bool runRarely = false, bool insertAtEnd = false); BasicBlock* fgNewBBinRegion(BBjumpKinds jumpKind, BasicBlock* srcBlk, + BasicBlock* jumpDest = nullptr, bool runRarely = false, bool insertAtEnd = false); - BasicBlock* fgNewBBinRegion(BBjumpKinds jumpKind); + BasicBlock* fgNewBBinRegion(BBjumpKinds jumpKind, BasicBlock* jumpDest = nullptr); BasicBlock* fgNewBBinRegionWorker(BBjumpKinds jumpKind, BasicBlock* afterBlk, unsigned xcptnIndex, - bool putInTryRegion); + bool putInTryRegion, + BasicBlock* jumpDest = nullptr); void fgInsertBBbefore(BasicBlock* insertBeforeBlk, BasicBlock* newBlk); void fgInsertBBafter(BasicBlock* insertAfterBlk, 
BasicBlock* newBlk); diff --git a/src/coreclr/jit/fgbasic.cpp b/src/coreclr/jit/fgbasic.cpp index 5218d9a679454..d6ba319a9631a 100644 --- a/src/coreclr/jit/fgbasic.cpp +++ b/src/coreclr/jit/fgbasic.cpp @@ -187,43 +187,6 @@ void Compiler::fgInit() fgPredListSortVector = nullptr; } -/***************************************************************************** - * - * Create a basic block and append it to the current BB list. - */ - -BasicBlock* Compiler::fgNewBasicBlock(BBjumpKinds jumpKind) -{ - // This method must not be called after the exception table has been - // constructed, because it doesn't not provide support for patching - // the exception table. - - noway_assert(compHndBBtabCount == 0); - - BasicBlock* block; - - /* Allocate the block descriptor */ - - block = bbNewBasicBlock(jumpKind); - noway_assert(block->KindIs(jumpKind)); - - /* Append the block to the end of the global basic block list */ - - if (fgFirstBB) - { - fgLastBB->SetNext(block); - } - else - { - fgFirstBB = block; - block->SetPrev(nullptr); - } - - fgLastBB = block; - - return block; -} - //------------------------------------------------------------------------ // fgEnsureFirstBBisScratch: Ensure that fgFirstBB is a scratch BasicBlock // @@ -400,7 +363,7 @@ void Compiler::fgConvertBBToThrowBB(BasicBlock* block) fgRemoveBlockAsPred(block); // Update jump kind after the scrub. - block->SetJumpKind(BBJ_THROW DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_THROW DEBUG_ARG(this)); // Any block with a throw is rare block->bbSetRunRarely(); @@ -2968,7 +2931,9 @@ void Compiler::fgLinkBasicBlocks() case BBJ_LEAVE: { BasicBlock* const jumpDest = fgLookupBB(curBBdesc->GetJumpOffs()); - curBBdesc->SetJumpDest(jumpDest); + // Redundantly use SetJumpKindAndTarget() instead of SetJumpDest() just this once, + // so we don't break the HasJump() invariant of SetJumpDest(). 
+ curBBdesc->SetJumpKindAndTarget(curBBdesc->GetJumpKind(), jumpDest DEBUG_ARG(this)); fgAddRefPred(jumpDest, curBBdesc, oldEdge); if (curBBdesc->GetJumpDest()->bbNum <= curBBdesc->bbNum) @@ -3508,31 +3473,44 @@ unsigned Compiler::fgMakeBasicBlocks(const BYTE* codeAddr, IL_OFFSET codeSize, F /* We need to create a new basic block */ - curBBdesc = fgNewBasicBlock(jmpKind); - - curBBdesc->bbFlags |= bbFlags; - curBBdesc->bbRefs = 0; - - curBBdesc->bbCodeOffs = curBBoffs; - curBBdesc->bbCodeOffsEnd = nxtBBoffs; - switch (jmpKind) { case BBJ_SWITCH: - curBBdesc->SetJumpSwt(swtDsc); + curBBdesc = bbNewBasicBlock(swtDsc); break; case BBJ_COND: case BBJ_ALWAYS: case BBJ_LEAVE: noway_assert(jmpAddr != DUMMY_INIT(BAD_IL_OFFSET)); - curBBdesc->SetJumpOffs(jmpAddr); + curBBdesc = bbNewBasicBlock(jmpKind, jmpAddr); break; default: + curBBdesc = bbNewBasicBlock(jmpKind); break; } + curBBdesc->bbFlags |= bbFlags; + curBBdesc->bbRefs = 0; + + curBBdesc->bbCodeOffs = curBBoffs; + curBBdesc->bbCodeOffsEnd = nxtBBoffs; + + /* Append the block to the end of the global basic block list */ + + if (fgFirstBB) + { + fgLastBB->SetNext(curBBdesc); + } + else + { + fgFirstBB = curBBdesc; + curBBdesc->SetPrev(nullptr); + } + + fgLastBB = curBBdesc; + DBEXEC(verbose, curBBdesc->dspBlockHeader(this, false, false, false)); /* Remember where the next BB will start */ @@ -3994,7 +3972,7 @@ void Compiler::fgFindBasicBlocks() // BBJ_EHFINALLYRET that were imported to BBJ_EHFAULTRET. 
if ((hndBegBB->bbCatchTyp == BBCT_FAULT) && block->KindIs(BBJ_EHFINALLYRET)) { - block->SetJumpKind(BBJ_EHFAULTRET DEBUG_ARG(this)); + block->SetJumpKind(BBJ_EHFAULTRET); } } @@ -4203,7 +4181,7 @@ void Compiler::fgFixEntryFlowForOSR() fgEnsureFirstBBisScratch(); assert(fgFirstBB->KindIs(BBJ_NONE)); fgRemoveRefPred(fgFirstBB->Next(), fgFirstBB); - fgFirstBB->SetJumpKindAndTarget(BBJ_ALWAYS, fgOSREntryBB); + fgFirstBB->SetJumpKindAndTarget(BBJ_ALWAYS, fgOSREntryBB DEBUG_ARG(this)); FlowEdge* const edge = fgAddRefPred(fgOSREntryBB, fgFirstBB); edge->setLikelihood(1.0); @@ -4745,16 +4723,17 @@ BasicBlock* Compiler::fgSplitBlockAtEnd(BasicBlock* curr) { // We'd like to use fgNewBBafter(), but we need to update the preds list before linking in the new block. // (We need the successors of 'curr' to be correct when we do this.) - BasicBlock* newBlock = bbNewBasicBlock(curr->GetJumpKind()); - - // Start the new block with no refs. When we set the preds below, this will get updated correctly. - newBlock->bbRefs = 0; + BasicBlock* newBlock; // For each successor of the original block, set the new block as their predecessor. // Note we are using the "rational" version of the successor iterator that does not hide the finallyret arcs. // Without these arcs, a block 'b' may not be a member of succs(preds(b)) if (!curr->KindIs(BBJ_SWITCH)) { + // Start the new block with no refs. When we set the preds below, this will get updated correctly. + newBlock = bbNewBasicBlock(curr->GetJumpKind(), curr->GetJumpDest()); + newBlock->bbRefs = 0; + for (BasicBlock* const succ : curr->Succs(this)) { if (succ != newBlock) @@ -4764,19 +4743,16 @@ BasicBlock* Compiler::fgSplitBlockAtEnd(BasicBlock* curr) fgReplacePred(succ, curr, newBlock); } } - - newBlock->SetJumpDest(curr->GetJumpDest()); - curr->SetJumpDest(nullptr); } else { + // Start the new block with no refs. When we set the preds below, this will get updated correctly. 
+ newBlock = bbNewBasicBlock(curr->GetJumpSwt()); + newBlock->bbRefs = 0; + // In the case of a switch statement there's more complicated logic in order to wire up the predecessor lists // but fortunately there's an existing method that implements this functionality. - newBlock->SetJumpSwt(curr->GetJumpSwt()); - fgChangeSwitchBlock(curr, newBlock); - - curr->SetJumpSwt(nullptr); + fgChangeSwitchBlock(curr, newBlock); } newBlock->inheritWeight(curr); @@ -4812,7 +4788,7 @@ BasicBlock* Compiler::fgSplitBlockAtEnd(BasicBlock* curr) curr->bbFlags &= ~(BBF_HAS_JMP | BBF_RETLESS_CALL); // Default to fallthru, and add the arc for that. - curr->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + curr->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); fgAddRefPred(newBlock, curr); return newBlock; @@ -5049,9 +5025,8 @@ BasicBlock* Compiler::fgSplitEdge(BasicBlock* curr, BasicBlock* succ) } else { - newBlock = fgNewBBinRegion(BBJ_ALWAYS, curr, curr->isRunRarely()); // The new block always jumps to 'succ' - newBlock->SetJumpDest(succ); + newBlock = fgNewBBinRegion(BBJ_ALWAYS, curr, /* jumpDest */ succ, /* runRarely */ curr->isRunRarely()); } newBlock->bbFlags |= (curr->bbFlags & succ->bbFlags & (BBF_BACKWARD_JUMP)); @@ -5251,7 +5226,7 @@ void Compiler::fgRemoveBlock(BasicBlock* block, bool unreachable) // Note that we don't do it if bPrev follows a BBJ_CALLFINALLY block (BBF_KEEP_BBJ_ALWAYS), // because that would violate our invariant that BBJ_CALLFINALLY blocks are followed by // BBJ_ALWAYS blocks. 
- bPrev->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + bPrev->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } // If this is the first Cold basic block update fgFirstColdBlock @@ -5450,7 +5425,7 @@ void Compiler::fgRemoveBlock(BasicBlock* block, bool unreachable) if (block->KindIs(BBJ_ALWAYS)) { /* bPrev now becomes a BBJ_ALWAYS */ - bPrev->SetJumpKindAndTarget(BBJ_ALWAYS, succBlock); + bPrev->SetJumpKindAndTarget(BBJ_ALWAYS, succBlock DEBUG_ARG(this)); } break; @@ -5520,7 +5495,7 @@ void Compiler::fgRemoveBlock(BasicBlock* block, bool unreachable) if ((bPrev == fgFirstBB) || !bPrev->isBBCallAlwaysPairTail()) { // It's safe to change the jump type - bPrev->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + bPrev->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } } break; @@ -5568,7 +5543,7 @@ BasicBlock* Compiler::fgConnectFallThrough(BasicBlock* bSrc, BasicBlock* bDst) switch (bSrc->GetJumpKind()) { case BBJ_NONE: - bSrc->SetJumpKindAndTarget(BBJ_ALWAYS, bDst); + bSrc->SetJumpKindAndTarget(BBJ_ALWAYS, bDst DEBUG_ARG(this)); JITDUMP("Block " FMT_BB " ended with a BBJ_NONE, Changed to an unconditional jump to " FMT_BB "\n", bSrc->bbNum, bSrc->GetJumpDest()->bbNum); break; @@ -5577,7 +5552,7 @@ BasicBlock* Compiler::fgConnectFallThrough(BasicBlock* bSrc, BasicBlock* bDst) case BBJ_COND: // Add a new block after bSrc which jumps to 'bDst' - jmpBlk = fgNewBBafter(BBJ_ALWAYS, bSrc, true); + jmpBlk = fgNewBBafter(BBJ_ALWAYS, bSrc, true, bDst); fgAddRefPred(jmpBlk, bSrc, fgGetPredForBlock(bDst, bSrc)); // Record the loop number in the new block @@ -5626,8 +5601,6 @@ BasicBlock* Compiler::fgConnectFallThrough(BasicBlock* bSrc, BasicBlock* bDst) } } - jmpBlk->SetJumpDest(bDst); - fgReplacePred(bDst, bSrc, jmpBlk); JITDUMP("Added an unconditional jump to " FMT_BB " after block " FMT_BB "\n", @@ -5646,7 +5619,7 @@ BasicBlock* Compiler::fgConnectFallThrough(BasicBlock* bSrc, BasicBlock* bDst) // if (bSrc->KindIs(BBJ_ALWAYS) && !(bSrc->bbFlags & BBF_KEEP_BBJ_ALWAYS) && bSrc->JumpsToNext()) { 
- bSrc->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + bSrc->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); JITDUMP("Changed an unconditional jump from " FMT_BB " to the next block " FMT_BB " into a BBJ_NONE block\n", bSrc->bbNum, bSrc->Next()->bbNum); @@ -6244,11 +6217,14 @@ bool Compiler::fgMightHaveLoop() * Insert a BasicBlock before the given block. */ -BasicBlock* Compiler::fgNewBBbefore(BBjumpKinds jumpKind, BasicBlock* block, bool extendRegion) +BasicBlock* Compiler::fgNewBBbefore(BBjumpKinds jumpKind, + BasicBlock* block, + bool extendRegion, + BasicBlock* jumpDest /* = nullptr */) { // Create a new BasicBlock and chain it in - BasicBlock* newBlk = bbNewBasicBlock(jumpKind); + BasicBlock* newBlk = bbNewBasicBlock(jumpKind, jumpDest); newBlk->bbFlags |= BBF_INTERNAL; fgInsertBBbefore(block, newBlk); @@ -6283,11 +6259,14 @@ BasicBlock* Compiler::fgNewBBbefore(BBjumpKinds jumpKind, BasicBlock* block, boo * Insert a BasicBlock after the given block. */ -BasicBlock* Compiler::fgNewBBafter(BBjumpKinds jumpKind, BasicBlock* block, bool extendRegion) +BasicBlock* Compiler::fgNewBBafter(BBjumpKinds jumpKind, + BasicBlock* block, + bool extendRegion, + BasicBlock* jumpDest /* = nullptr */) { // Create a new BasicBlock and chain it in - BasicBlock* newBlk = bbNewBasicBlock(jumpKind); + BasicBlock* newBlk = bbNewBasicBlock(jumpKind, jumpDest); newBlk->bbFlags |= BBF_INTERNAL; fgInsertBBafter(block, newBlk); @@ -6327,6 +6306,7 @@ BasicBlock* Compiler::fgNewBBafter(BBjumpKinds jumpKind, BasicBlock* block, bool // tree - tree that will be wrapped into a statement and // inserted in the new block. // debugInfo - debug info to propagate into the new statement. +// jumpDest - the jump target of the new block. Defaults to nullptr. // updateSideEffects - update side effects for the whole statement. 
// // Return Value: @@ -6335,10 +6315,14 @@ BasicBlock* Compiler::fgNewBBafter(BBjumpKinds jumpKind, BasicBlock* block, bool // Notes: // The new block will have BBF_INTERNAL flag and EH region will be extended // -BasicBlock* Compiler::fgNewBBFromTreeAfter( - BBjumpKinds jumpKind, BasicBlock* block, GenTree* tree, DebugInfo& debugInfo, bool updateSideEffects) +BasicBlock* Compiler::fgNewBBFromTreeAfter(BBjumpKinds jumpKind, + BasicBlock* block, + GenTree* tree, + DebugInfo& debugInfo, + BasicBlock* jumpDest /* = nullptr */, + bool updateSideEffects /* = false */) { - BasicBlock* newBlock = fgNewBBafter(jumpKind, block, true); + BasicBlock* newBlock = fgNewBBafter(jumpKind, block, true, jumpDest); newBlock->bbFlags |= BBF_INTERNAL; Statement* stmt = fgNewStmtFromTree(tree, debugInfo); fgInsertStmtAtEnd(newBlock, stmt); @@ -6828,6 +6812,7 @@ BasicBlock* Compiler::fgFindInsertPoint(unsigned regionIndex, // [0..compHndBBtabCount]. // nearBlk - insert the new block closely after this block, if possible. If nullptr, put the new block anywhere // in the requested region. +// jumpDest - the jump target of the new block. Defaults to nullptr. // putInFilter - put the new block in the filter region given by hndIndex, as described above. // runRarely - 'true' if the new block is run rarely. // insertAtEnd - 'true' if the block should be inserted at the end of the region. 
Note: this is currently only @@ -6840,6 +6825,7 @@ BasicBlock* Compiler::fgNewBBinRegion(BBjumpKinds jumpKind, unsigned tryIndex, unsigned hndIndex, BasicBlock* nearBlk, + BasicBlock* jumpDest /* = nullptr */, bool putInFilter /* = false */, bool runRarely /* = false */, bool insertAtEnd /* = false */) @@ -6965,7 +6951,7 @@ _FoundAfterBlk:; BBjumpKindNames[jumpKind], tryIndex, hndIndex, dspBool(putInFilter), dspBool(runRarely), dspBool(insertAtEnd), afterBlk->bbNum); - return fgNewBBinRegionWorker(jumpKind, afterBlk, regionIndex, putInTryRegion); + return fgNewBBinRegionWorker(jumpKind, afterBlk, regionIndex, putInTryRegion, jumpDest); } //------------------------------------------------------------------------ @@ -6976,12 +6962,17 @@ _FoundAfterBlk:; // Arguments: // jumpKind - the jump kind of the new block to create. // srcBlk - insert the new block in the same EH region as this block, and closely after it if possible. +// jumpDest - the jump target of the new block. Defaults to nullptr. +// runRarely - 'true' if the new block is run rarely. +// insertAtEnd - 'true' if the block should be inserted at the end of the region. Note: this is currently only +// implemented when inserting into the main function (not into any EH region). // // Return Value: // The new block. 
BasicBlock* Compiler::fgNewBBinRegion(BBjumpKinds jumpKind, BasicBlock* srcBlk, + BasicBlock* jumpDest /* = nullptr */, bool runRarely /* = false */, bool insertAtEnd /* = false */) { @@ -7000,7 +6991,7 @@ BasicBlock* Compiler::fgNewBBinRegion(BBjumpKinds jumpKind, putInFilter = ehGetDsc(hndIndex - 1)->InFilterRegionBBRange(srcBlk); } - return fgNewBBinRegion(jumpKind, tryIndex, hndIndex, srcBlk, putInFilter, runRarely, insertAtEnd); + return fgNewBBinRegion(jumpKind, tryIndex, hndIndex, srcBlk, jumpDest, putInFilter, runRarely, insertAtEnd); } //------------------------------------------------------------------------ @@ -7010,13 +7001,14 @@ BasicBlock* Compiler::fgNewBBinRegion(BBjumpKinds jumpKind, // // Arguments: // jumpKind - the jump kind of the new block to create. +// jumpDest - the jump target of the new block. Defaults to nullptr. // // Return Value: // The new block. -BasicBlock* Compiler::fgNewBBinRegion(BBjumpKinds jumpKind) +BasicBlock* Compiler::fgNewBBinRegion(BBjumpKinds jumpKind, BasicBlock* jumpDest /* = nullptr */) { - return fgNewBBinRegion(jumpKind, 0, 0, nullptr, /* putInFilter */ false, /* runRarely */ false, + return fgNewBBinRegion(jumpKind, 0, 0, nullptr, jumpDest, /* putInFilter */ false, /* runRarely */ false, /* insertAtEnd */ true); } @@ -7035,6 +7027,7 @@ BasicBlock* Compiler::fgNewBBinRegion(BBjumpKinds jumpKind) // set its handler index to the most nested handler region enclosing that 'try' region. // Otherwise, put the block in the handler region specified by 'regionIndex', and set its 'try' // index to the most nested 'try' region enclosing that handler region. +// jumpDest - the jump target of the new block. Defaults to nullptr. // // Return Value: // The new block. 
@@ -7042,12 +7035,13 @@ BasicBlock* Compiler::fgNewBBinRegion(BBjumpKinds jumpKind) BasicBlock* Compiler::fgNewBBinRegionWorker(BBjumpKinds jumpKind, BasicBlock* afterBlk, unsigned regionIndex, - bool putInTryRegion) + bool putInTryRegion, + BasicBlock* jumpDest /* = nullptr */) { /* Insert the new block */ BasicBlock* afterBlkNext = afterBlk->Next(); (void)afterBlkNext; // prevent "unused variable" error from GCC - BasicBlock* newBlk = fgNewBBafter(jumpKind, afterBlk, false); + BasicBlock* newBlk = fgNewBBafter(jumpKind, afterBlk, false, jumpDest); if (putInTryRegion) { diff --git a/src/coreclr/jit/fgdiagnostic.cpp b/src/coreclr/jit/fgdiagnostic.cpp index 4f5ed77a1bc18..835c6f401fe9b 100644 --- a/src/coreclr/jit/fgdiagnostic.cpp +++ b/src/coreclr/jit/fgdiagnostic.cpp @@ -3039,6 +3039,13 @@ void Compiler::fgDebugCheckBBlist(bool checkBBNum /* = false */, bool checkBBRef assert(block->getTryIndex() < compHndBBtabCount); } + // Blocks with these jump kinds must have valid (non-temporary) jump targets + if (block->KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_COND, BBJ_EHCATCHRET, BBJ_LEAVE)) + { + assert(block->HasJump()); + assert(!block->HasJumpTo(&BasicBlock::bbTempJumpDest)); + } + // A branch or fall-through to a BBJ_CALLFINALLY block must come from the `try` region associated // with the finally block the BBJ_CALLFINALLY is targeting. There is one special case: if the // BBJ_CALLFINALLY is the first block of a `try`, then its predecessor can be outside the `try`: diff --git a/src/coreclr/jit/fgehopt.cpp b/src/coreclr/jit/fgehopt.cpp index 0411eee566c57..309bc9a5a5557 100644 --- a/src/coreclr/jit/fgehopt.cpp +++ b/src/coreclr/jit/fgehopt.cpp @@ -165,7 +165,7 @@ PhaseStatus Compiler::fgRemoveEmptyFinally() noway_assert(leaveBlock->KindIs(BBJ_ALWAYS)); - currentBlock->SetJumpKindAndTarget(BBJ_ALWAYS, postTryFinallyBlock); + currentBlock->SetJumpKindAndTarget(BBJ_ALWAYS, postTryFinallyBlock DEBUG_ARG(this)); // Ref count updates. 
fgAddRefPred(postTryFinallyBlock, currentBlock); @@ -454,7 +454,8 @@ PhaseStatus Compiler::fgRemoveEmptyTry() // Time to optimize. // // (1) Convert the callfinally to a normal jump to the handler - callFinally->SetJumpKind(BBJ_ALWAYS DEBUG_ARG(this)); + assert(callFinally->HasJump()); + callFinally->SetJumpKind(BBJ_ALWAYS); // Identify the leave block and the continuation BasicBlock* const leave = callFinally->Next(); @@ -527,7 +528,7 @@ PhaseStatus Compiler::fgRemoveEmptyTry() GenTree* finallyRetExpr = finallyRet->GetRootNode(); assert(finallyRetExpr->gtOper == GT_RETFILT); fgRemoveStmt(block, finallyRet); - block->SetJumpKindAndTarget(BBJ_ALWAYS, continuation); + block->SetJumpKindAndTarget(BBJ_ALWAYS, continuation DEBUG_ARG(this)); fgAddRefPred(continuation, block); fgRemoveRefPred(leave, block); } @@ -1020,10 +1021,6 @@ PhaseStatus Compiler::fgCloneFinally() { BasicBlock* newBlock; - // Avoid asserts when `fgNewBBinRegion` verifies the handler table, by mapping any cloned finally - // return blocks to BBJ_ALWAYS (which we would do below if we didn't do it here). - BBjumpKinds bbNewJumpKind = block->KindIs(BBJ_EHFINALLYRET) ? BBJ_ALWAYS : block->GetJumpKind(); - if (block == firstBlock) { // Put first cloned finally block into the appropriate @@ -1031,7 +1028,7 @@ PhaseStatus Compiler::fgCloneFinally() // callfinallys, depending on the EH implementation. const unsigned hndIndex = 0; BasicBlock* const nearBlk = cloneInsertAfter; - newBlock = fgNewBBinRegion(bbNewJumpKind, finallyTryIndex, hndIndex, nearBlk); + newBlock = fgNewBBinRegion(BBJ_NONE, finallyTryIndex, hndIndex, nearBlk); // If the clone ends up just after the finally, adjust // the stopping point for finally traversal. @@ -1045,7 +1042,7 @@ PhaseStatus Compiler::fgCloneFinally() { // Put subsequent blocks in the same region... 
const bool extendRegion = true; - newBlock = fgNewBBafter(bbNewJumpKind, insertAfter, extendRegion); + newBlock = fgNewBBafter(BBJ_NONE, insertAfter, extendRegion); } cloneBBCount++; @@ -1081,7 +1078,7 @@ PhaseStatus Compiler::fgCloneFinally() newBlock->clearHndIndex(); // Jump dests are set in a post-pass; make sure CloneBlockState hasn't tried to set them. - assert(newBlock->HasJumpTo(nullptr)); + assert(!newBlock->HasJump()); } if (!clonedOk) @@ -1103,6 +1100,8 @@ PhaseStatus Compiler::fgCloneFinally() for (BasicBlock* block = firstBlock; block != nextBlock; block = block->Next()) { BasicBlock* newBlock = blockMap[block]; + // Jump kind/target should not be set yet + assert(newBlock->KindIs(BBJ_NONE)); if (block->KindIs(BBJ_EHFINALLYRET)) { @@ -1110,8 +1109,7 @@ PhaseStatus Compiler::fgCloneFinally() GenTree* finallyRetExpr = finallyRet->GetRootNode(); assert(finallyRetExpr->gtOper == GT_RETFILT); fgRemoveStmt(newBlock, finallyRet); - assert(newBlock->KindIs(BBJ_ALWAYS)); // we mapped this above already - newBlock->SetJumpDest(normalCallFinallyReturn); + newBlock->SetJumpKindAndTarget(BBJ_ALWAYS, normalCallFinallyReturn DEBUG_ARG(this)); fgAddRefPred(normalCallFinallyReturn, newBlock); } @@ -1151,7 +1149,7 @@ PhaseStatus Compiler::fgCloneFinally() // This call returns to the expected spot, so // retarget it to branch to the clone. - currentBlock->SetJumpKindAndTarget(BBJ_ALWAYS, firstCloneBlock); + currentBlock->SetJumpKindAndTarget(BBJ_ALWAYS, firstCloneBlock DEBUG_ARG(this)); // Ref count updates. 
fgAddRefPred(firstCloneBlock, currentBlock); @@ -1211,7 +1209,7 @@ PhaseStatus Compiler::fgCloneFinally() if (block->KindIs(BBJ_EHFINALLYRET)) { assert(block->GetJumpEhf()->bbeCount == 0); - block->SetJumpKind(BBJ_EHFAULTRET DEBUG_ARG(this)); + block->SetJumpKind(BBJ_EHFAULTRET); } } } @@ -2258,7 +2256,7 @@ void Compiler::fgTailMergeThrowsFallThroughHelper(BasicBlock* predBlock, { assert(predBlock->NextIs(nonCanonicalBlock)); - BasicBlock* const newBlock = fgNewBBafter(BBJ_ALWAYS, predBlock, true); + BasicBlock* const newBlock = fgNewBBafter(BBJ_ALWAYS, predBlock, true, canonicalBlock); JITDUMP("*** " FMT_BB " now falling through to empty " FMT_BB " and then to " FMT_BB "\n", predBlock->bbNum, newBlock->bbNum, canonicalBlock->bbNum); @@ -2269,7 +2267,6 @@ void Compiler::fgTailMergeThrowsFallThroughHelper(BasicBlock* predBlock, // Wire up the new flow fgAddRefPred(newBlock, predBlock, predEdge); - newBlock->SetJumpDest(canonicalBlock); fgAddRefPred(canonicalBlock, newBlock, predEdge); // If nonCanonicalBlock has only one pred, all its flow transfers. 
diff --git a/src/coreclr/jit/fginline.cpp b/src/coreclr/jit/fginline.cpp index b1481c9d29c17..7c40d037d8a99 100644 --- a/src/coreclr/jit/fginline.cpp +++ b/src/coreclr/jit/fginline.cpp @@ -675,13 +675,13 @@ class SubstitutePlaceholdersAndDevirtualizeWalker : public GenTreeVisitorIsIntegralConst(0)) { - block->SetJumpKind(BBJ_ALWAYS DEBUG_ARG(m_compiler)); + block->SetJumpKind(BBJ_ALWAYS); m_compiler->fgRemoveRefPred(block->Next(), block); } else { - block->SetJumpKind(BBJ_NONE DEBUG_ARG(m_compiler)); m_compiler->fgRemoveRefPred(block->GetJumpDest(), block); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(m_compiler)); } } } @@ -1529,13 +1529,13 @@ void Compiler::fgInsertInlineeBlocks(InlineInfo* pInlineInfo) if (block->IsLast()) { JITDUMP("\nConvert bbJumpKind of " FMT_BB " to BBJ_NONE\n", block->bbNum); - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } else { JITDUMP("\nConvert bbJumpKind of " FMT_BB " to BBJ_ALWAYS to bottomBlock " FMT_BB "\n", block->bbNum, bottomBlock->bbNum); - block->SetJumpKindAndTarget(BBJ_ALWAYS, bottomBlock); + block->SetJumpKindAndTarget(BBJ_ALWAYS, bottomBlock DEBUG_ARG(this)); } fgAddRefPred(bottomBlock, block); diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp index 393924f8ec07e..79b098547523b 100644 --- a/src/coreclr/jit/fgopt.cpp +++ b/src/coreclr/jit/fgopt.cpp @@ -470,7 +470,7 @@ bool Compiler::fgRemoveUnreachableBlocks(CanRemoveBlockBody canRemoveBlock) block->bbFlags &= ~(BBF_REMOVED | BBF_INTERNAL); block->bbFlags |= BBF_IMPORTED; - block->SetJumpKind(BBJ_THROW DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_THROW DEBUG_ARG(this)); block->bbSetRunRarely(); // If this is a pair, we just converted it to a BBJ_THROW. @@ -1681,7 +1681,7 @@ PhaseStatus Compiler::fgPostImportationCleanup() // plausible flow target. Simplest is to just mark it as a throw. 
if (bbIsHandlerBeg(newTryEntry->Next())) { - newTryEntry->SetJumpKind(BBJ_THROW DEBUG_ARG(this)); + newTryEntry->SetJumpKindAndTarget(BBJ_THROW DEBUG_ARG(this)); } else { @@ -1818,7 +1818,7 @@ PhaseStatus Compiler::fgPostImportationCleanup() GenTree* const jumpIfEntryStateZero = gtNewOperNode(GT_JTRUE, TYP_VOID, compareEntryStateToZero); fgNewStmtAtBeg(fromBlock, jumpIfEntryStateZero); - fromBlock->SetJumpKindAndTarget(BBJ_COND, toBlock); + fromBlock->SetJumpKindAndTarget(BBJ_COND, toBlock DEBUG_ARG(this)); fgAddRefPred(toBlock, fromBlock); newBlock->inheritWeight(fromBlock); @@ -2368,7 +2368,7 @@ void Compiler::fgCompactBlocks(BasicBlock* block, BasicBlock* bNext) case BBJ_ALWAYS: case BBJ_EHCATCHRET: case BBJ_EHFILTERRET: - block->SetJumpKindAndTarget(bNext->GetJumpKind(), bNext->GetJumpDest()); + block->SetJumpKindAndTarget(bNext->GetJumpKind(), bNext->GetJumpDest() DEBUG_ARG(this)); /* Update the predecessor list for 'bNext->bbJumpDest' */ fgReplacePred(bNext->GetJumpDest(), bNext, block); @@ -2381,7 +2381,7 @@ void Compiler::fgCompactBlocks(BasicBlock* block, BasicBlock* bNext) break; case BBJ_NONE: - block->SetJumpKind(bNext->GetJumpKind() DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); /* Update the predecessor list for 'bNext->bbNext' */ fgReplacePred(bNext->Next(), bNext, block); break; @@ -2395,11 +2395,11 @@ void Compiler::fgCompactBlocks(BasicBlock* block, BasicBlock* bNext) case BBJ_THROW: case BBJ_RETURN: /* no jumps or fall through blocks to set here */ - block->SetJumpKind(bNext->GetJumpKind() DEBUG_ARG(this)); + block->SetJumpKind(bNext->GetJumpKind()); break; case BBJ_SWITCH: - block->SetJumpKindAndTarget(bNext->GetJumpKind(), bNext->GetJumpSwt()); + block->SetSwitchKindAndTarget(bNext->GetJumpSwt()); // We are moving the switch jump from bNext to block. 
Examine the jump targets // of the BBJ_SWITCH at bNext and replace the predecessor to 'bNext' with ones to 'block' fgChangeSwitchBlock(bNext, block); @@ -2410,6 +2410,8 @@ void Compiler::fgCompactBlocks(BasicBlock* block, BasicBlock* bNext) break; } + assert(block->KindIs(bNext->GetJumpKind())); + if (bNext->KindIs(BBJ_COND, BBJ_ALWAYS) && bNext->GetJumpDest()->isLoopAlign()) { // `bNext` has a backward target to some block which mean bNext is part of a loop. @@ -2639,12 +2641,11 @@ void Compiler::fgRemoveConditionalJump(BasicBlock* block) noway_assert(flow->getDupCount() == 2); // Change the BBJ_COND to BBJ_NONE, and adjust the refCount and dupCount. - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); --block->Next()->bbRefs; flow->decrementDupCount(); #ifdef DEBUG - block->SetJumpDest(nullptr); if (verbose) { printf("Block " FMT_BB " becoming a BBJ_NONE to " FMT_BB " (jump target is the same whether the condition" @@ -3315,7 +3316,7 @@ bool Compiler::fgOptimizeSwitchBranches(BasicBlock* block) } // Change the switch jump into a BBJ_ALWAYS - block->SetJumpKindAndTarget(BBJ_ALWAYS, block->GetJumpSwt()->bbsDstTab[0]); + block->SetJumpKindAndTarget(BBJ_ALWAYS, block->GetJumpSwt()->bbsDstTab[0] DEBUG_ARG(this)); if (jmpCnt > 1) { for (unsigned i = 1; i < jmpCnt; ++i) @@ -3379,7 +3380,7 @@ bool Compiler::fgOptimizeSwitchBranches(BasicBlock* block) fgSetStmtSeq(switchStmt); } - block->SetJumpKindAndTarget(BBJ_COND, block->GetJumpSwt()->bbsDstTab[0]); + block->SetJumpKindAndTarget(BBJ_COND, block->GetJumpSwt()->bbsDstTab[0] DEBUG_ARG(this)); JITDUMP("After:\n"); DISPNODE(switchTree); @@ -3790,18 +3791,18 @@ bool Compiler::fgOptimizeUncondBranchToSimpleCond(BasicBlock* block, BasicBlock* // Fix up block's flow // - block->SetJumpKindAndTarget(BBJ_COND, target->GetJumpDest()); + block->SetJumpKindAndTarget(BBJ_COND, target->GetJumpDest() DEBUG_ARG(this)); fgAddRefPred(block->GetJumpDest(), block); fgRemoveRefPred(target, 
block); // add an unconditional block after this block to jump to the target block's fallthrough block // - BasicBlock* next = fgNewBBafter(BBJ_ALWAYS, block, true); + assert(!target->IsLast()); + BasicBlock* next = fgNewBBafter(BBJ_ALWAYS, block, true, target->Next()); // The new block 'next' will inherit its weight from 'block' // next->inheritWeight(block); - next->SetJumpDest(target->Next()); fgAddRefPred(next, block); fgAddRefPred(next->GetJumpDest(), next); @@ -3842,7 +3843,7 @@ bool Compiler::fgOptimizeBranchToNext(BasicBlock* block, BasicBlock* bNext, Basi if (!block->isBBCallAlwaysPairTail()) { /* the unconditional jump is to the next BB */ - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); #ifdef DEBUG if (verbose) { @@ -3968,7 +3969,7 @@ bool Compiler::fgOptimizeBranchToNext(BasicBlock* block, BasicBlock* bNext, Basi /* Conditional is gone - simply fall into the next block */ - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); /* Update bbRefs and bbNum - Conditional predecessors to the same * block are counted twice so we have to remove one of them */ @@ -4233,7 +4234,7 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump) // We need to update the following flags of the bJump block if they were set in the bDest block bJump->bbFlags |= bDest->bbFlags & BBF_COPY_PROPAGATE; - bJump->SetJumpKindAndTarget(BBJ_COND, bDest->Next()); + bJump->SetJumpKindAndTarget(BBJ_COND, bDest->Next() DEBUG_ARG(this)); /* Update bbRefs and bbPreds */ @@ -4393,7 +4394,7 @@ bool Compiler::fgOptimizeSwitchJumps() // Wire up the new control flow. 
// - block->SetJumpKindAndTarget(BBJ_COND, dominantTarget); + block->SetJumpKindAndTarget(BBJ_COND, dominantTarget DEBUG_ARG(this)); FlowEdge* const blockToTargetEdge = fgAddRefPred(dominantTarget, block); FlowEdge* const blockToNewBlockEdge = newBlock->bbPreds; assert(blockToNewBlockEdge->getSourceBlock() == block); @@ -6118,6 +6119,7 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) bDest = nullptr; if (doTailDuplication && fgOptimizeUncondBranchToSimpleCond(block, block->Next())) { + assert(block->KindIs(BBJ_COND)); change = true; modified = true; bDest = block->GetJumpDest(); @@ -6269,9 +6271,8 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) // if (bDest->KindIs(BBJ_NONE, BBJ_COND)) { - BasicBlock* const bFixup = fgNewBBafter(BBJ_ALWAYS, bDest, true); + BasicBlock* const bFixup = fgNewBBafter(BBJ_ALWAYS, bDest, true, bDestNext); bFixup->inheritWeight(bDestNext); - bFixup->SetJumpDest(bDestNext); fgRemoveRefPred(bDestNext, bDest); fgAddRefPred(bFixup, bDest); @@ -6974,7 +6975,7 @@ PhaseStatus Compiler::fgHeadTailMerge(bool early) // Fix up the flow. 
// - predBlock->SetJumpKindAndTarget(BBJ_ALWAYS, crossJumpTarget); + predBlock->SetJumpKindAndTarget(BBJ_ALWAYS, crossJumpTarget DEBUG_ARG(this)); fgRemoveRefPred(block, predBlock); fgAddRefPred(crossJumpTarget, predBlock); diff --git a/src/coreclr/jit/fgprofile.cpp b/src/coreclr/jit/fgprofile.cpp index 62b752235dc81..a274f99b8596c 100644 --- a/src/coreclr/jit/fgprofile.cpp +++ b/src/coreclr/jit/fgprofile.cpp @@ -501,7 +501,7 @@ void BlockCountInstrumentor::RelocateProbes() // if (pred->KindIs(BBJ_NONE)) { - pred->SetJumpKindAndTarget(BBJ_ALWAYS, block); + pred->SetJumpKindAndTarget(BBJ_ALWAYS, block DEBUG_ARG(m_comp)); } assert(pred->KindIs(BBJ_ALWAYS)); } @@ -1553,7 +1553,7 @@ void EfficientEdgeCountInstrumentor::SplitCriticalEdges() // if (block->KindIs(BBJ_NONE)) { - block->SetJumpKindAndTarget(BBJ_ALWAYS, target); + block->SetJumpKindAndTarget(BBJ_ALWAYS, target DEBUG_ARG(m_comp)); } instrumentedBlock = m_comp->fgSplitEdge(block, target); @@ -1695,7 +1695,7 @@ void EfficientEdgeCountInstrumentor::RelocateProbes() // if (pred->KindIs(BBJ_NONE)) { - pred->SetJumpKindAndTarget(BBJ_ALWAYS, block); + pred->SetJumpKindAndTarget(BBJ_ALWAYS, block DEBUG_ARG(m_comp)); } assert(pred->KindIs(BBJ_ALWAYS)); } @@ -3800,7 +3800,7 @@ void EfficientEdgeCountReconstructor::PropagateEdges(BasicBlock* block, BlockInf { assert(nSucc == 1); assert(block == pseudoEdge->m_sourceBlock); - assert(!block->HasJumpTo(nullptr)); + assert(block->HasJump()); FlowEdge* const flowEdge = m_comp->fgGetPredForBlock(block->GetJumpDest(), block); assert(flowEdge != nullptr); flowEdge->setLikelihood(1.0); diff --git a/src/coreclr/jit/flowgraph.cpp b/src/coreclr/jit/flowgraph.cpp index 2dbb470d99974..1a9818ef08897 100644 --- a/src/coreclr/jit/flowgraph.cpp +++ b/src/coreclr/jit/flowgraph.cpp @@ -261,7 +261,7 @@ BasicBlock* Compiler::fgCreateGCPoll(GCPollType pollType, BasicBlock* block) } BasicBlock* poll = fgNewBBafter(BBJ_NONE, top, true); - bottom = fgNewBBafter(top->GetJumpKind(), poll, true); + 
bottom = fgNewBBafter(top->GetJumpKind(), poll, true, top->GetJumpDest()); BBjumpKinds oldJumpKind = top->GetJumpKind(); unsigned char lpIndex = top->bbNatLoopNum; @@ -283,8 +283,6 @@ BasicBlock* Compiler::fgCreateGCPoll(GCPollType pollType, BasicBlock* block) poll->bbSetRunRarely(); poll->bbNatLoopNum = lpIndex; // Set the bbNatLoopNum in case we are in a loop - // Bottom gets all the outgoing edges and inherited flags of Original. - bottom->SetJumpDest(top->GetJumpDest()); bottom->bbNatLoopNum = lpIndex; // Set the bbNatLoopNum in case we are in a loop if (lpIndex != BasicBlock::NOT_IN_LOOP) { @@ -371,8 +369,7 @@ BasicBlock* Compiler::fgCreateGCPoll(GCPollType pollType, BasicBlock* block) } #endif - top->SetJumpKindAndTarget(BBJ_COND, bottom); - + top->SetJumpKindAndTarget(BBJ_COND, bottom DEBUG_ARG(this)); // Bottom has Top and Poll as its predecessors. Poll has just Top as a predecessor. fgAddRefPred(bottom, poll); fgAddRefPred(bottom, top); @@ -1836,7 +1833,7 @@ void Compiler::fgConvertSyncReturnToLeave(BasicBlock* block) assert(ehDsc->ebdEnclosingHndIndex == EHblkDsc::NO_ENCLOSING_INDEX); // Convert the BBJ_RETURN to BBJ_ALWAYS, jumping to genReturnBB. - block->SetJumpKindAndTarget(BBJ_ALWAYS, genReturnBB); + block->SetJumpKindAndTarget(BBJ_ALWAYS, genReturnBB DEBUG_ARG(this)); fgAddRefPred(genReturnBB, block); #ifdef DEBUG @@ -2307,7 +2304,7 @@ class MergedReturns // Change BBJ_RETURN to BBJ_ALWAYS targeting const return block. assert((comp->info.compFlags & CORINFO_FLG_SYNCH) == 0); - returnBlock->SetJumpKindAndTarget(BBJ_ALWAYS, constReturnBlock); + returnBlock->SetJumpKindAndTarget(BBJ_ALWAYS, constReturnBlock DEBUG_ARG(comp)); comp->fgAddRefPred(constReturnBlock, returnBlock); // Remove GT_RETURN since constReturnBlock returns the constant. 
@@ -3512,8 +3509,8 @@ PhaseStatus Compiler::fgDetermineFirstColdBlock() } else { - BasicBlock* transitionBlock = fgNewBBafter(BBJ_ALWAYS, prevToFirstColdBlock, true); - transitionBlock->SetJumpDest(firstColdBlock); + BasicBlock* transitionBlock = + fgNewBBafter(BBJ_ALWAYS, prevToFirstColdBlock, true, firstColdBlock); transitionBlock->inheritWeight(firstColdBlock); // Update the predecessor list for firstColdBlock @@ -3528,7 +3525,7 @@ PhaseStatus Compiler::fgDetermineFirstColdBlock() // If the block preceding the first cold block is BBJ_NONE, // convert it to BBJ_ALWAYS to force an explicit jump. - prevToFirstColdBlock->SetJumpKindAndTarget(BBJ_ALWAYS, firstColdBlock); + prevToFirstColdBlock->SetJumpKindAndTarget(BBJ_ALWAYS, firstColdBlock DEBUG_ARG(this)); break; } } @@ -3723,8 +3720,8 @@ PhaseStatus Compiler::fgCreateThrowHelperBlocks() // assert((add->acdKind == SCK_FAIL_FAST) || (bbThrowIndex(srcBlk) == add->acdData)); - BasicBlock* const newBlk = - fgNewBBinRegion(jumpKinds[add->acdKind], srcBlk, /* runRarely */ true, /* insertAtEnd */ true); + BasicBlock* const newBlk = fgNewBBinRegion(jumpKinds[add->acdKind], srcBlk, /* jumpDest */ nullptr, + /* runRarely */ true, /* insertAtEnd */ true); // Update the descriptor // diff --git a/src/coreclr/jit/helperexpansion.cpp b/src/coreclr/jit/helperexpansion.cpp index 5ff68eb5333df..31a915d0a9238 100644 --- a/src/coreclr/jit/helperexpansion.cpp +++ b/src/coreclr/jit/helperexpansion.cpp @@ -277,16 +277,22 @@ bool Compiler::fgExpandRuntimeLookupsForCall(BasicBlock** pBlock, Statement* stm opts.OptimizationEnabled() ? 
fgMakeMultiUse(&fastPathValue) : gtCloneExpr(fastPathValue); GenTree* nullcheckOp = gtNewOperNode(GT_EQ, TYP_INT, fastPathValue, gtNewIconNode(0, TYP_I_IMPL)); nullcheckOp->gtFlags |= GTF_RELOP_JMP_USED; - BasicBlock* nullcheckBb = - fgNewBBFromTreeAfter(BBJ_COND, prevBb, gtNewOperNode(GT_JTRUE, TYP_VOID, nullcheckOp), debugInfo); + + // nullcheckBb conditionally jumps to fallbackBb, but we need to initialize fallbackBb last + // so we can place it after nullcheckBb. So use a temporary jump target for now. + BasicBlock* nullcheckBb = fgNewBBFromTreeAfter(BBJ_COND, prevBb, gtNewOperNode(GT_JTRUE, TYP_VOID, nullcheckOp), + debugInfo DEBUG_ARG(&BasicBlock::bbTempJumpDest)); // Fallback basic block GenTree* fallbackValueDef = gtNewStoreLclVarNode(rtLookupLcl->GetLclNum(), call); - BasicBlock* fallbackBb = fgNewBBFromTreeAfter(BBJ_NONE, nullcheckBb, fallbackValueDef, debugInfo, true); + BasicBlock* fallbackBb = fgNewBBFromTreeAfter(BBJ_NONE, nullcheckBb, fallbackValueDef, debugInfo, nullptr, true); + + // Set nullcheckBb's real jump target + nullcheckBb->SetJumpDest(fallbackBb); // Fast-path basic block GenTree* fastpathValueDef = gtNewStoreLclVarNode(rtLookupLcl->GetLclNum(), fastPathValueClone); - BasicBlock* fastPathBb = fgNewBBFromTreeAfter(BBJ_ALWAYS, nullcheckBb, fastpathValueDef, debugInfo); + BasicBlock* fastPathBb = fgNewBBFromTreeAfter(BBJ_ALWAYS, nullcheckBb, fastpathValueDef, debugInfo, block); BasicBlock* sizeCheckBb = nullptr; if (needsSizeCheck) @@ -327,7 +333,8 @@ bool Compiler::fgExpandRuntimeLookupsForCall(BasicBlock** pBlock, Statement* stm sizeCheck->gtFlags |= GTF_RELOP_JMP_USED; GenTree* jtrue = gtNewOperNode(GT_JTRUE, TYP_VOID, sizeCheck); - sizeCheckBb = fgNewBBFromTreeAfter(BBJ_COND, prevBb, jtrue, debugInfo); + // sizeCheckBb fails - jump to fallbackBb + sizeCheckBb = fgNewBBFromTreeAfter(BBJ_COND, prevBb, jtrue, debugInfo, fallbackBb); } // @@ -336,8 +343,6 @@ bool Compiler::fgExpandRuntimeLookupsForCall(BasicBlock** pBlock, Statement* stm 
fgRemoveRefPred(block, prevBb); fgAddRefPred(block, fastPathBb); fgAddRefPred(block, fallbackBb); - nullcheckBb->SetJumpDest(fallbackBb); - fastPathBb->SetJumpDest(block); if (needsSizeCheck) { @@ -350,8 +355,6 @@ bool Compiler::fgExpandRuntimeLookupsForCall(BasicBlock** pBlock, Statement* stm fgAddRefPred(fallbackBb, sizeCheckBb); // fastPathBb is only reachable from successful nullcheckBb fgAddRefPred(fastPathBb, nullcheckBb); - // sizeCheckBb fails - jump to fallbackBb - sizeCheckBb->SetJumpDest(fallbackBb); } else { @@ -737,21 +740,27 @@ bool Compiler::fgExpandThreadLocalAccessForCall(BasicBlock** pBlock, Statement* // use(threadStaticBlockBase); // maxThreadStaticBlocksCondBB - BasicBlock* maxThreadStaticBlocksCondBB = fgNewBBFromTreeAfter(BBJ_COND, prevBb, tlsValueDef, debugInfo); + + // maxThreadStaticBlocksCondBB conditionally jumps to fallbackBb, but fallbackBb must be initialized last + // so it can be placed after it. So use a temporary jump target for now. + BasicBlock* maxThreadStaticBlocksCondBB = + fgNewBBFromTreeAfter(BBJ_COND, prevBb, tlsValueDef, debugInfo DEBUG_ARG(&BasicBlock::bbTempJumpDest)); fgInsertStmtAfter(maxThreadStaticBlocksCondBB, maxThreadStaticBlocksCondBB->firstStmt(), fgNewStmtFromTree(maxThreadStaticBlocksCond)); - // threadStaticBlockNullCondBB + // Similarly, give threadStaticBlockNullCondBB a temporary jump target for now, + // and update it to jump to its real target (fastPathBb) after it is initialized. 
BasicBlock* threadStaticBlockNullCondBB = - fgNewBBFromTreeAfter(BBJ_COND, maxThreadStaticBlocksCondBB, threadStaticBlockBaseDef, debugInfo); + fgNewBBFromTreeAfter(BBJ_COND, maxThreadStaticBlocksCondBB, threadStaticBlockBaseDef, + debugInfo DEBUG_ARG(&BasicBlock::bbTempJumpDest)); fgInsertStmtAfter(threadStaticBlockNullCondBB, threadStaticBlockNullCondBB->firstStmt(), fgNewStmtFromTree(threadStaticBlockNullCond)); // fallbackBb GenTree* fallbackValueDef = gtNewStoreLclVarNode(threadStaticBlockLclNum, call); BasicBlock* fallbackBb = - fgNewBBFromTreeAfter(BBJ_ALWAYS, threadStaticBlockNullCondBB, fallbackValueDef, debugInfo, true); + fgNewBBFromTreeAfter(BBJ_ALWAYS, threadStaticBlockNullCondBB, fallbackValueDef, debugInfo, block, true); // fastPathBb if (isGCThreadStatic) @@ -766,7 +775,13 @@ bool Compiler::fgExpandThreadLocalAccessForCall(BasicBlock** pBlock, Statement* GenTree* fastPathValueDef = gtNewStoreLclVarNode(threadStaticBlockLclNum, gtCloneExpr(threadStaticBlockBaseLclValueUse)); - BasicBlock* fastPathBb = fgNewBBFromTreeAfter(BBJ_ALWAYS, fallbackBb, fastPathValueDef, debugInfo, true); + BasicBlock* fastPathBb = fgNewBBFromTreeAfter(BBJ_ALWAYS, fallbackBb, fastPathValueDef, debugInfo, block, true); + + // Set maxThreadStaticBlocksCondBB's real jump target + maxThreadStaticBlocksCondBB->SetJumpDest(fallbackBb); + + // Set threadStaticBlockNullCondBB's real jump target + threadStaticBlockNullCondBB->SetJumpDest(fastPathBb); // // Update preds in all new blocks @@ -783,11 +798,6 @@ bool Compiler::fgExpandThreadLocalAccessForCall(BasicBlock** pBlock, Statement* fgAddRefPred(block, fastPathBb); fgAddRefPred(block, fallbackBb); - maxThreadStaticBlocksCondBB->SetJumpDest(fallbackBb); - threadStaticBlockNullCondBB->SetJumpDest(fastPathBb); - fastPathBb->SetJumpDest(block); - fallbackBb->SetJumpDest(block); - // Inherit the weights block->inheritWeight(prevBb); maxThreadStaticBlocksCondBB->inheritWeight(prevBb); @@ -1075,12 +1085,12 @@ bool 
Compiler::fgExpandStaticInitForCall(BasicBlock** pBlock, Statement* stmt, G GenTree* isInitedCmp = gtNewOperNode(GT_EQ, TYP_INT, isInitedActualValueNode, isInitedExpectedValue); isInitedCmp->gtFlags |= GTF_RELOP_JMP_USED; BasicBlock* isInitedBb = - fgNewBBFromTreeAfter(BBJ_COND, prevBb, gtNewOperNode(GT_JTRUE, TYP_VOID, isInitedCmp), debugInfo); + fgNewBBFromTreeAfter(BBJ_COND, prevBb, gtNewOperNode(GT_JTRUE, TYP_VOID, isInitedCmp), debugInfo, block); // Fallback basic block // TODO-CQ: for JIT we can replace the original call with CORINFO_HELP_INITCLASS // that only accepts a single argument - BasicBlock* helperCallBb = fgNewBBFromTreeAfter(BBJ_NONE, isInitedBb, call, debugInfo, true); + BasicBlock* helperCallBb = fgNewBBFromTreeAfter(BBJ_NONE, isInitedBb, call, debugInfo, nullptr, true); GenTree* replacementNode = nullptr; if (retValKind == SHRV_STATIC_BASE_PTR) @@ -1152,9 +1162,6 @@ bool Compiler::fgExpandStaticInitForCall(BasicBlock** pBlock, Statement* stmt, G // Both fastPathBb and helperCallBb have a single common pred - isInitedBb fgAddRefPred(helperCallBb, isInitedBb); - // helperCallBb unconditionally jumps to the last block (jumps over fastPathBb) - isInitedBb->SetJumpDest(block); - // // Re-distribute weights // @@ -1410,7 +1417,7 @@ bool Compiler::fgVNBasedIntrinsicExpansionForCall_ReadUtf8(BasicBlock** pBlock, // // Block 1: lengthCheckBb (we check that dstLen < srcLen) // - BasicBlock* lengthCheckBb = fgNewBBafter(BBJ_COND, prevBb, true); + BasicBlock* lengthCheckBb = fgNewBBafter(BBJ_COND, prevBb, true, block); lengthCheckBb->bbFlags |= BBF_INTERNAL; // Set bytesWritten -1 by default, if the fast path is not taken we'll return it as the result. 
@@ -1494,8 +1501,6 @@ bool Compiler::fgVNBasedIntrinsicExpansionForCall_ReadUtf8(BasicBlock** pBlock, fgAddRefPred(block, lengthCheckBb); // fastpathBb flows into block fgAddRefPred(block, fastpathBb); - // lengthCheckBb jumps to block if condition is met - lengthCheckBb->SetJumpDest(block); // // Re-distribute weights diff --git a/src/coreclr/jit/ifconversion.cpp b/src/coreclr/jit/ifconversion.cpp index fc49718cfe934..e6aa34c07736d 100644 --- a/src/coreclr/jit/ifconversion.cpp +++ b/src/coreclr/jit/ifconversion.cpp @@ -743,7 +743,8 @@ bool OptIfConversionDsc::optIfConvert() // Update the flow from the original block. m_comp->fgRemoveAllRefPreds(m_startBlock->Next(), m_startBlock); - m_startBlock->SetJumpKind(BBJ_ALWAYS DEBUG_ARG(m_comp)); + assert(m_startBlock->HasJump()); + m_startBlock->SetJumpKind(BBJ_ALWAYS); #ifdef DEBUG if (m_comp->verbose) diff --git a/src/coreclr/jit/importer.cpp b/src/coreclr/jit/importer.cpp index 21f41994ea3c8..0b24cbda69bba 100644 --- a/src/coreclr/jit/importer.cpp +++ b/src/coreclr/jit/importer.cpp @@ -2455,7 +2455,7 @@ GenTree* Compiler::impTypeIsAssignable(GenTree* typeTo, GenTree* typeFrom) void Compiler::verConvertBBToThrowVerificationException(BasicBlock* block DEBUGARG(bool logMsg)) { - block->SetJumpKind(BBJ_THROW DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_THROW DEBUG_ARG(this)); block->bbFlags |= BBF_FAILED_VERIFICATION; block->bbFlags &= ~BBF_IMPORTED; @@ -4322,7 +4322,12 @@ void Compiler::impImportLeave(BasicBlock* block) { assert(step == DUMMY_INIT(NULL)); callBlock = block; - callBlock->SetJumpKind(BBJ_CALLFINALLY DEBUG_ARG(this)); // convert the BBJ_LEAVE to BBJ_CALLFINALLY + + assert(callBlock->HasJump()); + fgRemoveRefPred(callBlock->GetJumpDest(), callBlock); + + // callBlock will call the finally handler. 
Convert the BBJ_LEAVE to BBJ_CALLFINALLY + callBlock->SetJumpKindAndTarget(BBJ_CALLFINALLY, HBtab->ebdHndBeg DEBUG_ARG(this)); if (endCatches) { @@ -4343,14 +4348,15 @@ void Compiler::impImportLeave(BasicBlock* block) assert(step != DUMMY_INIT(NULL)); /* Calling the finally block */ - callBlock = fgNewBBinRegion(BBJ_CALLFINALLY, XTnum + 1, 0, step); - assert(step->KindIs(BBJ_ALWAYS)); - if (!step->HasJumpTo(nullptr)) - { - fgRemoveRefPred(step->GetJumpDest(), step); - } - step->SetJumpDest(callBlock); // the previous call to a finally returns to this call (to the next - // finally in the chain) + + // callBlock will call the finally handler + callBlock = fgNewBBinRegion(BBJ_CALLFINALLY, XTnum + 1, 0, step, HBtab->ebdHndBeg); + + // Jump target should be set to block as a dummy value + assert(step->HasJumpTo(&BasicBlock::bbTempJumpDest)); + + // the previous call to a finally returns to this call (to the next finally in the chain) + step->SetJumpDest(callBlock); fgAddRefPred(callBlock, step); /* The new block will inherit this block's weight */ @@ -4381,7 +4387,8 @@ void Compiler::impImportLeave(BasicBlock* block) impEndTreeList(callBlock, endLFinStmt, lastStmt); } - step = fgNewBBafter(BBJ_ALWAYS, callBlock, true); + // Note: we don't know the jump target yet + step = fgNewBBafter(BBJ_ALWAYS, callBlock, true DEBUG_ARG(&BasicBlock::bbTempJumpDest)); /* The new block will inherit this block's weight */ step->inheritWeight(block); step->bbFlags |= BBF_IMPORTED | BBF_KEEP_BBJ_ALWAYS; @@ -4397,12 +4404,9 @@ void Compiler::impImportLeave(BasicBlock* block) unsigned finallyNesting = compHndBBtab[XTnum].ebdHandlerNestingLevel; assert(finallyNesting <= compHndBBtabCount); - if (!callBlock->HasJumpTo(nullptr)) - { - fgRemoveRefPred(callBlock->GetJumpDest(), callBlock); - } - callBlock->SetJumpDest(HBtab->ebdHndBeg); // This callBlock will call the "finally" handler. 
- fgAddRefPred(HBtab->ebdHndBeg, callBlock); + assert(callBlock->KindIs(BBJ_CALLFINALLY)); + assert(callBlock->HasJumpTo(HBtab->ebdHndBeg)); + fgAddRefPred(callBlock->GetJumpDest(), callBlock); GenTree* endLFin = new (this, GT_END_LFIN) GenTreeVal(GT_END_LFIN, TYP_VOID, finallyNesting); endLFinStmt = gtNewStmt(endLFin); @@ -4419,7 +4423,7 @@ void Compiler::impImportLeave(BasicBlock* block) if (encFinallies == 0) { assert(step == DUMMY_INIT(NULL)); - block->SetJumpKind(BBJ_ALWAYS DEBUG_ARG(this)); // convert the BBJ_LEAVE to a BBJ_ALWAYS + block->SetJumpKind(BBJ_ALWAYS); // convert the BBJ_LEAVE to a BBJ_ALWAYS if (endCatches) { @@ -4445,12 +4449,12 @@ void Compiler::impImportLeave(BasicBlock* block) // Insert a new BB either in the try region indicated by tryIndex or // the handler region indicated by leaveTarget->bbHndIndex, // depending on which is the inner region. - BasicBlock* finalStep = fgNewBBinRegion(BBJ_ALWAYS, tryIndex, leaveTarget->bbHndIndex, step); + BasicBlock* finalStep = fgNewBBinRegion(BBJ_ALWAYS, tryIndex, leaveTarget->bbHndIndex, step, leaveTarget); finalStep->bbFlags |= BBF_KEEP_BBJ_ALWAYS; - if (!step->HasJumpTo(nullptr)) - { - fgRemoveRefPred(step->GetJumpDest(), step); - } + + // Jump target should be set to block as a dummy value + assert(step->HasJumpTo(&BasicBlock::bbTempJumpDest)); + step->SetJumpDest(finalStep); fgAddRefPred(finalStep, step); @@ -4480,7 +4484,7 @@ void Compiler::impImportLeave(BasicBlock* block) impEndTreeList(finalStep, endLFinStmt, lastStmt); - finalStep->SetJumpDest(leaveTarget); // this is the ultimate destination of the LEAVE + // this is the ultimate destination of the LEAVE fgAddRefPred(leaveTarget, finalStep); // Queue up the jump target for importing @@ -4573,7 +4577,7 @@ void Compiler::impImportLeave(BasicBlock* block) if (step == nullptr) { step = block; - step->SetJumpKind(BBJ_EHCATCHRET DEBUG_ARG(this)); // convert the BBJ_LEAVE to BBJ_EHCATCHRET + step->SetJumpKind(BBJ_EHCATCHRET); // convert the BBJ_LEAVE 
to BBJ_EHCATCHRET stepType = ST_Catch; #ifdef DEBUG @@ -4588,14 +4592,15 @@ void Compiler::impImportLeave(BasicBlock* block) } else { - BasicBlock* exitBlock; - /* Create a new catch exit block in the catch region for the existing step block to jump to in this * scope */ - exitBlock = fgNewBBinRegion(BBJ_EHCATCHRET, 0, XTnum + 1, step); + // Note: we don't know the jump target yet + BasicBlock* exitBlock = + fgNewBBinRegion(BBJ_EHCATCHRET, 0, XTnum + 1, step DEBUG_ARG(&BasicBlock::bbTempJumpDest)); assert(step->KindIs(BBJ_ALWAYS, BBJ_EHCATCHRET)); - if (!step->HasJumpTo(nullptr)) + assert((step == block) || step->HasJumpTo(&BasicBlock::bbTempJumpDest)); + if (step == block) { fgRemoveRefPred(step->GetJumpDest(), step); } @@ -4645,14 +4650,15 @@ void Compiler::impImportLeave(BasicBlock* block) (HBtab->ebdEnclosingTryIndex == EHblkDsc::NO_ENCLOSING_INDEX) ? 0 : HBtab->ebdEnclosingTryIndex + 1; unsigned callFinallyHndIndex = (HBtab->ebdEnclosingHndIndex == EHblkDsc::NO_ENCLOSING_INDEX) ? 0 : HBtab->ebdEnclosingHndIndex + 1; - callBlock = fgNewBBinRegion(BBJ_CALLFINALLY, callFinallyTryIndex, callFinallyHndIndex, block); + callBlock = + fgNewBBinRegion(BBJ_CALLFINALLY, callFinallyTryIndex, callFinallyHndIndex, block, HBtab->ebdHndBeg); // Convert the BBJ_LEAVE to BBJ_ALWAYS, jumping to the new BBJ_CALLFINALLY. This is because // the new BBJ_CALLFINALLY is in a different EH region, thus it can't just replace the BBJ_LEAVE, // which might be in the middle of the "try". In most cases, the BBJ_ALWAYS will jump to the // next block, and flow optimizations will remove it. 
fgRemoveRefPred(block->GetJumpDest(), block); - block->SetJumpKindAndTarget(BBJ_ALWAYS, callBlock); + block->SetJumpKindAndTarget(BBJ_ALWAYS, callBlock DEBUG_ARG(this)); fgAddRefPred(callBlock, block); /* The new block will inherit this block's weight */ @@ -4672,7 +4678,12 @@ void Compiler::impImportLeave(BasicBlock* block) #else // !FEATURE_EH_CALLFINALLY_THUNKS callBlock = block; - callBlock->SetJumpKind(BBJ_CALLFINALLY DEBUG_ARG(this)); // convert the BBJ_LEAVE to BBJ_CALLFINALLY + + assert(callBlock->HasJump()); + fgRemoveRefPred(callBlock->GetJumpDest(), callBlock); + + // callBlock will call the finally handler. Convert the BBJ_LEAVE to BBJ_CALLFINALLY + callBlock->SetJumpKindAndTarget(BBJ_CALLFINALLY, HBtab->ebdHndBeg DEBUG_ARG(this)); #ifdef DEBUG if (verbose) @@ -4705,14 +4716,17 @@ void Compiler::impImportLeave(BasicBlock* block) // stack walks.) assert(step->KindIs(BBJ_ALWAYS, BBJ_EHCATCHRET)); + assert((step == block) || step->HasJumpTo(&BasicBlock::bbTempJumpDest)); #if FEATURE_EH_CALLFINALLY_THUNKS if (step->KindIs(BBJ_EHCATCHRET)) { // Need to create another step block in the 'try' region that will actually branch to the // call-to-finally thunk. 
- BasicBlock* step2 = fgNewBBinRegion(BBJ_ALWAYS, XTnum + 1, 0, step); - if (!step->HasJumpTo(nullptr)) + // Note: we don't know the jump target yet + BasicBlock* step2 = + fgNewBBinRegion(BBJ_ALWAYS, XTnum + 1, 0, step DEBUG_ARG(&BasicBlock::bbTempJumpDest)); + if (step == block) { fgRemoveRefPred(step->GetJumpDest(), step); } @@ -4745,8 +4759,13 @@ void Compiler::impImportLeave(BasicBlock* block) unsigned callFinallyHndIndex = 0; // don't care #endif // !FEATURE_EH_CALLFINALLY_THUNKS - callBlock = fgNewBBinRegion(BBJ_CALLFINALLY, callFinallyTryIndex, callFinallyHndIndex, step); - if (!step->HasJumpTo(nullptr)) + assert(step->KindIs(BBJ_ALWAYS, BBJ_EHCATCHRET)); + assert((step == block) || step->HasJumpTo(&BasicBlock::bbTempJumpDest)); + + // callBlock will call the finally handler + callBlock = + fgNewBBinRegion(BBJ_CALLFINALLY, callFinallyTryIndex, callFinallyHndIndex, step, HBtab->ebdHndBeg); + if (step == block) { fgRemoveRefPred(step->GetJumpDest(), step); } @@ -4777,7 +4796,8 @@ void Compiler::impImportLeave(BasicBlock* block) #endif } - step = fgNewBBafter(BBJ_ALWAYS, callBlock, true); + // Note: we don't know the jump target yet + step = fgNewBBafter(BBJ_ALWAYS, callBlock, true DEBUG_ARG(&BasicBlock::bbTempJumpDest)); stepType = ST_FinallyReturn; /* The new block will inherit this block's weight */ @@ -4793,12 +4813,9 @@ void Compiler::impImportLeave(BasicBlock* block) } #endif - if (!callBlock->HasJumpTo(nullptr)) - { - fgRemoveRefPred(callBlock->GetJumpDest(), callBlock); - } - callBlock->SetJumpDest(HBtab->ebdHndBeg); // This callBlock will call the "finally" handler. 
- fgAddRefPred(HBtab->ebdHndBeg, callBlock); + assert(callBlock->KindIs(BBJ_CALLFINALLY)); + assert(callBlock->HasJumpTo(HBtab->ebdHndBeg)); + fgAddRefPred(callBlock->GetJumpDest(), callBlock); } else if (HBtab->HasCatchHandler() && jitIsBetween(blkAddr, tryBeg, tryEnd) && !jitIsBetween(jmpAddr, tryBeg, tryEnd)) @@ -4843,9 +4860,8 @@ void Compiler::impImportLeave(BasicBlock* block) if ((stepType == ST_FinallyReturn) || (stepType == ST_Catch)) { - BasicBlock* catchStep; - assert(step); + assert((step == block) || step->HasJumpTo(&BasicBlock::bbTempJumpDest)); if (stepType == ST_FinallyReturn) { @@ -4858,9 +4874,11 @@ void Compiler::impImportLeave(BasicBlock* block) } /* Create a new exit block in the try region for the existing step block to jump to in this scope */ - catchStep = fgNewBBinRegion(BBJ_ALWAYS, XTnum + 1, 0, step); + // Note: we don't know the jump target yet + BasicBlock* catchStep = + fgNewBBinRegion(BBJ_ALWAYS, XTnum + 1, 0, step DEBUG_ARG(&BasicBlock::bbTempJumpDest)); - if (!step->HasJumpTo(nullptr)) + if (step == block) { fgRemoveRefPred(step->GetJumpDest(), step); } @@ -4907,7 +4925,7 @@ void Compiler::impImportLeave(BasicBlock* block) if (step == nullptr) { - block->SetJumpKind(BBJ_ALWAYS DEBUG_ARG(this)); // convert the BBJ_LEAVE to a BBJ_ALWAYS + block->SetJumpKind(BBJ_ALWAYS); // convert the BBJ_LEAVE to a BBJ_ALWAYS #ifdef DEBUG if (verbose) @@ -4920,7 +4938,9 @@ void Compiler::impImportLeave(BasicBlock* block) } else { - if (!step->HasJumpTo(nullptr)) + assert((step == block) || step->HasJumpTo(&BasicBlock::bbTempJumpDest)); + + if (step == block) { fgRemoveRefPred(step->GetJumpDest(), step); } @@ -4993,9 +5013,8 @@ void Compiler::impResetLeaveBlock(BasicBlock* block, unsigned jmpAddr) // will be treated as pair and handled correctly. 
if (block->KindIs(BBJ_CALLFINALLY)) { - BasicBlock* dupBlock = bbNewBasicBlock(block->GetJumpKind()); + BasicBlock* dupBlock = bbNewBasicBlock(block->GetJumpKind(), block->GetJumpDest()); dupBlock->bbFlags = block->bbFlags; - dupBlock->SetJumpDest(block->GetJumpDest()); fgAddRefPred(dupBlock->GetJumpDest(), dupBlock); dupBlock->copyEHRegion(block); dupBlock->bbCatchTyp = block->bbCatchTyp; @@ -5026,7 +5045,7 @@ void Compiler::impResetLeaveBlock(BasicBlock* block, unsigned jmpAddr) fgInitBBLookup(); fgRemoveRefPred(block->GetJumpDest(), block); - block->SetJumpKindAndTarget(BBJ_LEAVE, fgLookupBB(jmpAddr)); + block->SetJumpKindAndTarget(BBJ_LEAVE, fgLookupBB(jmpAddr) DEBUG_ARG(this)); fgAddRefPred(block->GetJumpDest(), block); // We will leave the BBJ_ALWAYS block we introduced. When it's reimported @@ -6000,7 +6019,7 @@ void Compiler::impImportBlockCode(BasicBlock* block) // Change block to BBJ_THROW so we won't trigger importation of successors. // - block->SetJumpKind(BBJ_THROW DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_THROW DEBUG_ARG(this)); // If this method has a explicit generic context, the only uses of it may be in // the IL for this block. So assume it's used. @@ -7299,7 +7318,7 @@ void Compiler::impImportBlockCode(BasicBlock* block) BADCODE("invalid type for brtrue/brfalse"); } - if (opts.OptimizationEnabled() && block->JumpsToNext()) + if (opts.OptimizationEnabled() && (block->KindIs(BBJ_NONE) || block->JumpsToNext())) { // We may have already modified `block`'s jump kind, if this is a re-importation. 
// @@ -7308,7 +7327,7 @@ void Compiler::impImportBlockCode(BasicBlock* block) JITDUMP(FMT_BB " both branches and falls through to " FMT_BB ", changing to BBJ_NONE\n", block->bbNum, block->Next()->bbNum); fgRemoveRefPred(block->GetJumpDest(), block); - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } else { @@ -7374,14 +7393,16 @@ void Compiler::impImportBlockCode(BasicBlock* block) { JITDUMP("\nThe block falls through into the next " FMT_BB "\n", block->Next()->bbNum); fgRemoveRefPred(block->GetJumpDest(), block); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } else { JITDUMP("\nThe conditional jump becomes an unconditional jump to " FMT_BB "\n", block->GetJumpDest()->bbNum); fgRemoveRefPred(block->Next(), block); + assert(foldedJumpKind == BBJ_ALWAYS); + block->SetJumpKind(BBJ_ALWAYS); } - block->SetJumpKind(foldedJumpKind DEBUG_ARG(this)); } break; @@ -7545,7 +7566,12 @@ void Compiler::impImportBlockCode(BasicBlock* block) assertImp((genActualType(op1) == genActualType(op2)) || (varTypeIsI(op1) && varTypeIsI(op2)) || (varTypeIsFloating(op1) && varTypeIsFloating(op2))); - if (opts.OptimizationEnabled() && block->JumpsToNext()) + if (block->KindIs(BBJ_NONE)) + { + assert(!block->HasJump()); + } + + if (opts.OptimizationEnabled() && (block->KindIs(BBJ_NONE) || block->JumpsToNext())) { // We may have already modified `block`'s jump kind, if this is a re-importation. 
// @@ -7554,7 +7580,7 @@ void Compiler::impImportBlockCode(BasicBlock* block) JITDUMP(FMT_BB " both branches and falls through to " FMT_BB ", changing to BBJ_NONE\n", block->bbNum, block->Next()->bbNum); fgRemoveRefPred(block->GetJumpDest(), block); - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } else { @@ -7631,15 +7657,15 @@ void Compiler::impImportBlockCode(BasicBlock* block) if ((val == switchVal) || (!foundVal && (val == jumpCnt - 1))) { - if (!block->NextIs(curJump)) + if (block->NextIs(curJump)) { - // transform the basic block into a BBJ_ALWAYS - block->SetJumpKindAndTarget(BBJ_ALWAYS, curJump); + // transform the basic block into a BBJ_NONE + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } else { - // transform the basic block into a BBJ_NONE - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + // transform the basic block into a BBJ_ALWAYS + block->SetJumpKindAndTarget(BBJ_ALWAYS, curJump DEBUG_ARG(this)); } foundVal = true; } diff --git a/src/coreclr/jit/indirectcalltransformer.cpp b/src/coreclr/jit/indirectcalltransformer.cpp index bc88174a40ff0..14c2063c7022c 100644 --- a/src/coreclr/jit/indirectcalltransformer.cpp +++ b/src/coreclr/jit/indirectcalltransformer.cpp @@ -218,12 +218,15 @@ class IndirectCallTransformer // Arguments: // jumpKind - jump kind for the new basic block // insertAfter - basic block, after which compiler has to insert the new one. + // jumpDest - jump target for the new basic block. Defaults to nullptr. // // Return Value: // new basic block. 
- BasicBlock* CreateAndInsertBasicBlock(BBjumpKinds jumpKind, BasicBlock* insertAfter) + BasicBlock* CreateAndInsertBasicBlock(BBjumpKinds jumpKind, + BasicBlock* insertAfter, + BasicBlock* jumpDest = nullptr) { - BasicBlock* block = compiler->fgNewBBafter(jumpKind, insertAfter, true); + BasicBlock* block = compiler->fgNewBBafter(jumpKind, insertAfter, true, jumpDest); block->bbFlags |= BBF_IMPORTED; return block; } @@ -274,12 +277,13 @@ class IndirectCallTransformer } // checkBlock - checkBlock->SetJumpDest(elseBlock); + assert(checkBlock->KindIs(BBJ_NONE)); + checkBlock->SetJumpKindAndTarget(BBJ_COND, elseBlock DEBUG_ARG(compiler)); compiler->fgAddRefPred(elseBlock, checkBlock); compiler->fgAddRefPred(thenBlock, checkBlock); // thenBlock - thenBlock->SetJumpDest(remainderBlock); + assert(thenBlock->HasJumpTo(remainderBlock)); compiler->fgAddRefPred(remainderBlock, thenBlock); // elseBlock @@ -361,7 +365,7 @@ class IndirectCallTransformer { assert(checkIdx == 0); - checkBlock = CreateAndInsertBasicBlock(BBJ_COND, currBlock); + checkBlock = CreateAndInsertBasicBlock(BBJ_NONE, currBlock); GenTree* fatPointerMask = new (compiler, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, FAT_POINTER_MASK); GenTree* fptrAddressCopy = compiler->gtCloneExpr(fptrAddress); GenTree* fatPointerAnd = compiler->gtNewOperNode(GT_AND, TYP_I_IMPL, fptrAddressCopy, fatPointerMask); @@ -378,7 +382,8 @@ class IndirectCallTransformer // virtual void CreateThen(uint8_t checkIdx) { - thenBlock = CreateAndInsertBasicBlock(BBJ_ALWAYS, checkBlock); + assert(remainderBlock != nullptr); + thenBlock = CreateAndInsertBasicBlock(BBJ_ALWAYS, checkBlock, remainderBlock); Statement* copyOfOriginalStmt = compiler->gtCloneStmt(stmt); compiler->fgInsertStmtAtEnd(thenBlock, copyOfOriginalStmt); } @@ -572,17 +577,18 @@ class IndirectCallTransformer { // There's no need for a new block here. We can just append to currBlock. 
// - checkBlock = currBlock; - checkBlock->SetJumpKind(BBJ_COND DEBUG_ARG(compiler)); + checkBlock = currBlock; + checkFallsThrough = false; } else { // In case of multiple checks, append to the previous thenBlock block BasicBlock* prevCheckBlock = checkBlock; - checkBlock = CreateAndInsertBasicBlock(BBJ_COND, thenBlock); + checkBlock = CreateAndInsertBasicBlock(BBJ_NONE, thenBlock); + checkFallsThrough = false; // prevCheckBlock is expected to jump to this new check (if its type check doesn't succeed) - prevCheckBlock->SetJumpDest(checkBlock); + prevCheckBlock->SetJumpKindAndTarget(BBJ_COND, checkBlock DEBUG_ARG(compiler)); compiler->fgAddRefPred(checkBlock, prevCheckBlock); // Calculate the total likelihood for this check as a sum of likelihoods @@ -651,8 +657,8 @@ class IndirectCallTransformer const bool isLastCheck = (checkIdx == origCall->GetInlineCandidatesCount() - 1); if (isLastCheck && ((origCall->gtCallMoreFlags & GTF_CALL_M_GUARDED_DEVIRT_EXACT) != 0)) { - checkBlock->SetJumpDest(nullptr); - checkBlock->SetJumpKind(BBJ_NONE DEBUG_ARG(compiler)); + checkBlock->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(compiler)); + checkFallsThrough = true; return; } @@ -978,13 +984,12 @@ class IndirectCallTransformer // virtual void CreateThen(uint8_t checkIdx) { - thenBlock = CreateAndInsertBasicBlock(BBJ_ALWAYS, checkBlock); + // thenBlock always jumps to remainderBlock + thenBlock = CreateAndInsertBasicBlock(BBJ_ALWAYS, checkBlock, remainderBlock); thenBlock->bbFlags |= currBlock->bbFlags & BBF_SPLIT_GAINED; - thenBlock->SetJumpDest(remainderBlock); thenBlock->inheritWeightPercentage(currBlock, origCall->GetGDVCandidateInfo(checkIdx)->likelihood); - // thenBlock always jumps to remainderBlock. 
Also, it has a single pred - last checkBlock - thenBlock->SetJumpDest(remainderBlock); + // Also, thenBlock has a single pred - last checkBlock compiler->fgAddRefPred(thenBlock, checkBlock); compiler->fgAddRefPred(remainderBlock, thenBlock); @@ -1001,9 +1006,9 @@ class IndirectCallTransformer // CheckBlock flows into elseBlock unless we deal with the case // where we know the last check is always true (in case of "exact" GDV) - if (checkBlock->KindIs(BBJ_COND)) + if (!checkFallsThrough) { - checkBlock->SetJumpDest(elseBlock); + checkBlock->SetJumpKindAndTarget(BBJ_COND, elseBlock DEBUG_ARG(compiler)); compiler->fgAddRefPred(elseBlock, checkBlock); } else @@ -1126,7 +1131,7 @@ class IndirectCallTransformer // not fall through to the check block. // compiler->fgRemoveRefPred(checkBlock, coldBlock); - coldBlock->SetJumpKindAndTarget(BBJ_ALWAYS, elseBlock); + coldBlock->SetJumpKindAndTarget(BBJ_ALWAYS, elseBlock DEBUG_ARG(compiler)); compiler->fgAddRefPred(elseBlock, coldBlock); } @@ -1268,6 +1273,7 @@ class IndirectCallTransformer private: unsigned returnTemp; Statement* lastStmt; + bool checkFallsThrough; //------------------------------------------------------------------------ // CreateTreeForLookup: Create a tree representing a lookup of a method address. 
diff --git a/src/coreclr/jit/jiteh.cpp b/src/coreclr/jit/jiteh.cpp index 2f0a3cb4a3df2..c245cdb6ee026 100644 --- a/src/coreclr/jit/jiteh.cpp +++ b/src/coreclr/jit/jiteh.cpp @@ -2374,8 +2374,8 @@ bool Compiler::fgCreateFiltersForGenericExceptions() } // Create a new bb for the fake filter - BasicBlock* filterBb = bbNewBasicBlock(BBJ_EHFILTERRET); BasicBlock* handlerBb = eh->ebdHndBeg; + BasicBlock* filterBb = bbNewBasicBlock(BBJ_EHFILTERRET, handlerBb); // Now we need to spill CATCH_ARG (it should be the first thing evaluated) GenTree* arg = new (this, GT_CATCH_ARG) GenTree(GT_CATCH_ARG, TYP_REF); @@ -2410,7 +2410,6 @@ bool Compiler::fgCreateFiltersForGenericExceptions() filterBb->bbCodeOffs = handlerBb->bbCodeOffs; filterBb->bbHndIndex = handlerBb->bbHndIndex; filterBb->bbTryIndex = handlerBb->bbTryIndex; - filterBb->SetJumpDest(handlerBb); filterBb->bbSetRunRarely(); filterBb->bbFlags |= BBF_INTERNAL | BBF_DONT_REMOVE; diff --git a/src/coreclr/jit/loopcloning.cpp b/src/coreclr/jit/loopcloning.cpp index 727c4f3233a2b..2cdf1906295c0 100644 --- a/src/coreclr/jit/loopcloning.cpp +++ b/src/coreclr/jit/loopcloning.cpp @@ -835,10 +835,9 @@ BasicBlock* LoopCloneContext::CondToStmtInBlock(Compiler* for (unsigned i = 0; i < conds.Size(); ++i) { - newBlk = comp->fgNewBBafter(BBJ_COND, insertAfter, /*extendRegion*/ true); + newBlk = comp->fgNewBBafter(BBJ_COND, insertAfter, /*extendRegion*/ true, slowHead); newBlk->inheritWeight(insertAfter); newBlk->bbNatLoopNum = insertAfter->bbNatLoopNum; - newBlk->SetJumpDest(slowHead); JITDUMP("Adding " FMT_BB " -> " FMT_BB "\n", newBlk->bbNum, newBlk->GetJumpDest()->bbNum); comp->fgAddRefPred(newBlk->GetJumpDest(), newBlk); @@ -867,10 +866,9 @@ BasicBlock* LoopCloneContext::CondToStmtInBlock(Compiler* } else { - BasicBlock* newBlk = comp->fgNewBBafter(BBJ_COND, insertAfter, /*extendRegion*/ true); + BasicBlock* newBlk = comp->fgNewBBafter(BBJ_COND, insertAfter, /*extendRegion*/ true, slowHead); newBlk->inheritWeight(insertAfter); 
newBlk->bbNatLoopNum = insertAfter->bbNatLoopNum; - newBlk->SetJumpDest(slowHead); JITDUMP("Adding " FMT_BB " -> " FMT_BB "\n", newBlk->bbNum, newBlk->GetJumpDest()->bbNum); comp->fgAddRefPred(newBlk->GetJumpDest(), newBlk); @@ -2048,7 +2046,7 @@ void Compiler::optCloneLoop(unsigned loopInd, LoopCloneContext* context) { assert(h->KindIs(BBJ_ALWAYS)); assert(h->HasJumpTo(loop.lpEntry)); - h2->SetJumpKindAndTarget(BBJ_ALWAYS, loop.lpEntry); + h2->SetJumpKindAndTarget(BBJ_ALWAYS, loop.lpEntry DEBUG_ARG(this)); } fgReplacePred(loop.lpEntry, h, h2); @@ -2062,8 +2060,7 @@ void Compiler::optCloneLoop(unsigned loopInd, LoopCloneContext* context) // Make 'h' fall through to 'h2' (if it didn't already). // Don't add the h->h2 edge because we're going to insert the cloning conditions between 'h' and 'h2', and // optInsertLoopChoiceConditions() will add the edge. - h->SetJumpDest(nullptr); - h->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + h->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); // Make X2 after B, if necessary. (Not necessary if B is a BBJ_ALWAYS.) // "newPred" will be the predecessor of the blocks of the cloned loop. @@ -2077,14 +2074,13 @@ void Compiler::optCloneLoop(unsigned loopInd, LoopCloneContext* context) if (x != nullptr) { JITDUMP("Create branch around cloned loop\n"); - BasicBlock* x2 = fgNewBBafter(BBJ_ALWAYS, b, /*extendRegion*/ true); + BasicBlock* x2 = fgNewBBafter(BBJ_ALWAYS, b, /*extendRegion*/ true, x); JITDUMP("Adding " FMT_BB " after " FMT_BB "\n", x2->bbNum, b->bbNum); x2->bbWeight = x2->isRunRarely() ? BB_ZERO_WEIGHT : ambientWeight; // This is in the scope of a surrounding loop, if one exists -- the parent of the loop we're cloning. 
x2->bbNatLoopNum = ambientLoop; - x2->SetJumpDest(x); BlockSetOps::Assign(this, x2->bbReach, h->bbReach); fgAddRefPred(x2, b); // Add b->x2 pred edge @@ -2116,7 +2112,8 @@ void Compiler::optCloneLoop(unsigned loopInd, LoopCloneContext* context) BlockToBlockMap* blockMap = new (getAllocator(CMK_LoopClone)) BlockToBlockMap(getAllocator(CMK_LoopClone)); for (BasicBlock* const blk : loop.LoopBlocks()) { - BasicBlock* newBlk = fgNewBBafter(blk->GetJumpKind(), newPred, /*extendRegion*/ true); + // Initialize newBlk as BBJ_NONE, and fix up jump kind/target later with optCopyBlkDest() + BasicBlock* newBlk = fgNewBBafter(BBJ_NONE, newPred, /*extendRegion*/ true); JITDUMP("Adding " FMT_BB " (copy of " FMT_BB ") after " FMT_BB "\n", newBlk->bbNum, blk->bbNum, newPred->bbNum); // Call CloneBlockState to make a copy of the block's statements (and attributes), and assert that it @@ -2175,7 +2172,8 @@ void Compiler::optCloneLoop(unsigned loopInd, LoopCloneContext* context) bool b = blockMap->Lookup(blk, &newblk); assert(b && newblk != nullptr); - assert(blk->KindIs(newblk->GetJumpKind())); + // Jump kind/target should not be set yet + assert(newblk->KindIs(BBJ_NONE)); // First copy the jump destination(s) from "blk". optCopyBlkDest(blk, newblk); @@ -2255,7 +2253,7 @@ void Compiler::optCloneLoop(unsigned loopInd, LoopCloneContext* context) { // We can't just fall through to the slow path entry, so make it an unconditional branch. assert(slowHead->KindIs(BBJ_NONE)); // This is how we created it above. 
- slowHead->SetJumpKindAndTarget(BBJ_ALWAYS, e2); + slowHead->SetJumpKindAndTarget(BBJ_ALWAYS, e2 DEBUG_ARG(this)); } fgAddRefPred(e2, slowHead); diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp index f43a9f032507c..5c25441db55ca 100644 --- a/src/coreclr/jit/lower.cpp +++ b/src/coreclr/jit/lower.cpp @@ -802,12 +802,11 @@ GenTree* Lowering::LowerSwitch(GenTree* node) noway_assert(comp->opts.OptimizationDisabled()); if (originalSwitchBB->NextIs(jumpTab[0])) { - originalSwitchBB->SetJumpKind(BBJ_NONE DEBUG_ARG(comp)); - originalSwitchBB->SetJumpDest(nullptr); + originalSwitchBB->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(comp)); } else { - originalSwitchBB->SetJumpKindAndTarget(BBJ_ALWAYS, jumpTab[0]); + originalSwitchBB->SetJumpKindAndTarget(BBJ_ALWAYS, jumpTab[0] DEBUG_ARG(comp)); } // Remove extra predecessor links if there was more than one case. for (unsigned i = 1; i < jumpCnt; ++i) @@ -900,7 +899,7 @@ GenTree* Lowering::LowerSwitch(GenTree* node) // The GT_SWITCH code is still in originalSwitchBB (it will be removed later). // Turn originalSwitchBB into a BBJ_COND. 
- originalSwitchBB->SetJumpKindAndTarget(BBJ_COND, jumpTab[jumpCnt - 1]); + originalSwitchBB->SetJumpKindAndTarget(BBJ_COND, jumpTab[jumpCnt - 1] DEBUG_ARG(comp)); // Fix the pred for the default case: the default block target still has originalSwitchBB // as a predecessor, but the fgSplitBlockAfterStatement() moved all predecessors to point @@ -956,12 +955,11 @@ GenTree* Lowering::LowerSwitch(GenTree* node) } if (afterDefaultCondBlock->NextIs(uniqueSucc)) { - afterDefaultCondBlock->SetJumpKind(BBJ_NONE DEBUG_ARG(comp)); - afterDefaultCondBlock->SetJumpDest(nullptr); + afterDefaultCondBlock->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(comp)); } else { - afterDefaultCondBlock->SetJumpKindAndTarget(BBJ_ALWAYS, uniqueSucc); + afterDefaultCondBlock->SetJumpKindAndTarget(BBJ_ALWAYS, uniqueSucc DEBUG_ARG(comp)); } } // If the number of possible destinations is small enough, we proceed to expand the switch @@ -1030,13 +1028,13 @@ GenTree* Lowering::LowerSwitch(GenTree* node) // case: there is no need to compare against the case index, since it's // guaranteed to be taken (since the default case was handled first, above). - currentBlock->SetJumpKindAndTarget(BBJ_ALWAYS, jumpTab[i]); + currentBlock->SetJumpKindAndTarget(BBJ_ALWAYS, jumpTab[i] DEBUG_ARG(comp)); } else { // Otherwise, it's a conditional branch. Set the branch kind, then add the // condition statement. 
- currentBlock->SetJumpKindAndTarget(BBJ_COND, jumpTab[i]); + currentBlock->SetJumpKindAndTarget(BBJ_COND, jumpTab[i] DEBUG_ARG(comp)); // Now, build the conditional statement for the current case that is // being evaluated: @@ -1069,7 +1067,7 @@ GenTree* Lowering::LowerSwitch(GenTree* node) JITDUMP("Lowering switch " FMT_BB ": all switch cases were fall-through\n", originalSwitchBB->bbNum); assert(currentBlock == afterDefaultCondBlock); assert(currentBlock->KindIs(BBJ_SWITCH)); - currentBlock->SetJumpKind(BBJ_NONE DEBUG_ARG(comp)); + currentBlock->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(comp)); currentBlock->bbFlags &= ~BBF_DONT_REMOVE; comp->fgRemoveBlock(currentBlock, /* unreachable */ false); // It's an empty block. } @@ -1248,7 +1246,7 @@ bool Lowering::TryLowerSwitchToBitTest( { // GenCondition::C generates JC so we jump to bbCase1 when the bit is set bbSwitchCondition = GenCondition::C; - bbSwitch->SetJumpKindAndTarget(BBJ_COND, bbCase1); + bbSwitch->SetJumpKindAndTarget(BBJ_COND, bbCase1 DEBUG_ARG(comp)); comp->fgAddRefPred(bbCase0, bbSwitch); comp->fgAddRefPred(bbCase1, bbSwitch); @@ -1259,7 +1257,7 @@ bool Lowering::TryLowerSwitchToBitTest( // GenCondition::NC generates JNC so we jump to bbCase0 when the bit is not set bbSwitchCondition = GenCondition::NC; - bbSwitch->SetJumpKindAndTarget(BBJ_COND, bbCase0); + bbSwitch->SetJumpKindAndTarget(BBJ_COND, bbCase0 DEBUG_ARG(comp)); comp->fgAddRefPred(bbCase0, bbSwitch); comp->fgAddRefPred(bbCase1, bbSwitch); diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp index 8c02648ca9405..4bae7722294e0 100644 --- a/src/coreclr/jit/morph.cpp +++ b/src/coreclr/jit/morph.cpp @@ -6159,7 +6159,8 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call) // Many tailcalls will have call and ret in the same block, and thus be // BBJ_RETURN, but if the call falls through to a ret, and we are doing a // tailcall, change it here. 
- compCurBB->SetJumpKind(BBJ_RETURN DEBUG_ARG(this)); + // (compCurBB may have a jump target, so use SetJumpKind() to avoid nulling it) + compCurBB->SetJumpKind(BBJ_RETURN); } GenTree* stmtExpr = fgMorphStmt->GetRootNode(); @@ -6307,7 +6308,7 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call) { // We call CORINFO_HELP_TAILCALL which does not return, so we will // not need epilogue. - compCurBB->SetJumpKind(BBJ_THROW DEBUG_ARG(this)); + compCurBB->SetJumpKindAndTarget(BBJ_THROW DEBUG_ARG(this)); } if (isRootReplaced) @@ -7445,7 +7446,7 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa { // Todo: this may not look like a viable loop header. // Might need the moral equivalent of a scratch BB. - block->SetJumpKindAndTarget(BBJ_ALWAYS, fgEntryBB); + block->SetJumpKindAndTarget(BBJ_ALWAYS, fgEntryBB DEBUG_ARG(this)); } else { @@ -7455,7 +7456,7 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa // block removal on it. fgEnsureFirstBBisScratch(); fgFirstBB->bbFlags |= BBF_DONT_REMOVE; - block->SetJumpKindAndTarget(BBJ_ALWAYS, fgFirstBB->Next()); + block->SetJumpKindAndTarget(BBJ_ALWAYS, fgFirstBB->Next() DEBUG_ARG(this)); } // Finish hooking things up. 
@@ -13149,7 +13150,7 @@ Compiler::FoldResult Compiler::fgFoldConditional(BasicBlock* block) if (cond->AsIntCon()->gtIconVal != 0) { /* JTRUE 1 - transform the basic block into a BBJ_ALWAYS */ - block->SetJumpKind(BBJ_ALWAYS DEBUG_ARG(this)); + block->SetJumpKind(BBJ_ALWAYS); bTaken = block->GetJumpDest(); bNotTaken = block->Next(); } @@ -13165,9 +13166,9 @@ Compiler::FoldResult Compiler::fgFoldConditional(BasicBlock* block) } /* JTRUE 0 - transform the basic block into a BBJ_NONE */ - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); bTaken = block->Next(); bNotTaken = block->GetJumpDest(); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } if (fgHaveValidEdgeWeights) @@ -13394,12 +13395,12 @@ Compiler::FoldResult Compiler::fgFoldConditional(BasicBlock* block) if (!block->NextIs(curJump)) { // transform the basic block into a BBJ_ALWAYS - block->SetJumpKindAndTarget(BBJ_ALWAYS, curJump); + block->SetJumpKindAndTarget(BBJ_ALWAYS, curJump DEBUG_ARG(this)); } else { // transform the basic block into a BBJ_NONE - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } foundVal = true; } @@ -13967,7 +13968,7 @@ void Compiler::fgMergeBlockReturn(BasicBlock* block) else #endif // !TARGET_X86 { - block->SetJumpKindAndTarget(BBJ_ALWAYS, genReturnBB); + block->SetJumpKindAndTarget(BBJ_ALWAYS, genReturnBB DEBUG_ARG(this)); fgAddRefPred(genReturnBB, block); fgReturnCount--; } @@ -14429,8 +14430,8 @@ void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, Statement* stmt) fgRemoveRefPred(remainderBlock, block); // We're going to put more blocks between block and remainderBlock. 
BasicBlock* helperBlock = fgNewBBafter(BBJ_NONE, block, true); - BasicBlock* cond2Block = fgNewBBafter(BBJ_COND, block, true); - BasicBlock* cond1Block = fgNewBBafter(BBJ_COND, block, true); + BasicBlock* cond2Block = fgNewBBafter(BBJ_COND, block, true, remainderBlock); + BasicBlock* cond1Block = fgNewBBafter(BBJ_COND, block, true, remainderBlock); BasicBlock* asgBlock = fgNewBBafter(BBJ_NONE, block, true); remainderBlock->bbFlags |= propagateFlags; @@ -14458,9 +14459,6 @@ void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, Statement* stmt) fgAddRefPred(remainderBlock, cond1Block); fgAddRefPred(remainderBlock, cond2Block); - cond1Block->SetJumpDest(remainderBlock); - cond2Block->SetJumpDest(remainderBlock); - // Set the weights; some are guesses. asgBlock->inheritWeight(block); cond1Block->inheritWeight(block); @@ -14613,7 +14611,7 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, Statement* stmt) BasicBlock* remainderBlock = fgSplitBlockAfterStatement(block, stmt); fgRemoveRefPred(remainderBlock, block); // We're going to put more blocks between block and remainderBlock. - BasicBlock* condBlock = fgNewBBafter(BBJ_COND, block, true); + BasicBlock* condBlock = fgNewBBafter(BBJ_NONE, block, true); BasicBlock* elseBlock = fgNewBBafter(BBJ_NONE, condBlock, true); // These blocks are only internal if 'block' is (but they've been set as internal by fgNewBBafter). 
@@ -14649,10 +14647,9 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, Statement* stmt) // bbj_cond(true) // gtReverseCond(condExpr); - condBlock->SetJumpDest(elseBlock); + condBlock->SetJumpKindAndTarget(BBJ_COND, elseBlock DEBUG_ARG(this)); - thenBlock = fgNewBBafter(BBJ_ALWAYS, condBlock, true); - thenBlock->SetJumpDest(remainderBlock); + thenBlock = fgNewBBafter(BBJ_ALWAYS, condBlock, true, remainderBlock); thenBlock->bbFlags |= propagateFlagsToAll; if ((block->bbFlags & BBF_INTERNAL) == 0) { @@ -14675,7 +14672,7 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, Statement* stmt) // bbj_cond(true) // gtReverseCond(condExpr); - condBlock->SetJumpDest(remainderBlock); + condBlock->SetJumpKindAndTarget(BBJ_COND, remainderBlock DEBUG_ARG(this)); fgAddRefPred(remainderBlock, condBlock); // Since we have no false expr, use the one we'd already created. thenBlock = elseBlock; @@ -14691,12 +14688,14 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, Statement* stmt) // +-->------------+ // bbj_cond(true) // - condBlock->SetJumpDest(remainderBlock); + condBlock->SetJumpKindAndTarget(BBJ_COND, remainderBlock DEBUG_ARG(this)); fgAddRefPred(remainderBlock, condBlock); elseBlock->inheritWeightPercentage(condBlock, 50); } + assert(condBlock->KindIs(BBJ_COND)); + GenTree* jmpTree = gtNewOperNode(GT_JTRUE, TYP_VOID, qmark->gtGetOp1()); Statement* jmpStmt = fgNewStmtFromTree(jmpTree, stmt->GetDebugInfo()); fgInsertStmtAtEnd(condBlock, jmpStmt); diff --git a/src/coreclr/jit/optimizebools.cpp b/src/coreclr/jit/optimizebools.cpp index 7f44a53407b7b..3c55cef198ab5 100644 --- a/src/coreclr/jit/optimizebools.cpp +++ b/src/coreclr/jit/optimizebools.cpp @@ -737,7 +737,7 @@ bool OptBoolsDsc::optOptimizeRangeTests() } //----------------------------------------------------------------------------- -// optOptimizeCompareChainCondBlock: Create a chain when when both m_b1 and m_b2 are BBJ_COND. 
+// optOptimizeCompareChainCondBlock: Create a chain when both m_b1 and m_b2 are BBJ_COND. // // Returns: // true if chain optimization is done and m_b1 and m_b2 are folded into m_b1, else false. @@ -919,7 +919,7 @@ bool OptBoolsDsc::optOptimizeCompareChainCondBlock() // Update the flow. m_comp->fgRemoveRefPred(m_b1->GetJumpDest(), m_b1); - m_b1->SetJumpKind(BBJ_NONE DEBUG_ARG(m_comp)); + m_b1->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(m_comp)); // Fixup flags. m_b2->bbFlags |= (m_b1->bbFlags & BBF_COPY_PROPAGATE); @@ -1208,11 +1208,7 @@ void OptBoolsDsc::optOptimizeBoolsUpdateTrees() if (optReturnBlock) { - m_b1->SetJumpDest(nullptr); - m_b1->SetJumpKind(BBJ_RETURN DEBUG_ARG(m_comp)); -#ifdef DEBUG - m_b1->SetJumpSwt(m_b2->GetJumpSwt()); -#endif + m_b1->SetJumpKindAndTarget(BBJ_RETURN DEBUG_ARG(m_comp)); assert(m_b2->KindIs(BBJ_RETURN)); assert(m_b1->NextIs(m_b2)); assert(m_b3 != nullptr); diff --git a/src/coreclr/jit/optimizer.cpp b/src/coreclr/jit/optimizer.cpp index 2c6238fbc42e6..355e0f198d5a0 100644 --- a/src/coreclr/jit/optimizer.cpp +++ b/src/coreclr/jit/optimizer.cpp @@ -2404,7 +2404,7 @@ class LoopSearch case BBJ_CALLFINALLY: case BBJ_ALWAYS: case BBJ_EHCATCHRET: - assert(!block->HasJumpTo(nullptr)); + assert(block->HasJump()); exitPoint = block->GetJumpDest(); if (!loopBlocks.IsMember(exitPoint->bbNum)) @@ -2844,30 +2844,21 @@ void Compiler::optRedirectBlock(BasicBlock* blk, BlockToBlockMap* redirectMap, R // TODO-Cleanup: This should be a static member of the BasicBlock class. void Compiler::optCopyBlkDest(BasicBlock* from, BasicBlock* to) { - assert(from->KindIs(to->GetJumpKind())); // Precondition. - // copy the jump destination(s) from "from" to "to". - switch (to->GetJumpKind()) + switch (from->GetJumpKind()) { - case BBJ_ALWAYS: - case BBJ_LEAVE: - case BBJ_CALLFINALLY: - case BBJ_COND: - // All of these have a single jump destination to update. 
- to->SetJumpDest(from->GetJumpDest()); + case BBJ_SWITCH: + to->SetSwitchKindAndTarget(new (this, CMK_BasicBlock) BBswtDesc(this, from->GetJumpSwt())); break; - case BBJ_EHFINALLYRET: - to->SetJumpEhf(new (this, CMK_BasicBlock) BBehfDesc(this, from->GetJumpEhf())); - break; - - case BBJ_SWITCH: - to->SetJumpSwt(new (this, CMK_BasicBlock) BBswtDesc(this, from->GetJumpSwt())); + to->SetJumpKindAndTarget(BBJ_EHFINALLYRET, new (this, CMK_BasicBlock) BBehfDesc(this, from->GetJumpEhf())); break; - default: + to->SetJumpKindAndTarget(from->GetJumpKind(), from->GetJumpDest() DEBUG_ARG(this)); break; } + + assert(to->KindIs(from->GetJumpKind())); } // Returns true if 'block' is an entry block for any loop in 'optLoopTable' @@ -4390,8 +4381,9 @@ PhaseStatus Compiler::optUnrollLoops() // every iteration. for (BasicBlock* block = loop.lpTop; !loop.lpBottom->NextIs(block); block = block->Next()) { - BasicBlock* newBlock = insertAfter = - fgNewBBafter(block->GetJumpKind(), insertAfter, /*extendRegion*/ true); + // Use BBJ_NONE to avoid setting a jump target for now. + // Compiler::optCopyBlkDest() will fix the jump kind/target in the loop below. + BasicBlock* newBlock = insertAfter = fgNewBBafter(BBJ_NONE, insertAfter, /*extendRegion*/ true); blockMap.Set(block, newBlock, BlockToBlockMap::Overwrite); if (!BasicBlock::CloneBlockState(this, newBlock, block, lvar, lval)) @@ -4425,7 +4417,7 @@ PhaseStatus Compiler::optUnrollLoops() newBlock->scaleBBWeight(1.0 / BB_LOOP_WEIGHT_SCALE); // Jump dests are set in a post-pass; make sure CloneBlockState hasn't tried to set them. - assert(newBlock->HasJumpTo(nullptr)); + assert(!newBlock->HasJump()); if (block == bottom) { @@ -4444,7 +4436,6 @@ PhaseStatus Compiler::optUnrollLoops() { testCopyStmt->SetRootNode(sideEffList); } - newBlock->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); } } @@ -4453,7 +4444,11 @@ PhaseStatus Compiler::optUnrollLoops() // newBlock->bbJumpKind, above. 
for (BasicBlock* block = loop.lpTop; block != loop.lpBottom; block = block->Next()) { + // Jump kind/target should not be set yet BasicBlock* newBlock = blockMap[block]; + assert(newBlock->KindIs(BBJ_NONE)); + + // Now copy the jump kind/target optCopyBlkDest(block, newBlock); optRedirectBlock(newBlock, &blockMap, RedirectBlockOption::AddToPredLists); } @@ -4515,8 +4510,7 @@ PhaseStatus Compiler::optUnrollLoops() fgRemoveAllRefPreds(succ, block); } - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); - block->SetJumpDest(nullptr); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); block->bbStmtList = nullptr; block->bbNatLoopNum = newLoopNum; @@ -4560,7 +4554,7 @@ PhaseStatus Compiler::optUnrollLoops() noway_assert(initBlockBranchStmt->GetRootNode()->OperIs(GT_JTRUE)); fgRemoveStmt(initBlock, initBlockBranchStmt); fgRemoveRefPred(initBlock->GetJumpDest(), initBlock); - initBlock->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + initBlock->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } else { @@ -5106,9 +5100,8 @@ bool Compiler::optInvertWhileLoop(BasicBlock* block) bool foundCondTree = false; // Create a new block after `block` to put the copied condition code. - block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); - block->SetJumpDest(nullptr); - BasicBlock* bNewCond = fgNewBBafter(BBJ_COND, block, /*extendRegion*/ true); + block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); + BasicBlock* bNewCond = fgNewBBafter(BBJ_COND, block, /*extendRegion*/ true, bJoin); // Clone each statement in bTest and append to bNewCond. for (Statement* const stmt : bTest->Statements()) @@ -5153,7 +5146,6 @@ bool Compiler::optInvertWhileLoop(BasicBlock* block) // Fix flow and profile // - bNewCond->SetJumpDest(bJoin); bNewCond->inheritWeight(block); if (allProfileWeightsAreValid) @@ -8176,15 +8168,19 @@ bool Compiler::fgCreateLoopPreHeader(unsigned lnum) // Allocate a new basic block for the pre-header. 
- const bool isTopEntryLoop = loop.lpIsTopEntry(); - - BasicBlock* preHead = bbNewBasicBlock(isTopEntryLoop ? BBJ_NONE : BBJ_ALWAYS); - preHead->bbFlags |= BBF_INTERNAL | BBF_LOOP_PREHEADER; + const bool isTopEntryLoop = loop.lpIsTopEntry(); + BasicBlock* preHead; - if (!isTopEntryLoop) + if (isTopEntryLoop) { - preHead->SetJumpDest(entry); + preHead = bbNewBasicBlock(BBJ_NONE); } + else + { + preHead = bbNewBasicBlock(BBJ_ALWAYS, entry); + } + + preHead->bbFlags |= BBF_INTERNAL | BBF_LOOP_PREHEADER; // Must set IL code offset preHead->bbCodeOffs = top->bbCodeOffs; diff --git a/src/coreclr/jit/patchpoint.cpp b/src/coreclr/jit/patchpoint.cpp index 3c7cb1f35fc85..d5ae08ddd1cc4 100644 --- a/src/coreclr/jit/patchpoint.cpp +++ b/src/coreclr/jit/patchpoint.cpp @@ -101,12 +101,13 @@ class PatchpointTransformer // Arguments: // jumpKind - jump kind for the new basic block // insertAfter - basic block, after which compiler has to insert the new one. + // jumpDest - jump target for the new basic block. Defaults to nullptr. // // Return Value: // new basic block. 
- BasicBlock* CreateAndInsertBasicBlock(BBjumpKinds jumpKind, BasicBlock* insertAfter) + BasicBlock* CreateAndInsertBasicBlock(BBjumpKinds jumpKind, BasicBlock* insertAfter, BasicBlock* jumpDest = nullptr) { - BasicBlock* block = compiler->fgNewBBafter(jumpKind, insertAfter, true); + BasicBlock* block = compiler->fgNewBBafter(jumpKind, insertAfter, true, jumpDest); block->bbFlags |= BBF_IMPORTED; return block; } @@ -145,7 +146,7 @@ class PatchpointTransformer BasicBlock* helperBlock = CreateAndInsertBasicBlock(BBJ_NONE, block); // Update flow and flags - block->SetJumpKindAndTarget(BBJ_COND, remainderBlock); + block->SetJumpKindAndTarget(BBJ_COND, remainderBlock DEBUG_ARG(compiler)); block->bbFlags |= BBF_INTERNAL; helperBlock->bbFlags |= BBF_BACKWARD_JUMP; @@ -232,8 +233,7 @@ class PatchpointTransformer } // Update flow - block->SetJumpDest(nullptr); - block->SetJumpKind(BBJ_THROW DEBUG_ARG(compiler)); + block->SetJumpKindAndTarget(BBJ_THROW DEBUG_ARG(compiler)); // Add helper call // diff --git a/src/coreclr/jit/redundantbranchopts.cpp b/src/coreclr/jit/redundantbranchopts.cpp index 5ec4d1da1d46e..5c149bd35a141 100644 --- a/src/coreclr/jit/redundantbranchopts.cpp +++ b/src/coreclr/jit/redundantbranchopts.cpp @@ -1460,7 +1460,7 @@ bool Compiler::optJumpThreadCore(JumpThreadInfo& jti) // Possibly defer this until after early out below. 
// - jti.m_fallThroughPred->SetJumpKindAndTarget(BBJ_ALWAYS, jti.m_block); + jti.m_fallThroughPred->SetJumpKindAndTarget(BBJ_ALWAYS, jti.m_block DEBUG_ARG(this)); modifiedFlow = true; } else @@ -1531,7 +1531,7 @@ bool Compiler::optJumpThreadCore(JumpThreadInfo& jti) fgRemoveStmt(jti.m_block, lastStmt); JITDUMP(" repurposing " FMT_BB " to always jump to " FMT_BB "\n", jti.m_block->bbNum, jti.m_trueTarget->bbNum); fgRemoveRefPred(jti.m_falseTarget, jti.m_block); - jti.m_block->SetJumpKind(BBJ_ALWAYS DEBUG_ARG(this)); + jti.m_block->SetJumpKind(BBJ_ALWAYS); } else if (falsePredsWillReuseBlock) { @@ -1540,7 +1540,7 @@ bool Compiler::optJumpThreadCore(JumpThreadInfo& jti) JITDUMP(" repurposing " FMT_BB " to always fall through to " FMT_BB "\n", jti.m_block->bbNum, jti.m_falseTarget->bbNum); fgRemoveRefPred(jti.m_trueTarget, jti.m_block); - jti.m_block->SetJumpKind(BBJ_NONE DEBUG_ARG(this)); + jti.m_block->SetJumpKindAndTarget(BBJ_NONE DEBUG_ARG(this)); } // Now reroute the flow from the predecessors. diff --git a/src/coreclr/jit/switchrecognition.cpp b/src/coreclr/jit/switchrecognition.cpp index 7ffbb8207ab0b..bc1ca0474e427 100644 --- a/src/coreclr/jit/switchrecognition.cpp +++ b/src/coreclr/jit/switchrecognition.cpp @@ -319,7 +319,7 @@ bool Compiler::optSwitchConvert(BasicBlock* firstBlock, int testsCount, ssize_t* assert(isTest); // Convert firstBlock to a switch block - firstBlock->SetJumpKindAndTarget(BBJ_SWITCH, new (this, CMK_BasicBlock) BBswtDesc); + firstBlock->SetSwitchKindAndTarget(new (this, CMK_BasicBlock) BBswtDesc); firstBlock->bbCodeOffsEnd = lastBlock->bbCodeOffsEnd; firstBlock->lastStmt()->GetRootNode()->ChangeOper(GT_SWITCH);