4 changes: 4 additions & 0 deletions src/coreclr/jit/block.cpp
@@ -521,6 +521,10 @@ void BasicBlock::dspFlags()
{
printf("mdarr ");
}
+ if (bbFlags & BBF_NEEDS_GCPOLL)
+ {
+ printf("gcpoll ");
+ }
}

/*****************************************************************************
8 changes: 4 additions & 4 deletions src/coreclr/jit/block.h
@@ -386,7 +386,7 @@ enum BasicBlockFlags : unsigned __int64
BBF_IMPORTED = MAKE_BBFLAG( 4), // BB byte-code has been imported
BBF_INTERNAL = MAKE_BBFLAG( 5), // BB has been added by the compiler
BBF_FAILED_VERIFICATION = MAKE_BBFLAG( 6), // BB has verification exception
- // BBF_UNUSED = MAKE_BBFLAG( 7),
+ BBF_NEEDS_GCPOLL = MAKE_BBFLAG( 7), // BB may need a GC poll because it uses the slow tail call helper
BBF_FUNCLET_BEG = MAKE_BBFLAG( 8), // BB is the beginning of a funclet
BBF_CLONED_FINALLY_BEGIN = MAKE_BBFLAG( 9), // First block of a cloned finally region
BBF_CLONED_FINALLY_END = MAKE_BBFLAG(10), // Last block of a cloned finally region
@@ -449,7 +449,7 @@ enum BasicBlockFlags : unsigned __int64

// Flags to update when two blocks are compacted

- BBF_COMPACT_UPD = BBF_GC_SAFE_POINT | BBF_HAS_JMP | BBF_HAS_IDX_LEN | BBF_HAS_MD_IDX_LEN | BBF_BACKWARD_JUMP | \
+ BBF_COMPACT_UPD = BBF_GC_SAFE_POINT | BBF_NEEDS_GCPOLL | BBF_HAS_JMP | BBF_HAS_IDX_LEN | BBF_HAS_MD_IDX_LEN | BBF_BACKWARD_JUMP | \
BBF_HAS_NEWOBJ | BBF_HAS_NULLCHECK | BBF_HAS_MDARRAYREF | BBF_LOOP_PREHEADER,

// Flags a block should not have had before it is split.
@@ -461,15 +461,15 @@ enum BasicBlockFlags : unsigned __int64
// For example, the top block might or might not have BBF_GC_SAFE_POINT,
// but we assume it does not have BBF_GC_SAFE_POINT any more.

- BBF_SPLIT_LOST = BBF_GC_SAFE_POINT | BBF_HAS_JMP | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END | BBF_RECURSIVE_TAILCALL,
+ BBF_SPLIT_LOST = BBF_GC_SAFE_POINT | BBF_NEEDS_GCPOLL | BBF_HAS_JMP | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END | BBF_RECURSIVE_TAILCALL,

// Flags gained by the bottom block when a block is split.
// Note, this is a conservative guess.
// For example, the bottom block might or might not have BBF_HAS_NULLCHECK, but we assume it has BBF_HAS_NULLCHECK.
// TODO: Should BBF_RUN_RARELY be added to BBF_SPLIT_GAINED ?

BBF_SPLIT_GAINED = BBF_DONT_REMOVE | BBF_HAS_JMP | BBF_BACKWARD_JUMP | BBF_HAS_IDX_LEN | BBF_HAS_MD_IDX_LEN | BBF_PROF_WEIGHT | \
- BBF_HAS_NEWOBJ | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END | BBF_HAS_NULLCHECK | BBF_HAS_HISTOGRAM_PROFILE | BBF_HAS_MDARRAYREF,
+ BBF_HAS_NEWOBJ | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END | BBF_HAS_NULLCHECK | BBF_HAS_HISTOGRAM_PROFILE | BBF_HAS_MDARRAYREF | BBF_NEEDS_GCPOLL,

// Flags that must be propagated to a new block if code is copied from a block to a new block. These are flags that
// limit processing of a block if the code in question doesn't exist. This is conservative; we might not
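Note on how these composite masks are consumed: when the JIT compacts two blocks or splits one, it propagates whole groups of flags at once by masking bbFlags with BBF_COMPACT_UPD, BBF_SPLIT_LOST, or BBF_SPLIT_GAINED, which is why BBF_NEEDS_GCPOLL has to be added to each set. Below is a minimal standalone sketch of that pattern, using simplified flag values and a stand-in Block struct rather than the real JIT types.

#include <cstdint>
#include <cstdio>

// Illustrative stand-ins for a few BasicBlockFlags bits (values are arbitrary here).
constexpr uint64_t BBF_GC_SAFE_POINT = 1ull << 0;
constexpr uint64_t BBF_NEEDS_GCPOLL  = 1ull << 1;
constexpr uint64_t BBF_HAS_NULLCHECK = 1ull << 2;

// Composite masks in the spirit of block.h: which bits are lost/gained across a split.
constexpr uint64_t BBF_SPLIT_LOST   = BBF_GC_SAFE_POINT | BBF_NEEDS_GCPOLL;
constexpr uint64_t BBF_SPLIT_GAINED = BBF_HAS_NULLCHECK | BBF_NEEDS_GCPOLL;

struct Block
{
    uint64_t bbFlags;
};

int main()
{
    Block top    = { BBF_NEEDS_GCPOLL | BBF_HAS_NULLCHECK };
    Block bottom = { 0 };

    // Splitting: the bottom block conservatively gains the BBF_SPLIT_GAINED subset,
    // and the top block can no longer be assumed to keep the BBF_SPLIT_LOST subset.
    bottom.bbFlags |= (top.bbFlags & BBF_SPLIT_GAINED);
    top.bbFlags    &= ~BBF_SPLIT_LOST;

    printf("bottom needs gc poll: %d\n", (bottom.bbFlags & BBF_NEEDS_GCPOLL) != 0);
    return 0;
}

Without BBF_NEEDS_GCPOLL in BBF_SPLIT_GAINED, the poll request could silently disappear when a flagged block is split before the poll-insertion phase runs.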
27 changes: 22 additions & 5 deletions src/coreclr/jit/flowgraph.cpp
@@ -24,7 +24,7 @@
//
static bool blockNeedsGCPoll(BasicBlock* block)
{
- bool blockMayNeedGCPoll = false;
+ bool blockMayNeedGCPoll = (block->bbFlags & BBF_NEEDS_GCPOLL) != 0;
for (Statement* const stmt : block->NonPhiStatements())
{
if ((stmt->GetRootNode()->gtFlags & GTF_CALL) != 0)
@@ -86,9 +86,19 @@ PhaseStatus Compiler::fgInsertGCPolls()

// When optimizations are enabled, we can't rely on BBF_HAS_SUPPRESSGC_CALL flag:
// the call could've been moved, e.g., hoisted from a loop, CSE'd, etc.
- if (opts.OptimizationDisabled() ? ((block->bbFlags & BBF_HAS_SUPPRESSGC_CALL) == 0) : !blockNeedsGCPoll(block))
+ if (opts.OptimizationDisabled())
{
- continue;
+ if ((block->bbFlags & (BBF_HAS_SUPPRESSGC_CALL | BBF_NEEDS_GCPOLL)) == 0)
+ {
+ continue;
+ }
}
+ else
+ {
+ if (!blockNeedsGCPoll(block))
+ {
+ continue;
+ }
+ }

result = PhaseStatus::MODIFIED_EVERYTHING;
@@ -189,7 +199,14 @@ BasicBlock* Compiler::fgCreateGCPoll(GCPollType pollType, BasicBlock* block)
createdPollBlocks = false;

Statement* newStmt = nullptr;
- if (block->KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_NONE))
+
+ if ((block->bbFlags & BBF_NEEDS_GCPOLL) != 0)
+ {
+ // This is a block that ends in a tail call; gc probe early.
+ //
+ newStmt = fgNewStmtAtBeg(block, call);
+ }
+ else if (block->KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_NONE))
{
// For BBJ_ALWAYS, BBJ_CALLFINALLY, and BBJ_NONE we don't need to insert it before the condition.
// Just append it.
@@ -312,7 +329,7 @@ BasicBlock* Compiler::fgCreateGCPoll(GCPollType pollType, BasicBlock* block)
{
stmt = stmt->GetNextStmt();
}
- fgRemoveStmt(top, stmt);
+ fgUnlinkStmt(top, stmt);
fgInsertStmtAtEnd(bottom, stmt);
}

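The net effect of the fgInsertGCPolls change is a two-way gate: under MinOpts the per-block flags set during import/morph are still trustworthy, so BBF_NEEDS_GCPOLL (or BBF_HAS_SUPPRESSGC_CALL) alone decides; with optimizations enabled, calls may have moved, so blockNeedsGCPoll rescans the statements but is now pre-seeded by the flag. A condensed sketch of that decision follows, using simplified stand-in types rather than the real Compiler/BasicBlock interfaces.

#include <cstdint>
#include <vector>

constexpr uint64_t BBF_HAS_SUPPRESSGC_CALL = 1ull << 0;
constexpr uint64_t BBF_NEEDS_GCPOLL        = 1ull << 1;

struct Stmt
{
    bool isSuppressGCTransitionCall; // simplified: the only statement property this sketch cares about
};

struct Block
{
    uint64_t          bbFlags;
    std::vector<Stmt> stmts;
};

// Pre-seeded scan: a block flagged at morph time needs a poll even if the scan finds nothing.
static bool blockNeedsGCPoll(const Block& block)
{
    bool needsPoll = (block.bbFlags & BBF_NEEDS_GCPOLL) != 0;
    for (const Stmt& stmt : block.stmts)
    {
        if (stmt.isSuppressGCTransitionCall)
        {
            needsPoll = true;
        }
    }
    return needsPoll;
}

// Per-block gate used by the poll-insertion phase.
static bool shouldInsertPoll(const Block& block, bool optimizationsDisabled)
{
    if (optimizationsDisabled)
    {
        // MinOpts: trust the flags; nothing has moved since they were set.
        return (block.bbFlags & (BBF_HAS_SUPPRESSGC_CALL | BBF_NEEDS_GCPOLL)) != 0;
    }
    // Optimizations may have hoisted or CSE'd calls; rescan the block.
    return blockNeedsGCPoll(block);
}

int main()
{
    Block b{BBF_NEEDS_GCPOLL, {}};
    return shouldInsertPoll(b, /*optimizationsDisabled*/ false) ? 0 : 1;
}

In the MinOpts branch this makes BBF_NEEDS_GCPOLL behave like BBF_HAS_SUPPRESSGC_CALL already did: a per-block poll request that survives unchanged until the insertion phase.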
21 changes: 13 additions & 8 deletions src/coreclr/jit/morph.cpp
@@ -6270,7 +6270,7 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call)
// if we're not going to return and the helper doesn't have enough info
// to safely poll, so we poll before the tail call, if the block isn't
// already safe. Since tail call via helper is a slow mechanism it
- // doesn't matter whether we emit GC poll. his is done to be in parity
+ // doesn't matter whether we emit GC poll. This is done to be in parity
// with Jit64. Also this avoids GC info size increase if all most all
// methods are expected to be tail calls (e.g. F#).
//
@@ -6287,15 +6287,18 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call)
// fgSetBlockOrder() is going to mark the method as fully interruptible
// if the block containing this tail call is reachable without executing
// any call.
- BasicBlock* curBlock = compCurBB;
- if (canFastTailCall || (fgFirstBB->bbFlags & BBF_GC_SAFE_POINT) || (compCurBB->bbFlags & BBF_GC_SAFE_POINT) ||
- (fgCreateGCPoll(GCPOLL_INLINE, compCurBB) == curBlock))
+ if (canFastTailCall || (fgFirstBB->bbFlags & BBF_GC_SAFE_POINT) || (compCurBB->bbFlags & BBF_GC_SAFE_POINT))
{
- // We didn't insert a poll block, so we need to morph the call now
- // (Normally it will get morphed when we get to the split poll block)
- GenTree* temp = fgMorphCall(call);
- noway_assert(temp == call);
+ // No gc poll needed
}
+ else
+ {
+ JITDUMP("Marking " FMT_BB " as needs gc poll\n", compCurBB->bbNum);
+ compCurBB->bbFlags |= BBF_NEEDS_GCPOLL;
+ optMethodFlags |= OMF_NEEDS_GCPOLLS;
+ }

+ fgMorphCall(call);
+
// Fast tail call: in case of fast tail calls, we need a jmp epilog and
// hence mark it as BBJ_RETURN with BBF_JMP flag set.
@@ -14447,6 +14450,7 @@ void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, Statement* stmt)
BasicBlock* cond1Block = fgNewBBafter(BBJ_COND, block, true, remainderBlock);
BasicBlock* asgBlock = fgNewBBafter(BBJ_NONE, block, true);

+ block->bbFlags &= ~BBF_NEEDS_GCPOLL;
remainderBlock->bbFlags |= propagateFlags;

// These blocks are only internal if 'block' is (but they've been set as internal by fgNewBBafter).
@@ -14637,6 +14641,7 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, Statement* stmt)
elseBlock->bbFlags |= BBF_IMPORTED;
}

+ block->bbFlags &= ~BBF_NEEDS_GCPOLL;
remainderBlock->bbFlags |= (propagateFlagsToRemainder | propagateFlagsToAll);

condBlock->inheritWeight(block);
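On the morph.cpp side the work is now deferred: fgMorphPotentialTailCall no longer calls fgCreateGCPoll inline during morph; it only tags the block (and sets OMF_NEEDS_GCPOLLS on the method) and lets the later fgInsertGCPolls phase materialize the poll at the start of the block. A minimal sketch of that mark-then-expand pattern, with hypothetical helper names rather than the actual compiler phases:

#include <cstdint>
#include <cstdio>

constexpr uint64_t BBF_GC_SAFE_POINT = 1ull << 0;
constexpr uint64_t BBF_NEEDS_GCPOLL  = 1ull << 1;

struct Block
{
    uint64_t bbFlags     = 0;
    bool     hasPollStmt = false; // stands in for a GC poll statement at the start of the block
};

// Morph-time: record that a poll will be needed; no IR surgery happens here.
void markTailCallBlockForPoll(Block& block, bool canFastTailCall, bool methodEntryIsGCSafe)
{
    if (canFastTailCall || methodEntryIsGCSafe || ((block.bbFlags & BBF_GC_SAFE_POINT) != 0))
    {
        return; // already safe, or a fast tail call: no poll needed
    }
    block.bbFlags |= BBF_NEEDS_GCPOLL;
}

// Later phase: expand the flag into an actual poll at the beginning of the block,
// mirroring the fgNewStmtAtBeg(block, call) path taken for BBF_NEEDS_GCPOLL blocks.
void insertPolls(Block& block)
{
    if ((block.bbFlags & BBF_NEEDS_GCPOLL) != 0)
    {
        block.hasPollStmt = true;
    }
}

int main()
{
    Block b;
    markTailCallBlockForPoll(b, /*canFastTailCall*/ false, /*methodEntryIsGCSafe*/ false);
    insertPolls(b);
    printf("poll inserted: %d\n", b.hasPollStmt ? 1 : 0);
    return 0;
}

Placing the poll statement at the beginning of the flagged block keeps the probe ahead of the slow tail-call dispatch without splitting the block or creating new poll blocks for this case.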