Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Split assignment rationalization into a separate phase #85180

Merged
merged 9 commits into from
May 1, 2023
Merged
3 changes: 3 additions & 0 deletions src/coreclr/jit/compiler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1919,6 +1919,7 @@ void Compiler::compInit(ArenaAllocator* pAlloc,
compLocallocUsed = false;
compLocallocOptimized = false;
compQmarkRationalized = false;
compAssignmentRationalized = false;
compQmarkUsed = false;
compFloatingPointUsed = false;

Expand Down Expand Up @@ -5064,6 +5065,8 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
//
DoPhase(this, PHASE_DETERMINE_FIRST_COLD_BLOCK, &Compiler::fgDetermineFirstColdBlock);

DoPhase(this, PHASE_RATIONALIZE_ASSIGNMENTS, &Compiler::fgRationalizeAssignments);

#ifdef DEBUG
// Stash the current estimate of the function's size if necessary.
if (verbose)
Expand Down
8 changes: 7 additions & 1 deletion src/coreclr/jit/compiler.h
Original file line number Diff line number Diff line change
Expand Up @@ -2509,7 +2509,9 @@ class Compiler

GenTree* gtNewOneConNode(var_types type, var_types simdBaseType = TYP_UNDEF);

GenTreeLclVar* gtNewStoreLclVar(unsigned dstLclNum, GenTree* src);
GenTreeLclVar* gtNewStoreLclVarNode(unsigned lclNum, GenTree* data);

GenTreeLclFld* gtNewStoreLclFldNode(unsigned lclNum, var_types type, unsigned offset, GenTree* data);

GenTree* gtNewBlkOpNode(GenTree* dst, GenTree* srcOrFillVal, bool isVolatile = false);

Expand Down Expand Up @@ -4828,6 +4830,9 @@ class Compiler
void fgExpandQmarkStmt(BasicBlock* block, Statement* stmt);
void fgExpandQmarkNodes();

PhaseStatus fgRationalizeAssignments();
GenTree* fgRationalizeAssignment(GenTreeOp* assignment);

// Do "simple lowering." This functionality is (conceptually) part of "general"
// lowering that is distributed between fgMorph and the lowering phase of LSRA.
PhaseStatus fgSimpleLowering();
Expand Down Expand Up @@ -9230,6 +9235,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
bool compLocallocOptimized; // Does the method have an optimized localloc
bool compQmarkUsed; // Does the method use GT_QMARK/GT_COLON
bool compQmarkRationalized; // Is it allowed to use a GT_QMARK/GT_COLON node.
bool compAssignmentRationalized; // Have the ASG nodes been turned into their store equivalents?
bool compHasBackwardJump; // Does the method (or some inlinee) have a lexically backwards jump?
bool compHasBackwardJumpInHandler; // Does the method have a lexically backwards jump in a handler?
bool compSwitchedToOptimized; // Codegen initially was Tier0 but jit switched to FullOpts
Expand Down
1 change: 1 addition & 0 deletions src/coreclr/jit/compphases.h
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@ CompPhaseNameMacro(PHASE_IF_CONVERSION, "If conversion",
CompPhaseNameMacro(PHASE_VN_BASED_DEAD_STORE_REMOVAL,"VN-based dead store removal", false, -1, false)
CompPhaseNameMacro(PHASE_OPT_UPDATE_FLOW_GRAPH, "Update flow graph opt pass", false, -1, false)
CompPhaseNameMacro(PHASE_COMPUTE_EDGE_WEIGHTS2, "Compute edge weights (2, false)",false, -1, false)
CompPhaseNameMacro(PHASE_RATIONALIZE_ASSIGNMENTS, "Rationalize assignments", false, -1, false)
CompPhaseNameMacro(PHASE_STRESS_SPLIT_TREE, "Stress gtSplitTree", false, -1, false)
CompPhaseNameMacro(PHASE_EXPAND_RTLOOKUPS, "Expand runtime lookups", false, -1, true)
CompPhaseNameMacro(PHASE_EXPAND_STATIC_INIT, "Expand static init", false, -1, true)
Expand Down
4 changes: 2 additions & 2 deletions src/coreclr/jit/fgdiagnostic.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3436,7 +3436,7 @@ void Compiler::fgDebugCheckLinkedLocals()

bool ShouldLink(GenTree* node)
{
return node->OperIsLocal() || node->OperIs(GT_LCL_ADDR);
return node->OperIsAnyLocal();
}

public:
Expand Down Expand Up @@ -3524,7 +3524,7 @@ void Compiler::fgDebugCheckLinkedLocals()
int nodeIndex = 0;
for (GenTree* cur = first; cur != nullptr; cur = cur->gtNext)
{
success &= cur->OperIsLocal() || cur->OperIs(GT_LCL_ADDR);
success &= cur->OperIsAnyLocal();
success &= (nodeIndex < expected->Height()) && (cur == expected->Bottom(nodeIndex));
nodeIndex++;
}
Expand Down
182 changes: 182 additions & 0 deletions src/coreclr/jit/flowgraph.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2841,6 +2841,188 @@ PhaseStatus Compiler::fgFindOperOrder()
return PhaseStatus::MODIFIED_EVERYTHING;
}

//------------------------------------------------------------------------
// fgRationalizeAssignments: Rewrite assignment nodes into stores.
//
// Walks every statement in every block and replaces each GT_ASG node with
// its store equivalent via "fgRationalizeAssignment", then records that
// rationalization is done by setting "compAssignmentRationalized".
//
// Returns:
//   PhaseStatus::MODIFIED_EVERYTHING.
//
// TODO-ASG: delete.
//
PhaseStatus Compiler::fgRationalizeAssignments()
{
// Pre-order visitor that rewrites GT_ASG nodes in place, pruning subtrees
// that cannot contain assignments (for throughput).
class AssignmentRationalizationVisitor : public GenTreeVisitor<AssignmentRationalizationVisitor>
{
public:
enum
{
DoPreOrder = true
};

AssignmentRationalizationVisitor(Compiler* compiler) : GenTreeVisitor(compiler)
{
}

fgWalkResult PreOrderVisit(GenTree** use, GenTree* user)
{
GenTree* node = *use;

// GTF_ASG is sometimes not propagated from setup arg assignments so we have to check for GTF_CALL too.
if ((node->gtFlags & (GTF_ASG | GTF_CALL)) == 0)
{
return fgWalkResult::WALK_SKIP_SUBTREES;
}

if (node->OperIs(GT_ASG))
{
// Capture the operands' flags before rewriting: "*use" is replaced
// below, so the flags must be read off the original operands.
GenTreeFlags lhsRhsFlags = node->gtGetOp1()->gtFlags | node->gtGetOp2()->gtFlags;
*use = m_compiler->fgRationalizeAssignment(node->AsOp());

// TP: return early quickly for simple assignments.
if ((lhsRhsFlags & (GTF_ASG | GTF_CALL)) == 0)
{
return fgWalkResult::WALK_SKIP_SUBTREES;
}
}

return fgWalkResult::WALK_CONTINUE;
}
};

AssignmentRationalizationVisitor visitor(this);
for (BasicBlock* block : Blocks())
{
for (Statement* stmt : block->Statements())
{
// Visit the root node manually first: if it is a "simple" assignment
// (no nested ASGs/calls), the full tree walk can be skipped entirely.
GenTree** use = stmt->GetRootNodePointer();
if (visitor.PreOrderVisit(use, nullptr) == fgWalkResult::WALK_CONTINUE)
{
visitor.WalkTree(use, nullptr);
}
}
}

compAssignmentRationalized = true;

#ifdef DEBUG
// Under this stress mode, re-morph all statements so that morph gets
// exercised against the newly created store nodes.
if (JitConfig.JitStressMorphStores())
{
for (BasicBlock* block : Blocks())
{
for (Statement* stmt : block->Statements())
{
fgMorphBlockStmt(block, stmt DEBUGARG("fgRationalizeAssignments"));
}
}
}
#endif // DEBUG

return PhaseStatus::MODIFIED_EVERYTHING;
}

//------------------------------------------------------------------------
// fgRationalizeAssignment: Rewrite GT_ASG into a store node.
//
// The assignment's location ("LHS") node is mutated in place into the
// corresponding store oper, the value ("RHS") becomes the store's data
// operand, effect flags are merged onto the store, and, when the IR is
// threaded (AllTrees), the gtPrev/gtNext links are fixed up so the store
// takes the assignment's place in execution order.
//
// Arguments:
//   assignment - The assignment node to rewrite
//
// Return Value:
//   Assignment's location, turned into the appropriate store node.
//
GenTree* Compiler::fgRationalizeAssignment(GenTreeOp* assignment)
{
assert(assignment->OperGet() == GT_ASG);

bool isReverseOp = assignment->IsReverseOp();
GenTree* location = assignment->gtGetOp1();
GenTree* value = assignment->gtGetOp2();
// A local used as the location must be marked as a definition; a local
// used as the value must not be.
if (location->OperIsLocal())
{
assert((location->gtFlags & GTF_VAR_DEF) != 0);
}
else if (value->OperIs(GT_LCL_VAR))
{
assert((value->gtFlags & GTF_VAR_DEF) == 0);
}

if (assignment->OperIsInitBlkOp())
{
// No SIMD types are allowed for InitBlks (including zero-inits).
assert(assignment->TypeIs(TYP_STRUCT) && location->TypeIs(TYP_STRUCT));
}

// Map the location's oper to its store counterpart.
genTreeOps storeOp;
switch (location->OperGet())
{
case GT_LCL_VAR:
storeOp = GT_STORE_LCL_VAR;
break;
case GT_LCL_FLD:
storeOp = GT_STORE_LCL_FLD;
break;
case GT_BLK:
storeOp = GT_STORE_BLK;
break;
case GT_IND:
storeOp = GT_STOREIND;
break;
default:
unreached();
}

JITDUMP("Rewriting GT_ASG(%s, X) to %s(X)\n", GenTree::OpName(location->OperGet()), GenTree::OpName(storeOp));

// Mutate the location node in place into the store and attach the value.
GenTree* store = location;
store->SetOperRaw(storeOp);
store->Data() = value;
store->gtFlags |= GTF_ASG;
store->AddAllEffectsFlags(value);
store->AddAllEffectsFlags(assignment->gtFlags & GTF_GLOB_REF); // TODO-ASG: zero-diff quirk, delete.
// Preserve the reversed evaluation order on indirect stores only; local
// stores are excluded here (they carry just the data operand).
if (isReverseOp && !store->OperIsLocalStore())
{
store->SetReverseOp();
}

if (storeOp == GT_STOREIND)
{
store->AsStoreInd()->SetRMWStatusDefault();
}

// Fix up the execution-order links if the IR is threaded:
// [..., LHS, ..., RHS, ASG] -> [..., ..., RHS, LHS<STORE>] (normal)
// [..., RHS, ..., LHS, ASG] -> [..., RHS, ..., LHS<STORE>] (reversed)
if (assignment->gtPrev != nullptr)
{
assert(fgNodeThreading == NodeThreading::AllTrees);
if (isReverseOp)
{
// Reversed: LHS already directly precedes ASG, so just splice the
// ASG node out of the list.
GenTree* nextNode = assignment->gtNext;
store->gtNext = nextNode;
if (nextNode != nullptr)
{
nextNode->gtPrev = store;
}
}
else
{
// Normal: unlink the store (old LHS) from its current position...
if (store->gtPrev != nullptr)
{
store->gtPrev->gtNext = store->gtNext;
}
store->gtNext->gtPrev = store->gtPrev;

// ...and re-link it where the assignment used to be.
store->gtPrev = assignment->gtPrev;
store->gtNext = assignment->gtNext;
store->gtPrev->gtNext = store;
if (store->gtNext != nullptr)
{
store->gtNext->gtPrev = store;
}
}
}

DISPNODE(store);
JITDUMP("\n");

return store;
}

//------------------------------------------------------------------------
// fgSimpleLowering: do full walk of all IR, lowering selected operations
// and computing lvaOutgoingArgSpaceSize.
Expand Down
Loading