Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

JIT: Extend escape analysis to account for arrays with non-gcref elements #104906

Open
wants to merge 59 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 10 commits
Commits
Show all changes
59 commits
Select commit Hold shift + click to select a range
1b0e3d3
initial prototype
hez2010 Jul 15, 2024
57b7e42
Morph ARR_LENGTH and INDEX_ADDR
hez2010 Jul 15, 2024
1b5b25e
Fix incorrect array length storage
hez2010 Jul 15, 2024
395b735
Use offset and correct type
hez2010 Jul 15, 2024
17de70b
handle reassignment
hez2010 Jul 15, 2024
5443c42
range check
hez2010 Jul 15, 2024
b2d07da
throw range check failure
hez2010 Jul 15, 2024
b5ae9e7
update comments
hez2010 Jul 15, 2024
87b29de
add metrics
hez2010 Jul 15, 2024
eeb681d
minor cleanup
hez2010 Jul 15, 2024
dee9f38
Introduce new temp and implement local address morphing
hez2010 Jul 16, 2024
94c103b
handle index out-of-range
hez2010 Jul 16, 2024
12b297b
Refactor to remove duplicates
hez2010 Jul 16, 2024
e0fa91e
Remove invalid asserts
hez2010 Jul 16, 2024
9e0a04f
make compiler happy
hez2010 Jul 16, 2024
ae822f8
Address review feedbacks
hez2010 Jul 16, 2024
a4588bb
Fix INDEX_ADDR and add Sub
hez2010 Jul 16, 2024
32b9e26
Support IsAddressLessThan and its friends
hez2010 Jul 16, 2024
39d1ad9
Fix assertions
hez2010 Jul 16, 2024
0df0d58
Merge remote-tracking branch 'origin/main' into value-array-stack-alloc
hez2010 Jul 16, 2024
9f408b2
Use new overload
hez2010 Jul 16, 2024
418a62b
JIT: Remove GTF_IND_INVARIANT and GTF_IND_NONFAULTING flags checking
jakobbotsch Jul 16, 2024
4572408
Remove old comment
jakobbotsch Jul 16, 2024
9255762
Expose jitconfig
hez2010 Jul 16, 2024
1af84b9
Remove another assert
jakobbotsch Jul 16, 2024
629c793
Count
jakobbotsch Jul 16, 2024
b578203
Try 2 at counting
jakobbotsch Jul 16, 2024
b4445f6
Introduce BBF_HAS_NEWARR
hez2010 Jul 16, 2024
af9c40e
Early exit on debug as well
hez2010 Jul 16, 2024
8b54f5a
Update computed flags
hez2010 Jul 16, 2024
cba4d2c
Merge remote-tracking branch 'jakobbotsch/indir-flags' into value-arr…
hez2010 Jul 17, 2024
b497fae
Merge remote-tracking branch 'origin/main' into value-array-stack-alloc
hez2010 Jul 17, 2024
6eca58d
Partially revert 39d1ad9
hez2010 Jul 17, 2024
49d8509
Reuse existing comma node
hez2010 Jul 17, 2024
4c6e359
Respect IsBoundsChecked
hez2010 Jul 17, 2024
4d84379
Check lowerbound too
hez2010 Jul 17, 2024
c0cad85
Fix assertion take 2
hez2010 Jul 18, 2024
d28553a
Remove redundant jit-ee calls
hez2010 Jul 18, 2024
f42e78f
Merge branch 'main' into value-array-stack-alloc
hez2010 Jul 18, 2024
c21c4f7
Fix assertion again
hez2010 Jul 18, 2024
18ec558
Check array length
hez2010 Jul 18, 2024
eadb4ad
Fix assertion in another way
hez2010 Jul 18, 2024
9d4021c
Unset the flag to avoid unnecessary assert
hez2010 Jul 18, 2024
1cba8c0
Merge branch 'main' into value-array-stack-alloc
hez2010 Jul 19, 2024
1fff53e
Add tests
hez2010 Jul 19, 2024
d521a94
sigh
hez2010 Jul 19, 2024
97ee2be
Support R2R/NativeAOT
hez2010 Jul 19, 2024
c66bdb8
Merge branch 'main' into value-array-stack-alloc
hez2010 Jul 19, 2024
5bcb786
Fix building
hez2010 Jul 19, 2024
a01562e
cleanup
hez2010 Jul 19, 2024
e728d4f
remove invalid assert
hez2010 Jul 19, 2024
d73c5c5
double align on 32bit platform
hez2010 Jul 19, 2024
c9fea23
Use correct alignment for align8
hez2010 Jul 19, 2024
772bee6
Fix intrinsic expansion
hez2010 Jul 20, 2024
499ac0b
Merge branch 'main' into value-array-stack-alloc
hez2010 Jul 20, 2024
ffcbd85
Merge remote-tracking branch 'origin/main' into value-array-stack-alloc
hez2010 Jul 22, 2024
4789090
Merge branch 'main' into value-array-stack-alloc
hez2010 Aug 24, 2024
1b33170
Merge branch 'main' into value-array-stack-alloc
hez2010 Sep 20, 2024
9c81c04
Address some review feedback
hez2010 Sep 21, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/coreclr/jit/jitmetadatalist.h
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,8 @@ JITMETADATAMETRIC(NewRefClassHelperCalls, int, 0)
JITMETADATAMETRIC(StackAllocatedRefClasses, int, 0)
JITMETADATAMETRIC(NewBoxedValueClassHelperCalls, int, 0)
JITMETADATAMETRIC(StackAllocatedBoxedValueClasses, int, 0)
JITMETADATAMETRIC(NewArrayHelperCalls, int, 0)
JITMETADATAMETRIC(StackAllocatedArrays, int, 0)

#undef JITMETADATA
#undef JITMETADATAINFO
Expand Down
228 changes: 224 additions & 4 deletions src/coreclr/jit/objectalloc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,9 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
//
PhaseStatus ObjectAllocator::DoPhase()
{
if ((comp->optMethodFlags & OMF_HAS_NEWOBJ) == 0)
if ((comp->optMethodFlags & OMF_HAS_NEWOBJ) == 0 && (comp->optMethodFlags & OMF_HAS_NEWARRAY) == 0)
{
JITDUMP("no newobjs in this method; punting\n");
JITDUMP("no newobjs or newarr in this method; punting\n");
return PhaseStatus::MODIFIED_NOTHING;
}

Expand Down Expand Up @@ -89,6 +89,27 @@ void ObjectAllocator::MarkLclVarAsEscaping(unsigned int lclNum)
BitVecOps::AddElemD(&m_bitVecTraits, m_EscapingPointers, lclNum);
}

//------------------------------------------------------------------------------
// MarkLclVarHasLocalStore : Record that a local variable is the target of a
//    local store.
//
// Arguments:
//    lclNum - number of the local variable being stored to
//
// Return Value:
//    true  - this is the first local store recorded for the variable
//    false - the variable was already marked as having a local store

bool ObjectAllocator::MarkLclVarHasLocalStore(unsigned int lclNum)
{
    const bool alreadySeen = BitVecOps::IsMember(&m_bitVecTraits, m_PointersHasLocalStore, lclNum);

    if (!alreadySeen)
    {
        BitVecOps::AddElemD(&m_bitVecTraits, m_PointersHasLocalStore, lclNum);
    }

    return !alreadySeen;
}

//------------------------------------------------------------------------------
// MarkLclVarAsPossiblyStackPointing : Mark local variable as possibly pointing
// to a stack-allocated object.
Expand Down Expand Up @@ -140,6 +161,7 @@ void ObjectAllocator::DoAnalysis()
if (comp->lvaCount > 0)
{
m_EscapingPointers = BitVecOps::MakeEmpty(&m_bitVecTraits);
m_PointersHasLocalStore = BitVecOps::MakeEmpty(&m_bitVecTraits);
m_ConnGraphAdjacencyMatrix = new (comp->getAllocator(CMK_ObjectAllocator)) BitSetShortLongRep[comp->lvaCount];

MarkEscapingVarsAndBuildConnGraph();
Expand Down Expand Up @@ -201,7 +223,9 @@ void ObjectAllocator::MarkEscapingVarsAndBuildConnGraph()

if (tree->OperIsLocalStore())
{
lclEscapes = false;
// conservatively marking locals being reassigned as escaping
// this can happen on arrays
lclEscapes = !m_allocator->MarkLclVarHasLocalStore(lclNum);
}
else if (tree->OperIs(GT_LCL_VAR) && tree->TypeIs(TYP_REF, TYP_BYREF, TYP_I_IMPL))
{
Expand Down Expand Up @@ -391,6 +415,7 @@ bool ObjectAllocator::MorphAllocObjNodes()
GenTree* data = nullptr;

bool canonicalAllocObjFound = false;
bool canonicalAllocArrFound = false;

if (stmtExpr->OperIs(GT_STORE_LCL_VAR) && stmtExpr->TypeIs(TYP_REF))
{
Expand All @@ -400,6 +425,81 @@ bool ObjectAllocator::MorphAllocObjNodes()
{
canonicalAllocObjFound = true;
}
else if (data->IsHelperCall())
{
GenTreeCall* call = data->AsCall();
if (call->GetHelperNum() == CORINFO_HELP_NEWARR_1_VC &&
call->gtArgs.GetArgByIndex(0)->GetNode()->IsCnsIntOrI())
hez2010 marked this conversation as resolved.
Show resolved Hide resolved
{
canonicalAllocArrFound = true;
}
}
}

if (canonicalAllocArrFound)
{
GenTreeCall* asCall = data->AsCall();
assert(asCall->GetHelperNum() == CORINFO_HELP_NEWARR_1_VC);

unsigned int lclNum = stmtExpr->AsLclVar()->GetLclNum();
CallArg* arg = asCall->gtArgs.GetArgByIndex(0);
GenTree* node = arg->GetNode();
CORINFO_CLASS_HANDLE clsHnd = (CORINFO_CLASS_HANDLE)node->AsIntConCommon()->IntegralValue();
GenTree* len = arg->GetNext()->GetNode();
const char* onHeapReason = nullptr;
unsigned int blockSize = 0;
bool canStack = false;

// Don't attempt to do stack allocations inside basic blocks that may be in a loop.
//
if (!IsObjectStackAllocationEnabled())
{
onHeapReason = "[object stack allocation disabled]";
canStack = false;
}
else if (basicBlockHasBackwardJump)
{
onHeapReason = "[alloc in loop]";
canStack = false;
}
else if (!len->IsCnsIntOrI())
{
onHeapReason = "[non-constant size]";
canStack = false;
}
else if (!CanAllocateLclVarOnStack(lclNum, clsHnd, (unsigned int)len->AsIntCon()->gtIconVal, &blockSize,
&onHeapReason))
{
// reason set by the call
canStack = false;
}
else
{
JITDUMP("Allocating V%02u on the stack\n", lclNum);
canStack = true;
const unsigned int stackLclNum =
MorphNewArrNodeIntoStackAlloc(asCall, clsHnd, (unsigned int)len->AsIntCon()->gtIconVal,
blockSize, block, stmt);
m_HeapLocalToStackLocalMap.AddOrUpdate(lclNum, stackLclNum);
m_LocalArrToLenMap.AddOrUpdate(stackLclNum, (unsigned int)len->AsIntCon()->gtIconVal);
// We keep the set of possibly-stack-pointing pointers as a superset of the set of
// definitely-stack-pointing pointers. All definitely-stack-pointing pointers are in both sets.
MarkLclVarAsDefinitelyStackPointing(lclNum);
MarkLclVarAsPossiblyStackPointing(lclNum);
stmt->GetRootNode()->gtBashToNOP();
comp->optMethodFlags |= OMF_HAS_OBJSTACKALLOC;
didStackAllocate = true;
}
hez2010 marked this conversation as resolved.
Show resolved Hide resolved

if (canStack)
{
comp->Metrics.StackAllocatedArrays++;
}
else
{
assert(onHeapReason != nullptr);
JITDUMP("Allocating V%02u on the heap: %s\n", lclNum, onHeapReason);
}
}

if (canonicalAllocObjFound)
Expand Down Expand Up @@ -443,7 +543,7 @@ bool ObjectAllocator::MorphAllocObjNodes()
onHeapReason = "[alloc in loop]";
canStack = false;
}
else if (!CanAllocateLclVarOnStack(lclNum, clsHnd, &onHeapReason))
else if (!CanAllocateLclVarOnStack(lclNum, clsHnd, 0, nullptr, &onHeapReason))
{
// reason set by the call
canStack = false;
Expand Down Expand Up @@ -555,6 +655,78 @@ GenTree* ObjectAllocator::MorphAllocObjNodeIntoHelperCall(GenTreeAllocObj* alloc
return helperCall;
}

//------------------------------------------------------------------------
// MorphNewArrNodeIntoStackAlloc: Morph a newarr helper call into a
//    stack-allocated array temp.
//
// Arguments:
//    newArr    - the array-allocation helper call being replaced
//    clsHnd    - class handle passed to the helper (validated here only;
//                the temp's layout is derived from blockSize)
//    length    - constant element count of the array
//    blockSize - total size, in bytes, of the stack-allocated array
//    block     - basic block containing stmt
//    stmt      - statement containing the allocation; initialization
//                statements are inserted before it
//
// Return Value:
//    Local variable number of the new stack-allocated array temp.
//
// Notes:
//    Only creates and initializes the temp (zero-init if required, method
//    table pointer, array length); the caller is expected to rewrite uses
//    of the original heap local.
//
unsigned int ObjectAllocator::MorphNewArrNodeIntoStackAlloc(GenTreeCall* newArr,
                                                            CORINFO_CLASS_HANDLE clsHnd,
                                                            unsigned int length,
                                                            unsigned int blockSize,
                                                            BasicBlock* block,
                                                            Statement* stmt)
{
    assert(newArr != nullptr);
    assert(m_AnalysisDone);
    assert(clsHnd != NO_CLASS_HANDLE);

    const bool shortLifetime = false;
    const unsigned int lclNum = comp->lvaGrabTemp(shortLifetime DEBUGARG("stack allocated array temp"));

    // Give the temp a block layout big enough to hold the array header plus its elements.
    comp->lvaSetStruct(lclNum, comp->typGetBlkLayout(blockSize), /* unsafeValueClsCheck */ false);

    // Initialize the object memory if necessary.
    bool bbInALoop = block->HasFlag(BBF_BACKWARD_JUMP);
    bool bbIsReturn = block->KindIs(BBJ_RETURN);
    LclVarDsc* const lclDsc = comp->lvaGetDesc(lclNum);
    // This temp is a stack-allocated array, never a boxed value class.
    lclDsc->lvStackAllocatedBox = false;
    if (comp->fgVarNeedsExplicitZeroInit(lclNum, bbInALoop, bbIsReturn))
    {
        //------------------------------------------------------------------------
        // STMTx (IL 0x... ???)
        // * STORE_LCL_VAR struct
        // \--* CNS_INT int 0
        //------------------------------------------------------------------------

        GenTree* init = comp->gtNewStoreLclVarNode(lclNum, comp->gtNewIconNode(0));
        Statement* initStmt = comp->gtNewStmt(init);

        comp->fgInsertStmtBefore(block, stmt, initStmt);
    }
    else
    {
        // The prolog zero-init covers this temp, so skip the explicit store.
        JITDUMP("\nSuppressing zero-init for V%02u -- expect to zero in prolog\n", lclNum);
        lclDsc->lvSuppressedZeroInit = 1;
        comp->compSuppressedZeroInit = true;
    }

    // Initialize the method table pointer from the helper call's first argument.
    //
    //------------------------------------------------------------------------
    // STMTx (IL 0x... ???)
    // * STORE_LCL_FLD long
    // \--* CNS_INT(h) long
    //------------------------------------------------------------------------

    GenTree* init = comp->gtNewStoreLclFldNode(lclNum, TYP_I_IMPL, 0, newArr->gtArgs.GetArgByIndex(0)->GetNode());
    Statement* initStmt = comp->gtNewStmt(init);

    comp->fgInsertStmtBefore(block, stmt, initStmt);

    // Initialize the array length.
    //
    //------------------------------------------------------------------------
    // STMTx (IL 0x... ???)
    // * STORE_LCL_FLD int
    // \--* CNS_INT int
    //------------------------------------------------------------------------

    // Store the total length of the array at the standard length-field offset.
    GenTree* len = comp->gtNewStoreLclFldNode(lclNum, TYP_INT, OFFSETOF__CORINFO_Array__length,
                                              comp->gtNewIconNode(length, TYP_INT));
    Statement* lenStmt = comp->gtNewStmt(len);
    comp->fgInsertStmtBefore(block, stmt, lenStmt);

    return lclNum;
}

//------------------------------------------------------------------------
// MorphAllocObjNodeIntoStackAlloc: Morph a GT_ALLOCOBJ node into stack
// allocation.
Expand Down Expand Up @@ -682,6 +854,8 @@ bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parent
case GT_EQ:
case GT_NE:
case GT_NULLCHECK:
case GT_ARR_LENGTH:
case GT_INDEX_ADDR:
hez2010 marked this conversation as resolved.
Show resolved Hide resolved
canLclVarEscapeViaParentStack = false;
break;

Expand Down Expand Up @@ -778,6 +952,8 @@ void ObjectAllocator::UpdateAncestorTypes(GenTree* tree, ArrayStack<GenTree*>* p
case GT_EQ:
case GT_NE:
case GT_NULLCHECK:
case GT_ARR_LENGTH:
case GT_INDEX_ADDR:
break;

case GT_COMMA:
Expand Down Expand Up @@ -1025,6 +1201,50 @@ void ObjectAllocator::RewriteUses()
}
}
}
// Rewrite INDEX_ADDR to ADD for stack-allocated arrays.
else if (tree->OperIs(GT_INDEX_ADDR))
{
GenTreeIndexAddr* const gtIndexAddr = tree->AsIndexAddr();
GenTree* const gtArr = gtIndexAddr->Arr();
GenTree* const gtInd = gtIndexAddr->Index();
unsigned int arrLen = 0;

if (gtArr->OperIs(GT_LCL_ADDR) && gtInd->IsCnsIntOrI() &&
m_allocator->m_LocalArrToLenMap.TryGetValue(gtArr->AsLclVarCommon()->GetLclNum(), &arrLen))
{
if ((unsigned int)gtInd->AsIntCon()->gtIconVal < arrLen)
{
JITDUMP("Rewriting INDEX_ADDR to ADD [%06u]\n", m_compiler->dspTreeID(tree));
const ssize_t offset =
OFFSETOF__CORINFO_Array__data + gtInd->AsIntCon()->gtIconVal * gtIndexAddr->gtElemSize;
GenTree* const gtOffset = m_compiler->gtNewIconNode(offset, TYP_I_IMPL);
GenTree* const gtAdd = m_compiler->gtNewOperNode(GT_ADD, TYP_BYREF, gtArr, gtOffset);
*use = gtAdd;
}
else
{
JITDUMP("Rewriting INDEX_ADDR to RNGCHKFAIL helper call [%06u]\n", m_compiler->dspTreeID(tree));
GenTree* const gtRngChkFail =
m_compiler->gtNewMustThrowException(CORINFO_HELP_RNGCHKFAIL, gtIndexAddr->gtType,
gtIndexAddr->gtStructElemClass);
*use = gtRngChkFail;
}
}
}
// Rewrite ARR_LENGTH to LCL_FLD for stack-allocated arrays.
else if (tree->OperIsArrLength())
{
GenTreeArrLen* const gtArrLen = tree->AsArrLen();
GenTree* const gtArr = gtArrLen->ArrRef();

if (gtArr->OperIs(GT_LCL_ADDR))
{
JITDUMP("Rewriting ARR_LENGTH to LCL_FLD [%06u]\n", m_compiler->dspTreeID(tree));
GenTree* const gtLclFld = m_compiler->gtNewLclFldNode(gtArr->AsLclVarCommon()->GetLclNum(), TYP_INT,
OFFSETOF__CORINFO_Array__length);
*use = gtLclFld;
}
}

return Compiler::fgWalkResult::WALK_CONTINUE;
}
Expand Down
Loading
Loading