From 5d676a2046f71e80003f51dbd92c94f2a6d837b6 Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Mon, 8 May 2023 21:01:39 +0200 Subject: [PATCH 01/16] JIT: Physical promotion liveness --- src/coreclr/jit/CMakeLists.txt | 1 + src/coreclr/jit/promotion.cpp | 418 ++++++++++++++++++-- src/coreclr/jit/promotion.h | 110 +++++- src/coreclr/jit/promotiondecomposition.cpp | 405 +------------------ src/coreclr/jit/promotionliveness.cpp | 433 +++++++++++++++++++++ 5 files changed, 932 insertions(+), 435 deletions(-) create mode 100644 src/coreclr/jit/promotionliveness.cpp diff --git a/src/coreclr/jit/CMakeLists.txt b/src/coreclr/jit/CMakeLists.txt index c7c6ddb30b9284..ef9eaf8f402bd9 100644 --- a/src/coreclr/jit/CMakeLists.txt +++ b/src/coreclr/jit/CMakeLists.txt @@ -160,6 +160,7 @@ set( JIT_SOURCES phase.cpp promotion.cpp promotiondecomposition.cpp + promotionliveness.cpp rangecheck.cpp rationalize.cpp redundantbranchopts.cpp diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index 1ece87d7c3b50d..5c39f202bfd61b 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -126,6 +126,57 @@ inline AccessKindFlags& operator&=(AccessKindFlags& a, AccessKindFlags b) return a = (AccessKindFlags)((uint32_t)a & (uint32_t)b); } +bool AggregateInfo::OverlappingReplacements(unsigned offset, unsigned size, Replacement** firstReplacement, Replacement** endReplacement) +{ + size_t firstIndex = Promotion::BinarySearch(Replacements, offset); + if ((ssize_t)firstIndex < 0) + { + firstIndex = ~firstIndex; + if (firstIndex > 0) + { + Replacement& lastRepBefore = Replacements[firstIndex - 1]; + if ((lastRepBefore.Offset + genTypeSize(lastRepBefore.AccessType)) > offset) + { + // Overlap with last entry starting before offs. + firstIndex--; + } + else if (firstIndex >= Replacements.size()) + { + // Starts after last replacement ends. 
+ return false; + } + } + + const Replacement& first = Replacements[firstIndex]; + if (first.Offset >= (offset + size)) + { + // First candidate starts after this ends. + return false; + } + } + + assert((firstIndex < Replacements.size()) && Replacements[firstIndex].Overlaps(offset, size)); + *firstReplacement = &Replacements[firstIndex]; + + if (endReplacement != nullptr) + { + size_t lastIndex = Promotion::BinarySearch(Replacements, offset + size); + if ((ssize_t)lastIndex < 0) + { + lastIndex = ~lastIndex; + } + + // Since we verified above that there is an overlapping replacement + // we know that lastIndex exists and is the next one that does not + // overlap. + assert(lastIndex > 0); + *endReplacement = Replacements.data() + lastIndex; + } + + return true; +} + + // Tracks all the accesses into one particular struct local. class LocalUses { @@ -223,9 +274,9 @@ class LocalUses // Parameters: // comp - Compiler instance // lclNum - Local num for this struct local - // replacements - [out] Pointer to vector to create and insert replacements into + // aggregateInfo - [out] Pointer to aggregate info to create and insert replacements into. 
// - void PickPromotions(Compiler* comp, unsigned lclNum, jitstd::vector** replacements) + void PickPromotions(Compiler* comp, unsigned lclNum, AggregateInfo** aggregateInfo) { if (m_accesses.size() <= 0) { @@ -261,13 +312,13 @@ class LocalUses LclVarDsc* dsc = comp->lvaGetDesc(newLcl); dsc->lvType = access.AccessType; - if (*replacements == nullptr) + if (*aggregateInfo == nullptr) { - *replacements = - new (comp, CMK_Promotion) jitstd::vector(comp->getAllocator(CMK_Promotion)); + *aggregateInfo = + new (comp, CMK_Promotion) AggregateInfo(comp->getAllocator(CMK_Promotion)); } - (*replacements)->push_back(Replacement(access.Offset, access.AccessType, newLcl DEBUGARG(bufp))); + (*aggregateInfo)->Replacements.push_back(Replacement(access.Offset, access.AccessType, newLcl DEBUGARG(bufp))); } JITDUMP("\n"); @@ -618,6 +669,283 @@ bool Replacement::Overlaps(unsigned otherStart, unsigned otherSize) const return true; } +bool StructSegments::Segment::IntersectsInclusive(const Segment& other) const +{ + if (End < other.Start) + { + return false; + } + + if (other.End < Start) + { + return false; + } + + return true; +} + +bool StructSegments::Segment::Contains(const Segment& other) const +{ + return (other.Start >= Start) && (other.End <= End); +} + +void StructSegments::Segment::Merge(const Segment& other) +{ + Start = min(Start, other.Start); + End = max(End, other.End); +} + +//------------------------------------------------------------------------ +// Add: +// Add a segment to the data structure. +// +// Parameters: +// segment - The segment to add. 
+// +void StructSegments::Add(const Segment& segment) +{ + size_t index = Promotion::BinarySearch(m_segments, segment.Start); + + if ((ssize_t)index < 0) + { + index = ~index; + } + + m_segments.insert(m_segments.begin() + index, segment); + size_t endIndex; + for (endIndex = index + 1; endIndex < m_segments.size(); endIndex++) + { + if (!m_segments[index].IntersectsInclusive(m_segments[endIndex])) + { + break; + } + + m_segments[index].Merge(m_segments[endIndex]); + } + + m_segments.erase(m_segments.begin() + index + 1, m_segments.begin() + endIndex); +} + +//------------------------------------------------------------------------ +// Subtract: +// Subtract a segment from the data structure. +// +// Parameters: +// segment - The segment to subtract. +// +void StructSegments::Subtract(const Segment& segment) +{ + size_t index = Promotion::BinarySearch(m_segments, segment.Start); + if ((ssize_t)index < 0) + { + index = ~index; + } + else + { + // Start == segment[index].End, which makes it non-interesting. + index++; + } + + if (index >= m_segments.size()) + { + return; + } + + // Here we know Start < segment[index].End. Do they not intersect at all? + if (m_segments[index].Start >= segment.End) + { + // Does not intersect any segment. 
+ return; + } + + assert(m_segments[index].IntersectsInclusive(segment)); + + if (m_segments[index].Contains(segment)) + { + if (segment.Start > m_segments[index].Start) + { + // New segment (existing.Start, segment.Start) + if (segment.End < m_segments[index].End) + { + m_segments.insert(m_segments.begin() + index, Segment(m_segments[index].Start, segment.Start)); + + // And new segment (segment.End, existing.End) + m_segments[index + 1].Start = segment.End; + return; + } + + m_segments[index].End = segment.Start; + return; + } + if (segment.End < m_segments[index].End) + { + // New segment (segment.End, existing.End) + m_segments[index].Start = segment.End; + return; + } + + // Full segment is being removed + m_segments.erase(m_segments.begin() + index); + return; + } + + if (segment.Start > m_segments[index].Start) + { + m_segments[index].End = segment.Start; + index++; + } + + size_t endIndex = Promotion::BinarySearch(m_segments, segment.End); + if ((ssize_t)endIndex >= 0) + { + m_segments.erase(m_segments.begin() + index, m_segments.begin() + endIndex + 1); + return; + } + + endIndex = ~endIndex; + if (endIndex == m_segments.size()) + { + m_segments.erase(m_segments.begin() + index, m_segments.end()); + return; + } + + if (segment.End > m_segments[endIndex].Start) + { + m_segments[endIndex].Start = segment.End; + } + + m_segments.erase(m_segments.begin() + index, m_segments.begin() + endIndex); +} + +//------------------------------------------------------------------------ +// IsEmpty: +// Check if the segment tree is empty. +// +// Returns: +// True if so. +// +bool StructSegments::IsEmpty() +{ + return m_segments.size() == 0; +} + +//------------------------------------------------------------------------ +// IsSingleSegment: +// Check if the segment tree contains only a single segment, and return +// it if so. +// +// Parameters: +// result - [out] The single segment. Only valid if the method returns true. +// +// Returns: +// True if so. 
+// +bool StructSegments::IsSingleSegment(Segment* result) +{ + if (m_segments.size() == 1) + { + *result = m_segments[0]; + return true; + } + + return false; +} + +bool StructSegments::CoveringSegment(Segment* result) +{ + if (m_segments.size() == 0) + { + return false; + } + + result->Start = m_segments[0].Start; + result->End = m_segments[m_segments.size() - 1].End; + return true; +} + +StructSegments Promotion::SignificantSegments(Compiler* compiler, ClassLayout* layout) +{ + COMP_HANDLE compHnd = compiler->info.compCompHnd; + + bool significantPadding; + if (layout->IsBlockLayout()) + { + significantPadding = true; + JITDUMP(" Block op has significant padding due to block layout\n"); + } + else + { + uint32_t attribs = compHnd->getClassAttribs(layout->GetClassHandle()); + if ((attribs & CORINFO_FLG_INDEXABLE_FIELDS) != 0) + { + significantPadding = true; + JITDUMP(" Block op has significant padding due to indexable fields\n"); + } + else if ((attribs & CORINFO_FLG_DONT_DIG_FIELDS) != 0) + { + significantPadding = true; + JITDUMP(" Block op has significant padding due to CORINFO_FLG_DONT_DIG_FIELDS\n"); + } + else if (((attribs & CORINFO_FLG_CUSTOMLAYOUT) != 0) && ((attribs & CORINFO_FLG_CONTAINS_GC_PTR) == 0)) + { + significantPadding = true; + JITDUMP(" Block op has significant padding due to CUSTOMLAYOUT without GC pointers\n"); + } + else + { + significantPadding = false; + } + } + + StructSegments segments(compiler->getAllocator(CMK_Promotion)); + + // Validate with "obviously correct" but less scalable fixed bit vector implementation. 
+ INDEBUG(FixedBitVect* segmentBitVect = FixedBitVect::bitVectInit(layout->GetSize(), compiler)); + + if (significantPadding) + { + segments.Add(StructSegments::Segment(0, layout->GetSize())); + +#ifdef DEBUG + for (unsigned i = 0; i < layout->GetSize(); i++) + segmentBitVect->bitVectSet(i); +#endif + } + else + { + unsigned numFields = compHnd->getClassNumInstanceFields(layout->GetClassHandle()); + for (unsigned i = 0; i < numFields; i++) + { + CORINFO_FIELD_HANDLE fieldHnd = compHnd->getFieldInClass(layout->GetClassHandle(), (int)i); + unsigned fldOffset = compHnd->getFieldOffset(fieldHnd); + CORINFO_CLASS_HANDLE fieldClassHandle; + CorInfoType corType = compHnd->getFieldType(fieldHnd, &fieldClassHandle); + var_types varType = JITtype2varType(corType); + unsigned size = genTypeSize(varType); + if (size == 0) + { + // TODO-CQ: Recursively handle padding in sub structures + // here. Might be better to introduce a single JIT-EE call + // to query the significant segments -- that would also be + // usable by R2R even outside the version bubble in many + // cases. + size = compHnd->getClassSize(fieldClassHandle); + assert(size != 0); + } + + segments.Add(StructSegments::Segment(fldOffset, fldOffset + size)); +#ifdef DEBUG + for (unsigned i = 0; i < size; i++) + segmentBitVect->bitVectSet(fldOffset + i); +#endif + } + } + + // TODO-TP: Cache this per class layout, we call this for every struct + // operation on a promoted local. 
+ return segments; +} + //------------------------------------------------------------------------ // CreateWriteBack: // Create IR that writes a replacement local's value back to its struct local: @@ -780,12 +1108,12 @@ void ReplaceVisitor::ReplaceLocal(GenTree** use, GenTree* user) { GenTreeLclVarCommon* lcl = (*use)->AsLclVarCommon(); unsigned lclNum = lcl->GetLclNum(); - if (m_replacements[lclNum] == nullptr) + if (m_aggregates[lclNum] == nullptr) { return; } - jitstd::vector& replacements = *m_replacements[lclNum]; + jitstd::vector& replacements = m_aggregates[lclNum]->Replacements; unsigned offs = lcl->GetLclOffs(); var_types accessType = lcl->TypeGet(); @@ -903,12 +1231,12 @@ void ReplaceVisitor::StoreBeforeReturn(GenTreeUnOp* ret) // void ReplaceVisitor::WriteBackBefore(GenTree** use, unsigned lcl, unsigned offs, unsigned size) { - if (m_replacements[lcl] == nullptr) + if (m_aggregates[lcl] == nullptr) { return; } - jitstd::vector& replacements = *m_replacements[lcl]; + jitstd::vector& replacements = m_aggregates[lcl]->Replacements; size_t index = Promotion::BinarySearch(replacements, offs); if ((ssize_t)index < 0) @@ -951,12 +1279,12 @@ void ReplaceVisitor::WriteBackBefore(GenTree** use, unsigned lcl, unsigned offs, // void ReplaceVisitor::MarkForReadBack(unsigned lcl, unsigned offs, unsigned size) { - if (m_replacements[lcl] == nullptr) + if (m_aggregates[lcl] == nullptr) { return; } - jitstd::vector& replacements = *m_replacements[lcl]; + jitstd::vector& replacements = m_aggregates[lcl]->Replacements; size_t index = Promotion::BinarySearch(replacements, offs); if ((ssize_t)index < 0) @@ -1025,8 +1353,7 @@ PhaseStatus Promotion::Run() // Pick promotions based on the use information we just collected. 
bool anyReplacements = false; - jitstd::vector** replacements = - new (m_compiler, CMK_Promotion) jitstd::vector*[m_compiler->lvaCount]{}; + AggregateInfo** aggregates = new (m_compiler, CMK_Promotion) AggregateInfo*[m_compiler->lvaCount]{}; for (unsigned i = 0; i < numLocals; i++) { LocalUses* uses = localsUse.GetUsesByLocal(i); @@ -1035,21 +1362,43 @@ PhaseStatus Promotion::Run() continue; } - uses->PickPromotions(m_compiler, i, &replacements[i]); + uses->PickPromotions(m_compiler, i, &aggregates[i]); - if (replacements[i] != nullptr) + if (aggregates[i] == nullptr) { - assert(replacements[i]->size() > 0); - anyReplacements = true; + continue; + } + + jitstd::vector& reps = aggregates[i]->Replacements; + + StructSegments unpromotedParts = SignificantSegments(m_compiler, m_compiler->lvaGetDesc(i)->GetLayout()); + for (size_t i = 0; i < reps.size(); i++) + { + unpromotedParts.Subtract(StructSegments::Segment(reps[i].Offset, reps[i].Offset + genTypeSize(reps[i].AccessType))); + } + + StructSegments::Segment unpromotedSegment; + if (unpromotedParts.CoveringSegment(&unpromotedSegment)) + { + aggregates[i]->UnpromotedMin = unpromotedSegment.Start; + aggregates[i]->UnpromotedMax = unpromotedSegment.End; + assert(unpromotedSegment.Start < unpromotedSegment.End); + } + else + { + // Aggregate is fully promoted, leave UnpromotedMin == UnpromotedMax to indicate this. 
+ } + + assert(reps.size() > 0); + anyReplacements = true; #ifdef DEBUG - JITDUMP("V%02u promoted with %d replacements\n", i, (int)replacements[i]->size()); - for (const Replacement& rep : *replacements[i]) - { - JITDUMP(" [%03u..%03u) promoted as %s V%02u\n", rep.Offset, rep.Offset + genTypeSize(rep.AccessType), - varTypeName(rep.AccessType), rep.LclNum); - } -#endif + JITDUMP("V%02u promoted with %d replacements\n", i, (int)reps.size()); + for (const Replacement& rep : reps) + { + JITDUMP(" [%03u..%03u) promoted as %s V%02u\n", rep.Offset, rep.Offset + genTypeSize(rep.AccessType), + varTypeName(rep.AccessType), rep.LclNum); } +#endif } if (!anyReplacements) @@ -1057,14 +1406,19 @@ PhaseStatus Promotion::Run() return PhaseStatus::MODIFIED_NOTHING; } + // Do a liveness pass to refine liveness for the promotions we picked. + PromotionLiveness liveness(m_compiler, aggregates); + liveness.Run(); + // Make all replacements we decided on. - ReplaceVisitor replacer(this, replacements); + ReplaceVisitor replacer(this, aggregates, &liveness); for (BasicBlock* bb : m_compiler->Blocks()) { + replacer.StartBlock(bb); for (Statement* stmt : bb->Statements()) { DISPSTMT(stmt); - replacer.Reset(); + replacer.StartStatement(stmt); replacer.WalkTree(stmt->GetRootNodePointer(), nullptr); if (replacer.MadeChanges()) @@ -1078,12 +1432,12 @@ PhaseStatus Promotion::Run() for (unsigned i = 0; i < numLocals; i++) { - if (replacements[i] == nullptr) + if (aggregates[i] == nullptr) { continue; } - for (Replacement& rep : *replacements[i]) + for (Replacement& rep : aggregates[i]->Replacements) { assert(!rep.NeedsReadBack || !rep.NeedsWriteBack); if (rep.NeedsReadBack) @@ -1108,7 +1462,7 @@ PhaseStatus Promotion::Run() Statement* prevStmt = nullptr; for (unsigned lclNum = 0; lclNum < numLocals; lclNum++) { - if (replacements[lclNum] == nullptr) + if (aggregates[lclNum] == nullptr) { continue; } @@ -1116,7 +1470,7 @@ PhaseStatus Promotion::Run() LclVarDsc* dsc = 
m_compiler->lvaGetDesc(lclNum); if (dsc->lvIsParam || dsc->lvIsOSRLocal) { - InsertInitialReadBack(lclNum, *replacements[lclNum], &prevStmt); + InsertInitialReadBack(lclNum, aggregates[lclNum]->Replacements, &prevStmt); } else if (dsc->lvSuppressedZeroInit) { @@ -1125,7 +1479,7 @@ PhaseStatus Promotion::Run() // Now that we are promoting some fields that assumption may be // invalidated for those fields, and we may need to insert explicit // zero inits again. - ExplicitlyZeroInitReplacementLocals(lclNum, *replacements[lclNum], &prevStmt); + ExplicitlyZeroInitReplacementLocals(lclNum, aggregates[lclNum]->Replacements, &prevStmt); } } diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h index 6c8f71a077e727..ca29e0d7199a64 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -37,16 +37,77 @@ struct Replacement bool Overlaps(unsigned otherStart, unsigned otherSize) const; }; +// Represents significant segments of a struct operation. +// +// Essentially a segment tree (but not stored as a tree) that supports boolean +// Add/Subtract operations of segments. Used to compute the remainder after +// replacements have been handled as part of a decomposed block operation. +class StructSegments +{ +public: + struct Segment + { + unsigned Start = 0; + unsigned End = 0; + + Segment() + { + } + + Segment(unsigned start, unsigned end) : Start(start), End(end) + { + } + + bool IntersectsInclusive(const Segment& other) const; + bool Contains(const Segment& other) const; + void Merge(const Segment& other); + }; + +private: + jitstd::vector m_segments; + +public: + StructSegments(CompAllocator allocator) : m_segments(allocator) + { + } + + void Add(const Segment& segment); + void Subtract(const Segment& segment); + bool IsEmpty(); + bool IsSingleSegment(Segment* result); + bool CoveringSegment(Segment* result); +}; + +// Represents information about an aggregate that now has replacements in it. 
+struct AggregateInfo +{ + jitstd::vector Replacements; + // Min offset in the struct local of the unpromoted part. + unsigned UnpromotedMin = 0; + // Max offset in the struct local of the unpromoted part. + unsigned UnpromotedMax = 0; + + AggregateInfo(CompAllocator alloc) : Replacements(alloc) + { + } + + bool OverlappingReplacements(unsigned offset, unsigned size, Replacement** firstReplacement, Replacement** endReplacement); +}; + class Promotion { Compiler* m_compiler; friend class LocalUses; friend class LocalsUseVisitor; + friend class AggregateInfo; + friend class PromotionLiveness; friend class ReplaceVisitor; friend class DecompositionPlan; friend class StructSegments; + static StructSegments SignificantSegments(Compiler* compiler, ClassLayout* layout); + void InsertInitialReadBack(unsigned lclNum, const jitstd::vector& replacements, Statement** prevStmt); void ExplicitlyZeroInitReplacementLocals(unsigned lclNum, const jitstd::vector& replacements, @@ -106,13 +167,49 @@ class Promotion PhaseStatus Run(); }; +struct BasicBlockLiveness; + +class PromotionLiveness +{ + Compiler* m_compiler; + AggregateInfo** m_aggregates; + BitVecTraits* m_bvTraits = nullptr; + unsigned* m_structLclToTrackedIndex = nullptr; + BasicBlockLiveness* m_bbInfo = nullptr; + bool m_hasPossibleBackEdge = false; + BitVec m_liveIn; + BitVec m_liveOut; + BitVec m_ehLiveVars; + + friend class PromotionLivenessBitSetTraits; + +public: + PromotionLiveness(Compiler* compiler, AggregateInfo** aggregates) + : m_compiler(compiler), m_aggregates(aggregates) + { + } + + void Run(); + +private: + void MarkUseDef(GenTreeLclVarCommon* lcl, BitVec& useSet, BitVec& defSet); + void MarkIndex(unsigned index, bool isUse, bool isDef, BitVec& useSet, BitVec& defSet); + void ComputeUseDefSets(); + void InterBlockLiveness(); + bool PerBlockLiveness(BasicBlock* block); + void AddHandlerLiveVars(BasicBlock* block, BitVec& ehLiveVars); + void FillInLiveness(); + void FillInLiveness(BitVec& life, BitVec 
volatileVars, GenTreeLclVarCommon* lcl); +}; + class DecompositionStatementList; class DecompositionPlan; class ReplaceVisitor : public GenTreeVisitor { Promotion* m_prom; - jitstd::vector** m_replacements; + AggregateInfo** m_aggregates; + PromotionLiveness* m_liveness; bool m_madeChanges = false; public: @@ -122,8 +219,8 @@ class ReplaceVisitor : public GenTreeVisitor UseExecutionOrder = true, }; - ReplaceVisitor(Promotion* prom, jitstd::vector** replacements) - : GenTreeVisitor(prom->m_compiler), m_prom(prom), m_replacements(replacements) + ReplaceVisitor(Promotion* prom, AggregateInfo** aggregates, PromotionLiveness* liveness) + : GenTreeVisitor(prom->m_compiler), m_prom(prom), m_aggregates(aggregates), m_liveness(liveness) { } @@ -132,7 +229,12 @@ class ReplaceVisitor : public GenTreeVisitor return m_madeChanges; } - void Reset() + void StartBlock(BasicBlock* block) + { + m_liveness->StartBlock(block); + } + + void StartStatement(Statement* stmt) { m_madeChanges = false; } diff --git a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index 076e3f720d67a9..fc2091757e5097 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -32,278 +32,6 @@ class DecompositionStatementList } }; -// Represents significant segments of a struct operation. -// -// Essentially a segment tree (but not stored as a tree) that supports boolean -// Add/Subtract operations of segments. Used to compute the remainder after -// replacements have been handled as part of a decomposed block operation. 
-class StructSegments -{ -public: - struct Segment - { - unsigned Start = 0; - unsigned End = 0; - - Segment() - { - } - - Segment(unsigned start, unsigned end) : Start(start), End(end) - { - } - - bool IntersectsInclusive(const Segment& other) const - { - if (End < other.Start) - { - return false; - } - - if (other.End < Start) - { - return false; - } - - return true; - } - - bool Contains(const Segment& other) const - { - return other.Start >= Start && other.End <= End; - } - - void Merge(const Segment& other) - { - Start = min(Start, other.Start); - End = max(End, other.End); - } - }; - -private: - jitstd::vector m_segments; - -public: - StructSegments(CompAllocator allocator) : m_segments(allocator) - { - } - - //------------------------------------------------------------------------ - // Add: - // Add a segment to the data structure. - // - // Parameters: - // segment - The segment to add. - // - void Add(const Segment& segment) - { - size_t index = Promotion::BinarySearch(m_segments, segment.Start); - - if ((ssize_t)index < 0) - { - index = ~index; - } - - m_segments.insert(m_segments.begin() + index, segment); - size_t endIndex; - for (endIndex = index + 1; endIndex < m_segments.size(); endIndex++) - { - if (!m_segments[index].IntersectsInclusive(m_segments[endIndex])) - { - break; - } - - m_segments[index].Merge(m_segments[endIndex]); - } - - m_segments.erase(m_segments.begin() + index + 1, m_segments.begin() + endIndex); - } - - //------------------------------------------------------------------------ - // Subtract: - // Subtract a segment from the data structure. - // - // Parameters: - // segment - The segment to subtract. - // - void Subtract(const Segment& segment) - { - size_t index = Promotion::BinarySearch(m_segments, segment.Start); - if ((ssize_t)index < 0) - { - index = ~index; - } - else - { - // Start == segment[index].End, which makes it non-interesting. 
- index++; - } - - if (index >= m_segments.size()) - { - return; - } - - // Here we know Start < segment[index].End. Do they not intersect at all? - if (m_segments[index].Start >= segment.End) - { - // Does not intersect any segment. - return; - } - - assert(m_segments[index].IntersectsInclusive(segment)); - - if (m_segments[index].Contains(segment)) - { - if (segment.Start > m_segments[index].Start) - { - // New segment (existing.Start, segment.Start) - if (segment.End < m_segments[index].End) - { - m_segments.insert(m_segments.begin() + index, Segment(m_segments[index].Start, segment.Start)); - - // And new segment (segment.End, existing.End) - m_segments[index + 1].Start = segment.End; - return; - } - - m_segments[index].End = segment.Start; - return; - } - if (segment.End < m_segments[index].End) - { - // New segment (segment.End, existing.End) - m_segments[index].Start = segment.End; - return; - } - - // Full segment is being removed - m_segments.erase(m_segments.begin() + index); - return; - } - - if (segment.Start > m_segments[index].Start) - { - m_segments[index].End = segment.Start; - index++; - } - - size_t endIndex = Promotion::BinarySearch(m_segments, segment.End); - if ((ssize_t)endIndex >= 0) - { - m_segments.erase(m_segments.begin() + index, m_segments.begin() + endIndex + 1); - return; - } - - endIndex = ~endIndex; - if (endIndex == m_segments.size()) - { - m_segments.erase(m_segments.begin() + index, m_segments.end()); - return; - } - - if (segment.End > m_segments[endIndex].Start) - { - m_segments[endIndex].Start = segment.End; - } - - m_segments.erase(m_segments.begin() + index, m_segments.begin() + endIndex); - } - - //------------------------------------------------------------------------ - // IsEmpty: - // Check if the segment tree is empty. - // - // Returns: - // True if so. 
- // - bool IsEmpty() - { - return m_segments.size() == 0; - } - - //------------------------------------------------------------------------ - // IsSingleSegment: - // Check if the segment tree contains only a single segment, and return - // it if so. - // - // Parameters: - // result - [out] The single segment. Only valid if the method returns true. - // - // Returns: - // True if so. - // - bool IsSingleSegment(Segment* result) - { - if (m_segments.size() == 1) - { - *result = m_segments[0]; - return true; - } - - return false; - } - -#ifdef DEBUG - //------------------------------------------------------------------------ - // Check: - // Validate that the data structure is normalized and that it equals a - // specific fixed bit vector. - // - // Parameters: - // vect - The bit vector - // - // Remarks: - // This validates that the internal representation is normalized (i.e. - // all adjacent intervals are merged) and that it contains an index iff - // the specified vector contains that index. - // - void Check(FixedBitVect* vect) - { - bool first = true; - unsigned last = 0; - for (const Segment& segment : m_segments) - { - assert(first || (last < segment.Start)); - assert(segment.End <= vect->bitVectGetSize()); - - for (unsigned i = last; i < segment.Start; i++) - assert(!vect->bitVectTest(i)); - - for (unsigned i = segment.Start; i < segment.End; i++) - assert(vect->bitVectTest(i)); - - first = false; - last = segment.End; - } - - for (unsigned i = last, size = vect->bitVectGetSize(); i < size; i++) - assert(!vect->bitVectTest(i)); - } - - //------------------------------------------------------------------------ - // Dump: - // Dump a string representation of the segment tree to stdout. 
- // - void Dump() - { - if (m_segments.size() == 0) - { - printf(""); - } - else - { - const char* sep = ""; - for (const Segment& segment : m_segments) - { - printf("%s[%03u..%03u)", sep, segment.Start, segment.End); - sep = " "; - } - } - } -#endif -}; - // Represents a plan for decomposing a block operation into direct treatment of // replacement fields and the remainder. class DecompositionPlan @@ -515,86 +243,11 @@ class DecompositionPlan StructSegments ComputeRemainder() { ClassLayout* dstLayout = m_dst->GetLayout(m_compiler); - - COMP_HANDLE compHnd = m_compiler->info.compCompHnd; - - bool significantPadding; - if (dstLayout->IsBlockLayout()) - { - significantPadding = true; - JITDUMP(" Block op has significant padding due to block layout\n"); - } - else - { - uint32_t attribs = compHnd->getClassAttribs(dstLayout->GetClassHandle()); - if ((attribs & CORINFO_FLG_INDEXABLE_FIELDS) != 0) - { - significantPadding = true; - JITDUMP(" Block op has significant padding due to indexable fields\n"); - } - else if ((attribs & CORINFO_FLG_DONT_DIG_FIELDS) != 0) - { - significantPadding = true; - JITDUMP(" Block op has significant padding due to CORINFO_FLG_DONT_DIG_FIELDS\n"); - } - else if (((attribs & CORINFO_FLG_CUSTOMLAYOUT) != 0) && ((attribs & CORINFO_FLG_CONTAINS_GC_PTR) == 0)) - { - significantPadding = true; - JITDUMP(" Block op has significant padding due to CUSTOMLAYOUT without GC pointers\n"); - } - else - { - significantPadding = false; - } - } - - StructSegments segments(m_compiler->getAllocator(CMK_Promotion)); + StructSegments segments = Promotion::SignificantSegments(m_compiler, dstLayout); // Validate with "obviously correct" but less scalable fixed bit vector implementation. 
INDEBUG(FixedBitVect* segmentBitVect = FixedBitVect::bitVectInit(dstLayout->GetSize(), m_compiler)); - if (significantPadding) - { - segments.Add(StructSegments::Segment(0, dstLayout->GetSize())); - -#ifdef DEBUG - for (unsigned i = 0; i < dstLayout->GetSize(); i++) - segmentBitVect->bitVectSet(i); -#endif - } - else - { - unsigned numFields = compHnd->getClassNumInstanceFields(dstLayout->GetClassHandle()); - for (unsigned i = 0; i < numFields; i++) - { - CORINFO_FIELD_HANDLE fieldHnd = compHnd->getFieldInClass(dstLayout->GetClassHandle(), (int)i); - unsigned fldOffset = compHnd->getFieldOffset(fieldHnd); - CORINFO_CLASS_HANDLE fieldClassHandle; - CorInfoType corType = compHnd->getFieldType(fieldHnd, &fieldClassHandle); - var_types varType = JITtype2varType(corType); - unsigned size = genTypeSize(varType); - if (size == 0) - { - // TODO-CQ: Recursively handle padding in sub structures - // here. Might be better to introduce a single JIT-EE call - // to query the significant segments -- that would also be - // usable by R2R even outside the version bubble in many - // cases. - size = compHnd->getClassSize(fieldClassHandle); - assert(size != 0); - } - - segments.Add(StructSegments::Segment(fldOffset, fldOffset + size)); -#ifdef DEBUG - for (unsigned i = 0; i < size; i++) - segmentBitVect->bitVectSet(fldOffset + i); -#endif - } - } - - // TODO-TP: Cache above StructSegments per class layout and just clone - // it there before the following subtract operations. 
- for (int i = 0; i < m_entries.Height(); i++) { const Entry& entry = m_entries.BottomRef(i); @@ -1356,61 +1009,15 @@ bool ReplaceVisitor::OverlappingReplacements(GenTreeLclVarCommon* lcl, Replacement** firstReplacement, Replacement** endReplacement) { - if (m_replacements[lcl->GetLclNum()] == nullptr) + AggregateInfo* agg = m_aggregates[lcl->GetLclNum()]; + if (agg == nullptr) { return false; } - jitstd::vector& replacements = *m_replacements[lcl->GetLclNum()]; - - unsigned offs = lcl->GetLclOffs(); - unsigned size = lcl->GetLayout(m_compiler)->GetSize(); - size_t firstIndex = Promotion::BinarySearch(replacements, offs); - if ((ssize_t)firstIndex < 0) - { - firstIndex = ~firstIndex; - if (firstIndex > 0) - { - Replacement& lastRepBefore = replacements[firstIndex - 1]; - if ((lastRepBefore.Offset + genTypeSize(lastRepBefore.AccessType)) > offs) - { - // Overlap with last entry starting before offs. - firstIndex--; - } - else if (firstIndex >= replacements.size()) - { - // Starts after last replacement ends. - return false; - } - } - - const Replacement& first = replacements[firstIndex]; - if (first.Offset >= (offs + size)) - { - // First candidate starts after this ends. - return false; - } - } - - assert((firstIndex < replacements.size()) && replacements[firstIndex].Overlaps(offs, size)); - *firstReplacement = &replacements[firstIndex]; - - if (endReplacement != nullptr) - { - size_t lastIndex = Promotion::BinarySearch(replacements, offs + size); - if ((ssize_t)lastIndex < 0) - { - lastIndex = ~lastIndex; - } - - // Since we verified above that there is an overlapping replacement - // we know that lastIndex exists and is the next one that does not - // overlap. 
- assert(lastIndex > 0); - *endReplacement = replacements.data() + lastIndex; - } - - return true; + unsigned offs = lcl->GetLclOffs(); + unsigned size = lcl->GetLayout(m_compiler)->GetSize(); + return agg->OverlappingReplacements(offs, size, firstReplacement, endReplacement); } //------------------------------------------------------------------------ diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp new file mode 100644 index 00000000000000..f4300f6da05f20 --- /dev/null +++ b/src/coreclr/jit/promotionliveness.cpp @@ -0,0 +1,433 @@ +#include "jitpch.h" +#include "promotion.h" + +struct BasicBlockLiveness +{ + BitVec VarUse; + BitVec VarDef; + BitVec LiveIn; + BitVec LiveOut; +}; + +void PromotionLiveness::Run() +{ + m_structLclToTrackedIndex = new (m_compiler, CMK_Promotion) unsigned[m_compiler->lvaTableCnt] {}; + unsigned trackedIndex = 0; + for (unsigned lclNum = 0; lclNum < m_compiler->lvaTableCnt; lclNum++) + { + AggregateInfo* agg = m_aggregates[lclNum]; + if (agg == nullptr) + { + continue; + } + + m_structLclToTrackedIndex[lclNum] = trackedIndex; + // Remainder. + // TODO-TP: We can avoid allocating an index for this when agg->UnpromotedMin == agg->UnpromotedMax, + // but it makes liveness use/def marking less uniform. 
+ trackedIndex++; + // Fields + trackedIndex += (unsigned)agg->Replacements.size(); + } + + m_bvTraits = new (m_compiler, CMK_Promotion) BitVecTraits(trackedIndex, m_compiler); + + m_bbInfo = m_compiler->fgAllocateTypeForEachBlk(CMK_Promotion); + for (BasicBlock* block : m_compiler->Blocks()) + { + BasicBlockLiveness& bb = m_bbInfo[block->bbNum]; + BitVecOps::AssignNoCopy(m_bvTraits, bb.VarUse, BitVecOps::MakeEmpty(m_bvTraits)); + BitVecOps::AssignNoCopy(m_bvTraits, bb.VarDef, BitVecOps::MakeEmpty(m_bvTraits)); + BitVecOps::AssignNoCopy(m_bvTraits, bb.LiveIn, BitVecOps::MakeEmpty(m_bvTraits)); + BitVecOps::AssignNoCopy(m_bvTraits, bb.LiveOut, BitVecOps::MakeEmpty(m_bvTraits)); + } + + while (true) + { + ComputeUseDefSets(); + + InterBlockLiveness(); + + FillInLiveness(); + } +} + +void PromotionLiveness::ComputeUseDefSets() +{ + for (BasicBlock* block : m_compiler->Blocks()) + { + BasicBlockLiveness& bb = m_bbInfo[block->bbNum]; + BitVecOps::ClearD(m_bvTraits, bb.VarUse); + BitVecOps::ClearD(m_bvTraits, bb.VarDef); + BitVecOps::ClearD(m_bvTraits, bb.LiveOut); + + for (Statement* stmt : block->Statements()) + { + for (GenTreeLclVarCommon* lcl : stmt->LocalsTreeList()) + { + MarkUseDef(lcl, bb.VarUse, bb.VarDef); + } + } + } +} + +static bool Intersects(unsigned segment1Start, unsigned segment1End, unsigned segment2Start, unsigned segment2End) +{ + if (segment1End <= segment2Start) + { + return false; + } + + if (segment2End <= segment1Start) + { + return false; + } + + return true; +} + +void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& useSet, BitSetShortLongRep& defSet) +{ + AggregateInfo* agg = m_aggregates[lcl->GetLclNum()]; + if (agg == nullptr) + { + return; + } + + jitstd::vector& reps = agg->Replacements; + bool isUse = (lcl->gtFlags & GTF_VAR_DEF) == 0; + bool isDef = !isUse; + + unsigned baseIndex = m_structLclToTrackedIndex[lcl->GetLclNum()]; + unsigned offs = lcl->GetLclOffs(); + var_types accessType = lcl->TypeGet(); + 
size_t index = Promotion::BinarySearch(reps, offs); + + if (accessType == TYP_STRUCT) + { + unsigned size; + if (lcl->OperIs(GT_LCL_ADDR)) + { + size = m_compiler->lvaGetDesc(lcl->GetLclNum())->lvExactSize() - offs; + } + else + { + size = lcl->GetLayout(m_compiler)->GetSize(); + } + + if ((ssize_t)index < 0) + { + index = ~index; + if ((index > 0) && reps[index - 1].Overlaps(offs, size)) + { + index--; + } + } + + unsigned lastEnd = offs; + bool usesRemainder = false; + while ((index < reps.size()) && (reps[index].Offset < offs + size)) + { + Replacement& rep = reps[index]; + bool isFullFieldDef = isDef && (offs <= rep.Offset) && (offs + size >= rep.Offset + genTypeSize(rep.AccessType)); + MarkIndex(baseIndex + (unsigned)index, isUse, isFullFieldDef, useSet, defSet); + + // Check if [lastEnd..rep.Offset) intersects with [UnpromotedMin..UnpromotedMax) to determine if this is a use of the remainder. + if (isUse && (rep.Offset > lastEnd)) + { + // TODO-CQ: This doesn't take padding inside the struct into account. + // We can compute a cumulative indicator that indicates from + // replacement to replacement whether there is any remainder + // between them. 
+ usesRemainder |= Intersects(lastEnd, rep.Offset, agg->UnpromotedMin, agg->UnpromotedMax); + } + } + + if (isUse && (lastEnd < offs + size)) + { + usesRemainder |= Intersects(lastEnd, offs + size, agg->UnpromotedMin, agg->UnpromotedMax); + } + + bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); + bool isUseOfRemainder = isUse && usesRemainder; + MarkIndex(baseIndex, isUseOfRemainder, isFullDefOfRemainder, useSet, defSet); + } + else + { + if ((ssize_t)index < 0) + { + unsigned size = genTypeSize(accessType); + bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); + bool isUseOfRemainder = isUse && Intersects(agg->UnpromotedMin, agg->UnpromotedMax, offs, offs + size); + MarkIndex(baseIndex, isUseOfRemainder, isFullDefOfRemainder, useSet, defSet); + } + else + { + // Accessing element. + MarkIndex(baseIndex + (unsigned)index, isUse, isDef, useSet, defSet); + } + } +} + +void PromotionLiveness::MarkIndex(unsigned index, bool isUse, bool isDef, BitVec& useSet, BitVec& defSet) +{ + if (isUse && !BitVecOps::IsMember(m_bvTraits, defSet, index)) + { + BitVecOps::AddElemD(m_bvTraits, useSet, index); + } + + if (isDef) + { + BitVecOps::AddElemD(m_bvTraits, defSet, index); + } +} + +void PromotionLiveness::InterBlockLiveness() +{ + bool changed; + do + { + changed = false; + + for (BasicBlock* block = m_compiler->fgLastBB; block != nullptr; block = block->bbPrev) + { + m_hasPossibleBackEdge |= block->bbNext && (block->bbNext->bbNum <= block->bbNum); + changed |= PerBlockLiveness(block); + } + + if (!m_hasPossibleBackEdge) + { + break; + } + } while (changed); +} + +bool PromotionLiveness::PerBlockLiveness(BasicBlock* block) +{ + // We disable promotion for GT_JMP methods. 
+ assert(!block->endsWithJmpMethod(m_compiler)); + + BasicBlockLiveness& bbInfo = m_bbInfo[block->bbNum]; + BitVecOps::ClearD(m_bvTraits, bbInfo.LiveOut); + for (BasicBlock* succ : block->GetAllSuccs(m_compiler)) + { + BitVecOps::UnionD(m_bvTraits, bbInfo.LiveOut, m_bbInfo[succ->bbNum].LiveIn); + m_hasPossibleBackEdge |= succ->bbNum <= block->bbNum; + } + + BitVecOps::LivenessD(m_bvTraits, m_liveIn, bbInfo.VarDef, bbInfo.VarUse, bbInfo.LiveOut); + + if (m_compiler->ehBlockHasExnFlowDsc(block)) + { + BitVecOps::ClearD(m_bvTraits, m_ehLiveVars); + AddHandlerLiveVars(block, m_ehLiveVars); + BitVecOps::UnionD(m_bvTraits, m_liveIn, m_ehLiveVars); + BitVecOps::UnionD(m_bvTraits, bbInfo.LiveOut, m_ehLiveVars); + m_hasPossibleBackEdge = true; + } + + bool liveInChanged = BitVecOps::Equal(m_bvTraits, bbInfo.LiveIn, m_liveIn); + + if (liveInChanged) + { + BitVecOps::Assign(m_bvTraits, bbInfo.LiveIn, m_liveIn); + } + + return liveInChanged; +} + +void PromotionLiveness::AddHandlerLiveVars(BasicBlock* block, BitVec& ehLiveVars) +{ + assert(m_compiler->ehBlockHasExnFlowDsc(block)); + EHblkDsc* HBtab = m_compiler->ehGetBlockExnFlowDsc(block); + + do + { + /* Either we enter the filter first or the catch/finally */ + if (HBtab->HasFilter()) + { + BitVecOps::UnionD(m_bvTraits, ehLiveVars, m_bbInfo[HBtab->ebdFilter->bbNum].LiveIn); +#if defined(FEATURE_EH_FUNCLETS) + // The EH subsystem can trigger a stack walk after the filter + // has returned, but before invoking the handler, and the only + // IP address reported from this method will be the original + // faulting instruction, thus everything in the try body + // must report as live any variables live-out of the filter + // (which is the same as those live-in to the handler) + BitVecOps::UnionD(m_bvTraits, ehLiveVars, m_bbInfo[HBtab->ebdHndBeg->bbNum].LiveIn); +#endif // FEATURE_EH_FUNCLETS + } + else + { + BitVecOps::UnionD(m_bvTraits, ehLiveVars, m_bbInfo[HBtab->ebdHndBeg->bbNum].LiveIn); + } + + /* If we have nested try's 
edbEnclosing will provide them */ + assert((HBtab->ebdEnclosingTryIndex == EHblkDsc::NO_ENCLOSING_INDEX) || + (HBtab->ebdEnclosingTryIndex > m_compiler->ehGetIndex(HBtab))); + + unsigned outerIndex = HBtab->ebdEnclosingTryIndex; + if (outerIndex == EHblkDsc::NO_ENCLOSING_INDEX) + { + break; + } + HBtab = m_compiler->ehGetDsc(outerIndex); + + } while (true); + + // If this block is within a filter, we also need to report as live + // any vars live into enclosed finally or fault handlers, since the + // filter will run during the first EH pass, and enclosed or enclosing + // handlers will run during the second EH pass. So all these handlers + // are "exception flow" successors of the filter. + // + // Note we are relying on ehBlockHasExnFlowDsc to return true + // for any filter block that we should examine here. + if (block->hasHndIndex()) + { + const unsigned thisHndIndex = block->getHndIndex(); + EHblkDsc* enclosingHBtab = m_compiler->ehGetDsc(thisHndIndex); + + if (enclosingHBtab->InFilterRegionBBRange(block)) + { + assert(enclosingHBtab->HasFilter()); + + // Search the EH table for enclosed regions. + // + // All the enclosed regions will be lower numbered and + // immediately prior to and contiguous with the enclosing + // region in the EH tab. + unsigned index = thisHndIndex; + + while (index > 0) + { + index--; + unsigned enclosingIndex = m_compiler->ehGetEnclosingTryIndex(index); + bool isEnclosed = false; + + // To verify this is an enclosed region, search up + // through the enclosing regions until we find the + // region associated with the filter. 
+ while (enclosingIndex != EHblkDsc::NO_ENCLOSING_INDEX) + { + if (enclosingIndex == thisHndIndex) + { + isEnclosed = true; + break; + } + + enclosingIndex = m_compiler->ehGetEnclosingTryIndex(enclosingIndex); + } + + // If we found an enclosed region, check if the region + // is a try fault or try finally, and if so, add any + // locals live into the enclosed region's handler into this + // block's live-in set. + if (isEnclosed) + { + EHblkDsc* enclosedHBtab = m_compiler->ehGetDsc(index); + + if (enclosedHBtab->HasFinallyOrFaultHandler()) + { + BitVecOps::UnionD(m_bvTraits, ehLiveVars, m_bbInfo[enclosedHBtab->ebdHndBeg->bbNum].LiveIn); + } + } + // Once we run across a non-enclosed region, we can stop searching. + else + { + break; + } + } + } + } +} + +void PromotionLiveness::FillInLiveness() +{ + BitVec life(BitVecOps::MakeEmpty(m_bvTraits)); + BitVec volatileVars(BitVecOps::MakeEmpty(m_bvTraits)); + + for (BasicBlock* block : m_compiler->Blocks()) + { + BasicBlockLiveness& bbInfo = m_bbInfo[block->bbNum]; + + BitVecOps::ClearD(m_bvTraits, volatileVars); + + if (m_compiler->ehBlockHasExnFlowDsc(block)) + { + AddHandlerLiveVars(block, volatileVars); + } + + BitVecOps::Assign(m_bvTraits, life, bbInfo.LiveOut); + + Statement* firstStmt = block->firstStmt(); + + if (firstStmt == nullptr) + { + continue; + } + + Statement* stmt = block->lastStmt(); + + while (true) + { + Statement* prevStmt = stmt->GetPrevStmt(); + + for (GenTree* cur = stmt->GetTreeListEnd(); cur != nullptr; cur = cur->gtPrev) + { + FillInLiveness(life, volatileVars, cur->AsLclVarCommon()); + } + + if (stmt == firstStmt) + { + break; + } + + stmt = prevStmt; + } + } +} + +void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTreeLclVarCommon* lcl) +{ + AggregateInfo* agg = m_aggregates[lcl->GetLclNum()]; + if (agg == nullptr) + { + return; + } + + bool isUse = (lcl->gtFlags & GTF_VAR_DEF) == 0; + bool isDef = !isUse; + + unsigned baseIndex = 
m_structLclToTrackedIndex[lcl->GetLclNum()]; + unsigned offs = lcl->GetLclOffs(); + var_types accessType = lcl->TypeGet(); + size_t index = Promotion::BinarySearch(agg->Replacements, offs); + + if (accessType == TYP_STRUCT) + { + if (!BitVecOps::IsMember(m_bvTraits, volatileVars, baseIndex)) + { + unsigned size; + if (lcl->OperIs(GT_LCL_ADDR)) + { + size = m_compiler->lvaGetDesc(lcl)->lvExactSize(); + } + else + { + size = lcl->GetLayout(m_compiler)->GetSize(); + } + // TODO-TP: GTF_VAR_DEATH annotation here currently means the remainder. We need + // a side table to attach field liveness information to. + bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); + if (isFullDefOfRemainder) + { + BitVecOps::RemoveElemD(m_bvTraits, life, baseIndex); + } + } + } + else + { + + } +} From e7c2805a5b437872ea61b6916f4b46bce9e45ca4 Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Tue, 9 May 2023 22:13:26 +0200 Subject: [PATCH 02/16] Compiler fixes --- src/coreclr/jit/promotion.cpp | 59 +++++++++++++++++++++++++++++++++++ src/coreclr/jit/promotion.h | 14 ++++----- 2 files changed, 66 insertions(+), 7 deletions(-) diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index 5c39f202bfd61b..ed16c19c29ea74 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -863,6 +863,65 @@ bool StructSegments::CoveringSegment(Segment* result) return true; } +#ifdef DEBUG +//------------------------------------------------------------------------ +// Check: +// Validate that the data structure is normalized and that it equals a +// specific fixed bit vector. +// +// Parameters: +// vect - The bit vector +// +// Remarks: +// This validates that the internal representation is normalized (i.e. +// all adjacent intervals are merged) and that it contains an index iff +// the specified vector contains that index. 
+// +void StructSegments::Check(FixedBitVect* vect) +{ + bool first = true; + unsigned last = 0; + for (const Segment& segment : m_segments) + { + assert(first || (last < segment.Start)); + assert(segment.End <= vect->bitVectGetSize()); + + for (unsigned i = last; i < segment.Start; i++) + assert(!vect->bitVectTest(i)); + + for (unsigned i = segment.Start; i < segment.End; i++) + assert(vect->bitVectTest(i)); + + first = false; + last = segment.End; + } + + for (unsigned i = last, size = vect->bitVectGetSize(); i < size; i++) + assert(!vect->bitVectTest(i)); +} + +//------------------------------------------------------------------------ +// Dump: +// Dump a string representation of the segment tree to stdout. +// +void StructSegments::Dump() +{ + if (m_segments.size() == 0) + { + printf(""); + } + else + { + const char* sep = ""; + for (const Segment& segment : m_segments) + { + printf("%s[%03u..%03u)", sep, segment.Start, segment.End); + sep = " "; + } + } +} +#endif + StructSegments Promotion::SignificantSegments(Compiler* compiler, ClassLayout* layout) { COMP_HANDLE compHnd = compiler->info.compCompHnd; diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h index ca29e0d7199a64..d8eeea251072ee 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -76,6 +76,11 @@ class StructSegments bool IsEmpty(); bool IsSingleSegment(Segment* result); bool CoveringSegment(Segment* result); + +#ifdef DEBUG + void Check(FixedBitVect* vect); + void Dump(); +#endif }; // Represents information about an aggregate that now has replacements in it. 
@@ -100,7 +105,7 @@ class Promotion friend class LocalUses; friend class LocalsUseVisitor; - friend class AggregateInfo; + friend struct AggregateInfo; friend class PromotionLiveness; friend class ReplaceVisitor; friend class DecompositionPlan; @@ -229,12 +234,7 @@ class ReplaceVisitor : public GenTreeVisitor return m_madeChanges; } - void StartBlock(BasicBlock* block) - { - m_liveness->StartBlock(block); - } - - void StartStatement(Statement* stmt) + void Reset() { m_madeChanges = false; } From 92d28a68727fcf6981acbc519af94a8baba22398 Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Wed, 10 May 2023 11:07:18 +0200 Subject: [PATCH 03/16] Fix build --- src/coreclr/jit/promotion.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index ed16c19c29ea74..2fb481903ee754 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -285,7 +285,7 @@ class LocalUses JITDUMP("Picking promotions for V%02u\n", lclNum); - assert(*replacements == nullptr); + assert(*aggregateInfo == nullptr); for (size_t i = 0; i < m_accesses.size(); i++) { const Access& access = m_accesses[i]; @@ -1473,11 +1473,10 @@ PhaseStatus Promotion::Run() ReplaceVisitor replacer(this, aggregates, &liveness); for (BasicBlock* bb : m_compiler->Blocks()) { - replacer.StartBlock(bb); for (Statement* stmt : bb->Statements()) { DISPSTMT(stmt); - replacer.StartStatement(stmt); + replacer.Reset(); replacer.WalkTree(stmt->GetRootNodePointer(), nullptr); if (replacer.MadeChanges()) From dbd223d566b7362888e0d5a7b3e755b66ed65ce1 Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Wed, 10 May 2023 15:40:56 +0200 Subject: [PATCH 04/16] Stretch 2 --- src/coreclr/jit/promotion.cpp | 21 +- src/coreclr/jit/promotion.h | 31 ++- src/coreclr/jit/promotiondecomposition.cpp | 113 ++++++-- src/coreclr/jit/promotionliveness.cpp | 296 +++++++++++++++------ 4 files changed, 341 insertions(+), 120 deletions(-) 
diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index 2fb481903ee754..b2acfa73d77451 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -1495,18 +1495,27 @@ PhaseStatus Promotion::Run() continue; } - for (Replacement& rep : aggregates[i]->Replacements) + for (size_t j = 0; j < aggregates[i]->Replacements.size(); j++) { + Replacement& rep = aggregates[i]->Replacements[j]; assert(!rep.NeedsReadBack || !rep.NeedsWriteBack); if (rep.NeedsReadBack) { - JITDUMP("Reading back replacement V%02u.[%03u..%03u) -> V%02u near the end of " FMT_BB ":\n", i, + if (!liveness.IsReplacementLiveOut(bb, i, (unsigned)j)) + { + JITDUMP("Skipping reading back dead replacement V%02u.[%03u..%03u) -> V%02u near the end of " FMT_BB "\n", i, + rep.Offset, rep.Offset + genTypeSize(rep.AccessType), rep.LclNum, bb->bbNum); + } + else + { + JITDUMP("Reading back replacement V%02u.[%03u..%03u) -> V%02u near the end of " FMT_BB ":\n", i, rep.Offset, rep.Offset + genTypeSize(rep.AccessType), rep.LclNum, bb->bbNum); - GenTree* readBack = CreateReadBack(m_compiler, i, rep); - Statement* stmt = m_compiler->fgNewStmtFromTree(readBack); - DISPSTMT(stmt); - m_compiler->fgInsertStmtNearEnd(bb, stmt); + GenTree* readBack = CreateReadBack(m_compiler, i, rep); + Statement* stmt = m_compiler->fgNewStmtFromTree(readBack); + DISPSTMT(stmt); + m_compiler->fgInsertStmtNearEnd(bb, stmt); + } rep.NeedsReadBack = false; } diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h index d8eeea251072ee..64de7f8a895a6d 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -174,6 +174,30 @@ class Promotion struct BasicBlockLiveness; +class StructUseDeaths +{ + BitVec m_deaths; + unsigned m_numFields; + + friend class PromotionLiveness; + +private: + StructUseDeaths(BitVec deaths, unsigned numFields) + : m_deaths(deaths) + , m_numFields(numFields) + { + } + +public: + StructUseDeaths() + : m_numFields(0) + { + } + + bool 
IsRemainderDying() const; + bool IsReplacementDying(unsigned index) const; +}; + class PromotionLiveness { Compiler* m_compiler; @@ -183,19 +207,20 @@ class PromotionLiveness BasicBlockLiveness* m_bbInfo = nullptr; bool m_hasPossibleBackEdge = false; BitVec m_liveIn; - BitVec m_liveOut; BitVec m_ehLiveVars; + JitHashTable, BitVec> m_aggDeaths; friend class PromotionLivenessBitSetTraits; public: PromotionLiveness(Compiler* compiler, AggregateInfo** aggregates) - : m_compiler(compiler), m_aggregates(aggregates) + : m_compiler(compiler), m_aggregates(aggregates), m_aggDeaths(compiler->getAllocator(CMK_Promotion)) { } void Run(); - + bool IsReplacementLiveOut(BasicBlock* bb, unsigned structLcl, unsigned replacement); + StructUseDeaths GetDeathsForStructLocal(GenTreeLclVarCommon* use); private: void MarkUseDef(GenTreeLclVarCommon* lcl, BitVec& useSet, BitVec& defSet); void MarkIndex(unsigned index, bool isUse, bool isDef, BitVec& useSet, BitVec& defSet); diff --git a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index fc2091757e5097..99eb5b4dcf940d 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -47,18 +47,23 @@ class DecompositionPlan }; Compiler* m_compiler; - ArrayStack m_entries; + AggregateInfo** m_aggregates; + PromotionLiveness* m_liveness; GenTree* m_dst; GenTree* m_src; + bool m_dstInvolvesReplacements; bool m_srcInvolvesReplacements; + ArrayStack m_entries; public: - DecompositionPlan(Compiler* comp, GenTree* dst, GenTree* src, bool srcInvolvesReplacements) + DecompositionPlan(Compiler* comp, AggregateInfo** aggregates, PromotionLiveness* liveness, GenTree* dst, GenTree* src, bool dstInvolvesReplacements, bool srcInvolvesReplacements) : m_compiler(comp) - , m_entries(comp->getAllocator(CMK_Promotion)) + , m_aggregates(aggregates) + , m_liveness(liveness) , m_dst(dst) , m_src(src) , m_srcInvolvesReplacements(srcInvolvesReplacements) + , 
m_entries(comp->getAllocator(CMK_Promotion)) { } @@ -306,12 +311,18 @@ class DecompositionPlan // the rest of the remainder); or by handling a specific 'hole' as a // primitive. // - RemainderStrategy DetermineRemainderStrategy() + RemainderStrategy DetermineRemainderStrategy(const StructUseDeaths& dstDeaths) { + if (m_dstInvolvesReplacements && dstDeaths.IsRemainderDying()) + { + JITDUMP(" => Remainder strategy: do nothing (remainder dying)\n"); + return RemainderStrategy(RemainderStrategy::NoRemainder); + } + StructSegments remainder = ComputeRemainder(); if (remainder.IsEmpty()) { - JITDUMP(" => Remainder strategy: do nothing\n"); + JITDUMP(" => Remainder strategy: do nothing (no remainder)\n"); return RemainderStrategy(RemainderStrategy::NoRemainder); } @@ -379,20 +390,32 @@ class DecompositionPlan { GenTree* cns = m_src->OperIsInitVal() ? m_src->gtGetOp1() : m_src; uint8_t initPattern = GetInitPattern(); + StructUseDeaths deaths = m_liveness->GetDeathsForStructLocal(m_dst->AsLclVarCommon()); + + AggregateInfo* agg = m_aggregates[m_dst->AsLclVarCommon()->GetLclNum()]; + assert((agg != nullptr) && (agg->Replacements.size() > 0)); + Replacement* firstRep = agg->Replacements.data(); for (int i = 0; i < m_entries.Height(); i++) { const Entry& entry = m_entries.BottomRef(i); assert((entry.ToLclNum != BAD_VAR_NUM) && (entry.ToReplacement != nullptr)); - GenTree* src = m_compiler->gtNewConWithPattern(entry.Type, initPattern); - GenTree* dst = m_compiler->gtNewLclvNode(entry.ToLclNum, entry.Type); - statements->AddStatement(m_compiler->gtNewAssignNode(dst, src)); + assert((entry.ToReplacement >= firstRep) && (entry.ToReplacement < firstRep + agg->Replacements.size())); + size_t replacementIndex = entry.ToReplacement - firstRep; + + if (!deaths.IsReplacementDying((unsigned)replacementIndex)) + { + GenTree* src = m_compiler->gtNewConWithPattern(entry.Type, initPattern); + GenTree* dst = m_compiler->gtNewLclvNode(entry.ToLclNum, entry.Type); + 
statements->AddStatement(m_compiler->gtNewAssignNode(dst, src)); + } + entry.ToReplacement->NeedsWriteBack = true; entry.ToReplacement->NeedsReadBack = false; } - RemainderStrategy remainderStrategy = DetermineRemainderStrategy(); + RemainderStrategy remainderStrategy = DetermineRemainderStrategy(deaths); if (remainderStrategy.Type == RemainderStrategy::FullBlock) { GenTree* asg = m_compiler->gtNewBlkOpNode(m_dst, cns); @@ -400,10 +423,10 @@ class DecompositionPlan } else if (remainderStrategy.Type == RemainderStrategy::Primitive) { - GenTree* src = m_compiler->gtNewConWithPattern(remainderStrategy.PrimitiveType, initPattern); + GenTree* src = m_compiler->gtNewConWithPattern(remainderStrategy.PrimitiveType, initPattern); GenTreeLclVarCommon* dstLcl = m_dst->AsLclVarCommon(); - GenTree* dst = m_compiler->gtNewLclFldNode(dstLcl->GetLclNum(), remainderStrategy.PrimitiveType, - dstLcl->GetLclOffs() + remainderStrategy.PrimitiveOffset); + GenTree* dst = m_compiler->gtNewLclFldNode(dstLcl->GetLclNum(), remainderStrategy.PrimitiveType, + dstLcl->GetLclOffs() + remainderStrategy.PrimitiveOffset); m_compiler->lvaSetVarDoNotEnregister(dstLcl->GetLclNum() DEBUGARG(DoNotEnregisterReason::LocalField)); statements->AddStatement(m_compiler->gtNewAssignNode(dst, src)); } @@ -420,7 +443,13 @@ class DecompositionPlan { assert(m_dst->OperIs(GT_LCL_VAR, GT_LCL_FLD, GT_BLK) && m_src->OperIs(GT_LCL_VAR, GT_LCL_FLD, GT_BLK)); - RemainderStrategy remainderStrategy = DetermineRemainderStrategy(); + StructUseDeaths dstDeaths; + if (m_dstInvolvesReplacements) + { + dstDeaths = m_liveness->GetDeathsForStructLocal(m_dst->AsLclVarCommon()); + } + + RemainderStrategy remainderStrategy = DetermineRemainderStrategy(dstDeaths); // If the remainder is a full block and is going to incur write barrier // then avoid incurring multiple write barriers for each source @@ -509,7 +538,7 @@ class DecompositionPlan { for (int i = 0; i < m_entries.Height(); i++) { - if 
(!IsHandledByRemainder(m_entries.BottomRef(i), remainderStrategy)) + if (!CanSkipEntry(m_entries.BottomRef(i), dstDeaths, remainderStrategy)) { numAddrUses++; } @@ -532,7 +561,7 @@ class DecompositionPlan // See if our first indirection will subsume the null check (usual case). for (int i = 0; i < m_entries.Height(); i++) { - if (IsHandledByRemainder(m_entries.BottomRef(i), remainderStrategy)) + if (CanSkipEntry(m_entries.BottomRef(i), dstDeaths, remainderStrategy)) { continue; } @@ -641,12 +670,19 @@ class DecompositionPlan { const Entry& entry = m_entries.BottomRef(i); - if (IsHandledByRemainder(entry, remainderStrategy)) + if (CanSkipEntry(entry, dstDeaths, remainderStrategy)) { - assert(entry.FromReplacement != nullptr); - JITDUMP(" Skipping dst+%03u <- V%02u (%s); it is up-to-date in its struct local and will be handled " + if (entry.FromReplacement != nullptr) + { + JITDUMP(" Skipping dst+%03u <- V%02u (%s); it is up-to-date in its struct local and will be handled " "as part of the remainder\n", entry.Offset, entry.FromReplacement->LclNum, entry.FromReplacement->Description); + } + else if (entry.ToReplacement != nullptr) + { + JITDUMP(" Skipping def of V%02u (%s); it is dying", entry.ToReplacement->LclNum, entry.ToReplacement->Description); + } + continue; } @@ -751,20 +787,41 @@ class DecompositionPlan } //------------------------------------------------------------------------ - // IsHandledByRemainder: - // Check if the specified entry is redundant because the remainder would - // handle it anyway. This occurs when we have a source replacement that - // is up-to-date in its struct local and we are going to retain a full - // block operation anyway. + // CanSkipEntry: + // Check if the specified entry can be skipped because it is writing to a + // death replacement or because the remainder would handle it anyway. 
// // Parameters: // entry - The init/copy entry // remainderStrategy - The strategy we are using for the remainder // - bool IsHandledByRemainder(const Entry& entry, const RemainderStrategy& remainderStrategy) + bool CanSkipEntry(const Entry& entry, const StructUseDeaths& deaths, const RemainderStrategy& remainderStrategy) { - return (remainderStrategy.Type == RemainderStrategy::FullBlock) && (entry.FromReplacement != nullptr) && - !entry.FromReplacement->NeedsWriteBack && (entry.ToLclNum == BAD_VAR_NUM); + if (entry.ToReplacement != nullptr) + { + // Check if this entry is dying anyway. + assert(m_dstInvolvesReplacements); + + AggregateInfo* agg = m_aggregates[m_dst->AsLclVarCommon()->GetLclNum()]; + assert((agg != nullptr) && (agg->Replacements.size() > 0)); + Replacement* firstRep = agg->Replacements.data(); + assert((entry.ToReplacement >= firstRep) && (entry.ToReplacement < (firstRep + agg->Replacements.size()))); + + size_t replacementIndex = entry.ToReplacement - firstRep; + if (deaths.IsReplacementDying((unsigned)replacementIndex)) + { + return true; + } + } + + if (entry.FromReplacement != nullptr) + { + // Check if the remainder is going to handle it. + return (remainderStrategy.Type == RemainderStrategy::FullBlock) && + !entry.FromReplacement->NeedsWriteBack && (entry.ToLclNum == BAD_VAR_NUM); + } + + return false; } //------------------------------------------------------------------------ @@ -868,6 +925,8 @@ void ReplaceVisitor::HandleAssignment(GenTree** use, GenTree* user) if (!dstInvolvesReplacements && !srcInvolvesReplacements) { + // TODO-CQ: If the destination is an aggregate we can still use liveness + // information for the remainder to DCE this. 
return; } @@ -959,7 +1018,7 @@ void ReplaceVisitor::HandleAssignment(GenTree** use, GenTree* user) } } - DecompositionPlan plan(m_compiler, dst, src, srcInvolvesReplacements); + DecompositionPlan plan(m_compiler, m_aggregates, m_liveness, dst, src, dstInvolvesReplacements, srcInvolvesReplacements); if (src->IsConstInitVal()) { diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp index f4300f6da05f20..78ce6666cf9914 100644 --- a/src/coreclr/jit/promotionliveness.cpp +++ b/src/coreclr/jit/promotionliveness.cpp @@ -3,12 +3,47 @@ struct BasicBlockLiveness { + // Variables used before a full definition. BitVec VarUse; + // Variables fully defined before a use. + // Note that this differs from our normal liveness: partial definitions are + // NOT marked but they are also not considered uses. BitVec VarDef; BitVec LiveIn; BitVec LiveOut; }; +bool PromotionLiveness::IsReplacementLiveOut(BasicBlock* bb, unsigned structLcl, unsigned replacementIndex) +{ + BitVec liveOut = m_bbInfo[bb->bbNum].LiveOut; + unsigned baseIndex = m_structLclToTrackedIndex[structLcl]; + return BitVecOps::IsMember(m_bvTraits, liveOut, baseIndex + 1 + replacementIndex); +} + +StructUseDeaths PromotionLiveness::GetDeathsForStructLocal(GenTreeLclVarCommon* lcl) +{ + assert(lcl->OperIsLocal()); + BitVec aggDeaths; + bool found = m_aggDeaths.Lookup(lcl, &aggDeaths); + assert(found); + + unsigned lclNum = lcl->GetLclNum(); + AggregateInfo* aggInfo = m_aggregates[lclNum]; + return StructUseDeaths(aggDeaths, (unsigned)aggInfo->Replacements.size()); +} + +bool StructUseDeaths::IsRemainderDying() const +{ + BitVecTraits traits(1 + m_numFields, nullptr); + return BitVecOps::IsMember(&traits, m_deaths, 0); +} + +bool StructUseDeaths::IsReplacementDying(unsigned index) const +{ + BitVecTraits traits(1 + m_numFields, nullptr); + return BitVecOps::IsMember(&traits, m_deaths, 1 + index); +} + void PromotionLiveness::Run() { m_structLclToTrackedIndex = new (m_compiler, 
CMK_Promotion) unsigned[m_compiler->lvaTableCnt] {}; @@ -31,25 +66,15 @@ void PromotionLiveness::Run() } m_bvTraits = new (m_compiler, CMK_Promotion) BitVecTraits(trackedIndex, m_compiler); - m_bbInfo = m_compiler->fgAllocateTypeForEachBlk(CMK_Promotion); - for (BasicBlock* block : m_compiler->Blocks()) - { - BasicBlockLiveness& bb = m_bbInfo[block->bbNum]; - BitVecOps::AssignNoCopy(m_bvTraits, bb.VarUse, BitVecOps::MakeEmpty(m_bvTraits)); - BitVecOps::AssignNoCopy(m_bvTraits, bb.VarDef, BitVecOps::MakeEmpty(m_bvTraits)); - BitVecOps::AssignNoCopy(m_bvTraits, bb.LiveIn, BitVecOps::MakeEmpty(m_bvTraits)); - BitVecOps::AssignNoCopy(m_bvTraits, bb.LiveOut, BitVecOps::MakeEmpty(m_bvTraits)); - } + BitVecOps::AssignNoCopy(m_bvTraits, m_liveIn, BitVecOps::MakeEmpty(m_bvTraits)); + BitVecOps::AssignNoCopy(m_bvTraits, m_ehLiveVars, BitVecOps::MakeEmpty(m_bvTraits)); - while (true) - { - ComputeUseDefSets(); + ComputeUseDefSets(); - InterBlockLiveness(); + InterBlockLiveness(); - FillInLiveness(); - } + FillInLiveness(); } void PromotionLiveness::ComputeUseDefSets() @@ -57,9 +82,10 @@ void PromotionLiveness::ComputeUseDefSets() for (BasicBlock* block : m_compiler->Blocks()) { BasicBlockLiveness& bb = m_bbInfo[block->bbNum]; - BitVecOps::ClearD(m_bvTraits, bb.VarUse); - BitVecOps::ClearD(m_bvTraits, bb.VarDef); - BitVecOps::ClearD(m_bvTraits, bb.LiveOut); + BitVecOps::AssignNoCopy(m_bvTraits, bb.VarUse, BitVecOps::MakeEmpty(m_bvTraits)); + BitVecOps::AssignNoCopy(m_bvTraits, bb.VarDef, BitVecOps::MakeEmpty(m_bvTraits)); + BitVecOps::AssignNoCopy(m_bvTraits, bb.LiveIn, BitVecOps::MakeEmpty(m_bvTraits)); + BitVecOps::AssignNoCopy(m_bvTraits, bb.LiveOut, BitVecOps::MakeEmpty(m_bvTraits)); for (Statement* stmt : block->Statements()) { @@ -71,21 +97,6 @@ void PromotionLiveness::ComputeUseDefSets() } } -static bool Intersects(unsigned segment1Start, unsigned segment1End, unsigned segment2Start, unsigned segment2End) -{ - if (segment1End <= segment2Start) - { - return false; - } - 
- if (segment2End <= segment1Start) - { - return false; - } - - return true; -} - void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& useSet, BitSetShortLongRep& defSet) { AggregateInfo* agg = m_aggregates[lcl->GetLclNum()]; @@ -99,72 +110,67 @@ void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& bool isDef = !isUse; unsigned baseIndex = m_structLclToTrackedIndex[lcl->GetLclNum()]; - unsigned offs = lcl->GetLclOffs(); var_types accessType = lcl->TypeGet(); - size_t index = Promotion::BinarySearch(reps, offs); if (accessType == TYP_STRUCT) { - unsigned size; if (lcl->OperIs(GT_LCL_ADDR)) { - size = m_compiler->lvaGetDesc(lcl->GetLclNum())->lvExactSize() - offs; + // For LCL_ADDR this is a retbuf and we expect it to be a def. We + // don't know the exact size here so we cannot mark anything as + // being fully defined, thus we can just return. + assert(isDef); + return; } - else + + if (lcl->OperIsScalarLocal()) { - size = lcl->GetLayout(m_compiler)->GetSize(); + // Mark remainder and all fields. 
+ for (size_t i = 0; i <= reps.size(); i++) + MarkIndex(baseIndex + (unsigned)i, isUse, isDef, useSet, defSet); } - - if ((ssize_t)index < 0) + else { - index = ~index; - if ((index > 0) && reps[index - 1].Overlaps(offs, size)) + unsigned offs = lcl->GetLclOffs(); + unsigned size = lcl->GetLayout(m_compiler)->GetSize(); + size_t index = Promotion::BinarySearch(reps, offs); + + if ((ssize_t)index < 0) { - index--; + index = ~index; + if ((index > 0) && reps[index - 1].Overlaps(offs, size)) + { + index--; + } } - } - - unsigned lastEnd = offs; - bool usesRemainder = false; - while ((index < reps.size()) && (reps[index].Offset < offs + size)) - { - Replacement& rep = reps[index]; - bool isFullFieldDef = isDef && (offs <= rep.Offset) && (offs + size >= rep.Offset + genTypeSize(rep.AccessType)); - MarkIndex(baseIndex + (unsigned)index, isUse, isFullFieldDef, useSet, defSet); - // Check if [lastEnd..rep.Offset) intersects with [UnpromotedMin..UnpromotedMax) to determine if this is a use of the remainder. - if (isUse && (rep.Offset > lastEnd)) + while ((index < reps.size()) && (reps[index].Offset < offs + size)) { - // TODO-CQ: This doesn't take padding inside the struct into account. - // We can compute a cumulative indicator that indicates from - // replacement to replacement whether there is any remainder - // between them. 
- usesRemainder |= Intersects(lastEnd, rep.Offset, agg->UnpromotedMin, agg->UnpromotedMax); + Replacement& rep = reps[index]; + bool isFullFieldDef = isDef && (offs <= rep.Offset) && (offs + size >= rep.Offset + genTypeSize(rep.AccessType)); + MarkIndex(baseIndex + 1 + (unsigned)index, isUse, isFullFieldDef, useSet, defSet); } - } - if (isUse && (lastEnd < offs + size)) - { - usesRemainder |= Intersects(lastEnd, offs + size, agg->UnpromotedMin, agg->UnpromotedMax); + bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); + // TODO-CQ: We could also try to figure out if a use actually touches the remainder, e.g. in some cases + // a struct use may consist only of promoted fields and does not actually use the remainder. + MarkIndex(baseIndex, isUse, isFullDefOfRemainder, useSet, defSet); } - - bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); - bool isUseOfRemainder = isUse && usesRemainder; - MarkIndex(baseIndex, isUseOfRemainder, isFullDefOfRemainder, useSet, defSet); } else { + unsigned offs = lcl->GetLclOffs(); + size_t index = Promotion::BinarySearch(reps, offs); if ((ssize_t)index < 0) { unsigned size = genTypeSize(accessType); bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); - bool isUseOfRemainder = isUse && Intersects(agg->UnpromotedMin, agg->UnpromotedMax, offs, offs + size); - MarkIndex(baseIndex, isUseOfRemainder, isFullDefOfRemainder, useSet, defSet); + MarkIndex(baseIndex, isUse, isFullDefOfRemainder, useSet, defSet); } else { // Accessing element. 
- MarkIndex(baseIndex + (unsigned)index, isUse, isDef, useSet, defSet); + MarkIndex(baseIndex + 1 + (unsigned)index, isUse, isDef, useSet, defSet); } } } @@ -400,34 +406,156 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre bool isDef = !isUse; unsigned baseIndex = m_structLclToTrackedIndex[lcl->GetLclNum()]; - unsigned offs = lcl->GetLclOffs(); var_types accessType = lcl->TypeGet(); - size_t index = Promotion::BinarySearch(agg->Replacements, offs); if (accessType == TYP_STRUCT) { - if (!BitVecOps::IsMember(m_bvTraits, volatileVars, baseIndex)) + if (lcl->OperIs(GT_LCL_ADDR)) + { + // Retbuf -- these are definitions but we do not know of how much. + // We never mark them as dead and we never treat them as killing anything. + assert(isDef); + return; + } + + // We need an external bit set to represent dying fields/remainder on a struct use. + BitVecTraits aggTraits(1 + (unsigned)agg->Replacements.size(), m_compiler); + BitVec aggDeaths(BitVecOps::MakeEmpty(&aggTraits)); + if (lcl->OperIsScalarLocal()) + { + // Handle remainder and all fields. 
+ for (size_t i = 0; i <= agg->Replacements.size(); i++) + { + unsigned varIndex = baseIndex + (unsigned)i; + if (BitVecOps::IsMember(m_bvTraits, life, varIndex)) + { + if (isDef && !BitVecOps::IsMember(m_bvTraits, volatileVars, varIndex)) + { + BitVecOps::RemoveElemD(m_bvTraits, life, varIndex); + } + } + else + { + BitVecOps::AddElemD(&aggTraits, aggDeaths, (unsigned)i); + + if (isUse) + { + BitVecOps::AddElemD(m_bvTraits, life, varIndex); + } + } + } + } + else { - unsigned size; - if (lcl->OperIs(GT_LCL_ADDR)) + unsigned offs = lcl->GetLclOffs(); + unsigned size = lcl->GetLayout(m_compiler)->GetSize(); + size_t index = Promotion::BinarySearch(agg->Replacements, offs); + + if ((ssize_t)index < 0) { - size = m_compiler->lvaGetDesc(lcl)->lvExactSize(); + index = ~index; + if ((index > 0) && agg->Replacements[index - 1].Overlaps(offs, size)) + { + index--; + } } - else + + while ((index < agg->Replacements.size()) && (agg->Replacements[index].Offset < offs + size)) + { + unsigned varIndex = baseIndex + 1 + (unsigned)index; + Replacement& rep = agg->Replacements[index]; + if (BitVecOps::IsMember(m_bvTraits, life, varIndex)) + { + bool isFullFieldDef = isDef && (offs <= rep.Offset) && (offs + size >= rep.Offset + genTypeSize(rep.AccessType)); + if (isFullFieldDef && !BitVecOps::IsMember(m_bvTraits, volatileVars, varIndex)) + { + BitVecOps::RemoveElemD(m_bvTraits, life, varIndex); + } + } + else + { + BitVecOps::AddElemD(&aggTraits, aggDeaths, 1 + (unsigned)index); + + if (isUse) + { + BitVecOps::AddElemD(m_bvTraits, life, varIndex); + } + } + } + + if (BitVecOps::IsMember(m_bvTraits, life, baseIndex)) { - size = lcl->GetLayout(m_compiler)->GetSize(); + bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); + if (isFullDefOfRemainder && !BitVecOps::IsMember(m_bvTraits, volatileVars, baseIndex)) + { + BitVecOps::RemoveElemD(m_bvTraits, life, baseIndex); + } } - // TODO-TP: GTF_VAR_DEATH annotation here currently 
means the remainder. We need - // a side table to attach field liveness information to. - bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); - if (isFullDefOfRemainder) + else { - BitVecOps::RemoveElemD(m_bvTraits, life, baseIndex); + // TODO-CQ: We could also try to figure out if a use actually touches the remainder, e.g. in some cases + // a struct use may consist only of promoted fields and does not actually use the remainder. + BitVecOps::AddElemD(&aggTraits, aggDeaths, 0); + + if (isUse) + { + BitVecOps::AddElemD(m_bvTraits, life, baseIndex); + } } } + + m_aggDeaths.Set(lcl, aggDeaths); } else { + unsigned offs = lcl->GetLclOffs(); + size_t index = Promotion::BinarySearch(agg->Replacements, offs); + if ((ssize_t)index < 0) + { + unsigned size = genTypeSize(accessType); + if (BitVecOps::IsMember(m_bvTraits, life, baseIndex)) + { + lcl->gtFlags &= ~GTF_VAR_DEATH; + + bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); + if (isFullDefOfRemainder && !BitVecOps::IsMember(m_bvTraits, volatileVars, baseIndex)) + { + BitVecOps::RemoveElemD(m_bvTraits, life, baseIndex); + } + } + else + { + lcl->gtFlags |= GTF_VAR_DEATH; + + if (isUse) + { + BitVecOps::AddElemD(m_bvTraits, life, baseIndex); + } + } + } + else + { + index = ~index; + unsigned varIndex = baseIndex + 1 + (unsigned)index; + + if (BitVecOps::IsMember(m_bvTraits, life, varIndex)) + { + lcl->gtFlags &= ~GTF_VAR_DEATH; + + if (isDef && !BitVecOps::IsMember(m_bvTraits, volatileVars, varIndex)) + { + BitVecOps::RemoveElemD(m_bvTraits, life, varIndex); + } + } + else + { + lcl->gtFlags |= GTF_VAR_DEATH; + if (isUse) + { + BitVecOps::AddElemD(m_bvTraits, life, varIndex); + } + } + } } } From 3e1ef9a8c4829141f7505d59704b3be0faf17d8f Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Wed, 10 May 2023 15:41:31 +0200 Subject: [PATCH 05/16] Run jit-format --- src/coreclr/jit/promotion.cpp | 35 
+++++++----- src/coreclr/jit/promotion.h | 37 ++++++------ src/coreclr/jit/promotiondecomposition.cpp | 51 ++++++++++------- src/coreclr/jit/promotionliveness.cpp | 66 ++++++++++++---------- 4 files changed, 104 insertions(+), 85 deletions(-) diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index b2acfa73d77451..d168eba618d9ff 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -126,9 +126,12 @@ inline AccessKindFlags& operator&=(AccessKindFlags& a, AccessKindFlags b) return a = (AccessKindFlags)((uint32_t)a & (uint32_t)b); } -bool AggregateInfo::OverlappingReplacements(unsigned offset, unsigned size, Replacement** firstReplacement, Replacement** endReplacement) +bool AggregateInfo::OverlappingReplacements(unsigned offset, + unsigned size, + Replacement** firstReplacement, + Replacement** endReplacement) { - size_t firstIndex = Promotion::BinarySearch(Replacements, offset); + size_t firstIndex = Promotion::BinarySearch(Replacements, offset); if ((ssize_t)firstIndex < 0) { firstIndex = ~firstIndex; @@ -176,7 +179,6 @@ bool AggregateInfo::OverlappingReplacements(unsigned offset, unsigned size, Repl return true; } - // Tracks all the accesses into one particular struct local. 
class LocalUses { @@ -314,11 +316,11 @@ class LocalUses if (*aggregateInfo == nullptr) { - *aggregateInfo = - new (comp, CMK_Promotion) AggregateInfo(comp->getAllocator(CMK_Promotion)); + *aggregateInfo = new (comp, CMK_Promotion) AggregateInfo(comp->getAllocator(CMK_Promotion)); } - (*aggregateInfo)->Replacements.push_back(Replacement(access.Offset, access.AccessType, newLcl DEBUGARG(bufp))); + (*aggregateInfo) + ->Replacements.push_back(Replacement(access.Offset, access.AccessType, newLcl DEBUGARG(bufp))); } JITDUMP("\n"); @@ -859,7 +861,7 @@ bool StructSegments::CoveringSegment(Segment* result) } result->Start = m_segments[0].Start; - result->End = m_segments[m_segments.size() - 1].End; + result->End = m_segments[m_segments.size() - 1].End; return true; } @@ -1411,8 +1413,8 @@ PhaseStatus Promotion::Run() #endif // Pick promotions based on the use information we just collected. - bool anyReplacements = false; - AggregateInfo** aggregates = new (m_compiler, CMK_Promotion) AggregateInfo*[m_compiler->lvaCount]{}; + bool anyReplacements = false; + AggregateInfo** aggregates = new (m_compiler, CMK_Promotion) AggregateInfo*[m_compiler->lvaCount]{}; for (unsigned i = 0; i < numLocals; i++) { LocalUses* uses = localsUse.GetUsesByLocal(i); @@ -1433,7 +1435,8 @@ PhaseStatus Promotion::Run() StructSegments unpromotedParts = SignificantSegments(m_compiler, m_compiler->lvaGetDesc(i)->GetLayout()); for (size_t i = 0; i < reps.size(); i++) { - unpromotedParts.Subtract(StructSegments::Segment(reps[i].Offset, reps[i].Offset + genTypeSize(reps[i].AccessType))); + unpromotedParts.Subtract( + StructSegments::Segment(reps[i].Offset, reps[i].Offset + genTypeSize(reps[i].AccessType))); } StructSegments::Segment unpromotedSegment; @@ -1503,16 +1506,18 @@ PhaseStatus Promotion::Run() { if (!liveness.IsReplacementLiveOut(bb, i, (unsigned)j)) { - JITDUMP("Skipping reading back dead replacement V%02u.[%03u..%03u) -> V%02u near the end of " FMT_BB "\n", i, - rep.Offset, rep.Offset + 
genTypeSize(rep.AccessType), rep.LclNum, bb->bbNum); + JITDUMP( + "Skipping reading back dead replacement V%02u.[%03u..%03u) -> V%02u near the end of " FMT_BB + "\n", + i, rep.Offset, rep.Offset + genTypeSize(rep.AccessType), rep.LclNum, bb->bbNum); } else { JITDUMP("Reading back replacement V%02u.[%03u..%03u) -> V%02u near the end of " FMT_BB ":\n", i, - rep.Offset, rep.Offset + genTypeSize(rep.AccessType), rep.LclNum, bb->bbNum); + rep.Offset, rep.Offset + genTypeSize(rep.AccessType), rep.LclNum, bb->bbNum); - GenTree* readBack = CreateReadBack(m_compiler, i, rep); - Statement* stmt = m_compiler->fgNewStmtFromTree(readBack); + GenTree* readBack = CreateReadBack(m_compiler, i, rep); + Statement* stmt = m_compiler->fgNewStmtFromTree(readBack); DISPSTMT(stmt); m_compiler->fgInsertStmtNearEnd(bb, stmt); } diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h index 64de7f8a895a6d..6ead4a7b6e1dd4 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -96,7 +96,10 @@ struct AggregateInfo { } - bool OverlappingReplacements(unsigned offset, unsigned size, Replacement** firstReplacement, Replacement** endReplacement); + bool OverlappingReplacements(unsigned offset, + unsigned size, + Replacement** firstReplacement, + Replacement** endReplacement); }; class Promotion @@ -176,21 +179,18 @@ struct BasicBlockLiveness; class StructUseDeaths { - BitVec m_deaths; + BitVec m_deaths; unsigned m_numFields; friend class PromotionLiveness; private: - StructUseDeaths(BitVec deaths, unsigned numFields) - : m_deaths(deaths) - , m_numFields(numFields) + StructUseDeaths(BitVec deaths, unsigned numFields) : m_deaths(deaths), m_numFields(numFields) { } public: - StructUseDeaths() - : m_numFields(0) + StructUseDeaths() : m_numFields(0) { } @@ -200,14 +200,14 @@ class StructUseDeaths class PromotionLiveness { - Compiler* m_compiler; - AggregateInfo** m_aggregates; - BitVecTraits* m_bvTraits = nullptr; - unsigned* m_structLclToTrackedIndex = nullptr; - 
BasicBlockLiveness* m_bbInfo = nullptr; - bool m_hasPossibleBackEdge = false; - BitVec m_liveIn; - BitVec m_ehLiveVars; + Compiler* m_compiler; + AggregateInfo** m_aggregates; + BitVecTraits* m_bvTraits = nullptr; + unsigned* m_structLclToTrackedIndex = nullptr; + BasicBlockLiveness* m_bbInfo = nullptr; + bool m_hasPossibleBackEdge = false; + BitVec m_liveIn; + BitVec m_ehLiveVars; JitHashTable, BitVec> m_aggDeaths; friend class PromotionLivenessBitSetTraits; @@ -221,6 +221,7 @@ class PromotionLiveness void Run(); bool IsReplacementLiveOut(BasicBlock* bb, unsigned structLcl, unsigned replacement); StructUseDeaths GetDeathsForStructLocal(GenTreeLclVarCommon* use); + private: void MarkUseDef(GenTreeLclVarCommon* lcl, BitVec& useSet, BitVec& defSet); void MarkIndex(unsigned index, bool isUse, bool isDef, BitVec& useSet, BitVec& defSet); @@ -237,10 +238,10 @@ class DecompositionPlan; class ReplaceVisitor : public GenTreeVisitor { - Promotion* m_prom; - AggregateInfo** m_aggregates; + Promotion* m_prom; + AggregateInfo** m_aggregates; PromotionLiveness* m_liveness; - bool m_madeChanges = false; + bool m_madeChanges = false; public: enum diff --git a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index 99eb5b4dcf940d..1ef269bf8cd058 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -46,17 +46,23 @@ class DecompositionPlan var_types Type; }; - Compiler* m_compiler; - AggregateInfo** m_aggregates; + Compiler* m_compiler; + AggregateInfo** m_aggregates; PromotionLiveness* m_liveness; - GenTree* m_dst; - GenTree* m_src; - bool m_dstInvolvesReplacements; - bool m_srcInvolvesReplacements; - ArrayStack m_entries; + GenTree* m_dst; + GenTree* m_src; + bool m_dstInvolvesReplacements; + bool m_srcInvolvesReplacements; + ArrayStack m_entries; public: - DecompositionPlan(Compiler* comp, AggregateInfo** aggregates, PromotionLiveness* liveness, GenTree* dst, GenTree* src, bool 
dstInvolvesReplacements, bool srcInvolvesReplacements) + DecompositionPlan(Compiler* comp, + AggregateInfo** aggregates, + PromotionLiveness* liveness, + GenTree* dst, + GenTree* src, + bool dstInvolvesReplacements, + bool srcInvolvesReplacements) : m_compiler(comp) , m_aggregates(aggregates) , m_liveness(liveness) @@ -247,8 +253,8 @@ class DecompositionPlan // StructSegments ComputeRemainder() { - ClassLayout* dstLayout = m_dst->GetLayout(m_compiler); - StructSegments segments = Promotion::SignificantSegments(m_compiler, dstLayout); + ClassLayout* dstLayout = m_dst->GetLayout(m_compiler); + StructSegments segments = Promotion::SignificantSegments(m_compiler, dstLayout); // Validate with "obviously correct" but less scalable fixed bit vector implementation. INDEBUG(FixedBitVect* segmentBitVect = FixedBitVect::bitVectInit(dstLayout->GetSize(), m_compiler)); @@ -388,9 +394,9 @@ class DecompositionPlan // void FinalizeInit(DecompositionStatementList* statements) { - GenTree* cns = m_src->OperIsInitVal() ? m_src->gtGetOp1() : m_src; - uint8_t initPattern = GetInitPattern(); - StructUseDeaths deaths = m_liveness->GetDeathsForStructLocal(m_dst->AsLclVarCommon()); + GenTree* cns = m_src->OperIsInitVal() ? 
m_src->gtGetOp1() : m_src; + uint8_t initPattern = GetInitPattern(); + StructUseDeaths deaths = m_liveness->GetDeathsForStructLocal(m_dst->AsLclVarCommon()); AggregateInfo* agg = m_aggregates[m_dst->AsLclVarCommon()->GetLclNum()]; assert((agg != nullptr) && (agg->Replacements.size() > 0)); @@ -423,10 +429,10 @@ class DecompositionPlan } else if (remainderStrategy.Type == RemainderStrategy::Primitive) { - GenTree* src = m_compiler->gtNewConWithPattern(remainderStrategy.PrimitiveType, initPattern); + GenTree* src = m_compiler->gtNewConWithPattern(remainderStrategy.PrimitiveType, initPattern); GenTreeLclVarCommon* dstLcl = m_dst->AsLclVarCommon(); - GenTree* dst = m_compiler->gtNewLclFldNode(dstLcl->GetLclNum(), remainderStrategy.PrimitiveType, - dstLcl->GetLclOffs() + remainderStrategy.PrimitiveOffset); + GenTree* dst = m_compiler->gtNewLclFldNode(dstLcl->GetLclNum(), remainderStrategy.PrimitiveType, + dstLcl->GetLclOffs() + remainderStrategy.PrimitiveOffset); m_compiler->lvaSetVarDoNotEnregister(dstLcl->GetLclNum() DEBUGARG(DoNotEnregisterReason::LocalField)); statements->AddStatement(m_compiler->gtNewAssignNode(dst, src)); } @@ -674,13 +680,15 @@ class DecompositionPlan { if (entry.FromReplacement != nullptr) { - JITDUMP(" Skipping dst+%03u <- V%02u (%s); it is up-to-date in its struct local and will be handled " + JITDUMP( + " Skipping dst+%03u <- V%02u (%s); it is up-to-date in its struct local and will be handled " "as part of the remainder\n", entry.Offset, entry.FromReplacement->LclNum, entry.FromReplacement->Description); } else if (entry.ToReplacement != nullptr) { - JITDUMP(" Skipping def of V%02u (%s); it is dying", entry.ToReplacement->LclNum, entry.ToReplacement->Description); + JITDUMP(" Skipping def of V%02u (%s); it is dying", entry.ToReplacement->LclNum, + entry.ToReplacement->Description); } continue; @@ -817,8 +825,8 @@ class DecompositionPlan if (entry.FromReplacement != nullptr) { // Check if the remainder is going to handle it. 
- return (remainderStrategy.Type == RemainderStrategy::FullBlock) && - !entry.FromReplacement->NeedsWriteBack && (entry.ToLclNum == BAD_VAR_NUM); + return (remainderStrategy.Type == RemainderStrategy::FullBlock) && !entry.FromReplacement->NeedsWriteBack && + (entry.ToLclNum == BAD_VAR_NUM); } return false; @@ -1018,7 +1026,8 @@ void ReplaceVisitor::HandleAssignment(GenTree** use, GenTree* user) } } - DecompositionPlan plan(m_compiler, m_aggregates, m_liveness, dst, src, dstInvolvesReplacements, srcInvolvesReplacements); + DecompositionPlan plan(m_compiler, m_aggregates, m_liveness, dst, src, dstInvolvesReplacements, + srcInvolvesReplacements); if (src->IsConstInitVal()) { diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp index 78ce6666cf9914..849c10244e836f 100644 --- a/src/coreclr/jit/promotionliveness.cpp +++ b/src/coreclr/jit/promotionliveness.cpp @@ -15,7 +15,7 @@ struct BasicBlockLiveness bool PromotionLiveness::IsReplacementLiveOut(BasicBlock* bb, unsigned structLcl, unsigned replacementIndex) { - BitVec liveOut = m_bbInfo[bb->bbNum].LiveOut; + BitVec liveOut = m_bbInfo[bb->bbNum].LiveOut; unsigned baseIndex = m_structLclToTrackedIndex[structLcl]; return BitVecOps::IsMember(m_bvTraits, liveOut, baseIndex + 1 + replacementIndex); } @@ -24,10 +24,10 @@ StructUseDeaths PromotionLiveness::GetDeathsForStructLocal(GenTreeLclVarCommon* { assert(lcl->OperIsLocal()); BitVec aggDeaths; - bool found = m_aggDeaths.Lookup(lcl, &aggDeaths); + bool found = m_aggDeaths.Lookup(lcl, &aggDeaths); assert(found); - unsigned lclNum = lcl->GetLclNum(); + unsigned lclNum = lcl->GetLclNum(); AggregateInfo* aggInfo = m_aggregates[lclNum]; return StructUseDeaths(aggDeaths, (unsigned)aggInfo->Replacements.size()); } @@ -46,8 +46,8 @@ bool StructUseDeaths::IsReplacementDying(unsigned index) const void PromotionLiveness::Run() { - m_structLclToTrackedIndex = new (m_compiler, CMK_Promotion) unsigned[m_compiler->lvaTableCnt] {}; - unsigned 
trackedIndex = 0; + m_structLclToTrackedIndex = new (m_compiler, CMK_Promotion) unsigned[m_compiler->lvaTableCnt]{}; + unsigned trackedIndex = 0; for (unsigned lclNum = 0; lclNum < m_compiler->lvaTableCnt; lclNum++) { AggregateInfo* agg = m_aggregates[lclNum]; @@ -66,7 +66,7 @@ void PromotionLiveness::Run() } m_bvTraits = new (m_compiler, CMK_Promotion) BitVecTraits(trackedIndex, m_compiler); - m_bbInfo = m_compiler->fgAllocateTypeForEachBlk(CMK_Promotion); + m_bbInfo = m_compiler->fgAllocateTypeForEachBlk(CMK_Promotion); BitVecOps::AssignNoCopy(m_bvTraits, m_liveIn, BitVecOps::MakeEmpty(m_bvTraits)); BitVecOps::AssignNoCopy(m_bvTraits, m_ehLiveVars, BitVecOps::MakeEmpty(m_bvTraits)); @@ -105,11 +105,11 @@ void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& return; } - jitstd::vector& reps = agg->Replacements; - bool isUse = (lcl->gtFlags & GTF_VAR_DEF) == 0; - bool isDef = !isUse; + jitstd::vector& reps = agg->Replacements; + bool isUse = (lcl->gtFlags & GTF_VAR_DEF) == 0; + bool isDef = !isUse; - unsigned baseIndex = m_structLclToTrackedIndex[lcl->GetLclNum()]; + unsigned baseIndex = m_structLclToTrackedIndex[lcl->GetLclNum()]; var_types accessType = lcl->TypeGet(); if (accessType == TYP_STRUCT) @@ -131,9 +131,9 @@ void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& } else { - unsigned offs = lcl->GetLclOffs(); - unsigned size = lcl->GetLayout(m_compiler)->GetSize(); - size_t index = Promotion::BinarySearch(reps, offs); + unsigned offs = lcl->GetLclOffs(); + unsigned size = lcl->GetLayout(m_compiler)->GetSize(); + size_t index = Promotion::BinarySearch(reps, offs); if ((ssize_t)index < 0) { @@ -147,7 +147,8 @@ void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& while ((index < reps.size()) && (reps[index].Offset < offs + size)) { Replacement& rep = reps[index]; - bool isFullFieldDef = isDef && (offs <= rep.Offset) && (offs + size >= rep.Offset + genTypeSize(rep.AccessType)); + 
bool isFullFieldDef = + isDef && (offs <= rep.Offset) && (offs + size >= rep.Offset + genTypeSize(rep.AccessType)); MarkIndex(baseIndex + 1 + (unsigned)index, isUse, isFullFieldDef, useSet, defSet); } @@ -159,11 +160,11 @@ void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& } else { - unsigned offs = lcl->GetLclOffs(); - size_t index = Promotion::BinarySearch(reps, offs); + unsigned offs = lcl->GetLclOffs(); + size_t index = Promotion::BinarySearch(reps, offs); if ((ssize_t)index < 0) { - unsigned size = genTypeSize(accessType); + unsigned size = genTypeSize(accessType); bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); MarkIndex(baseIndex, isUse, isFullDefOfRemainder, useSet, defSet); } @@ -270,7 +271,7 @@ void PromotionLiveness::AddHandlerLiveVars(BasicBlock* block, BitVec& ehLiveVars /* If we have nested try's edbEnclosing will provide them */ assert((HBtab->ebdEnclosingTryIndex == EHblkDsc::NO_ENCLOSING_INDEX) || - (HBtab->ebdEnclosingTryIndex > m_compiler->ehGetIndex(HBtab))); + (HBtab->ebdEnclosingTryIndex > m_compiler->ehGetIndex(HBtab))); unsigned outerIndex = HBtab->ebdEnclosingTryIndex; if (outerIndex == EHblkDsc::NO_ENCLOSING_INDEX) @@ -405,7 +406,7 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre bool isUse = (lcl->gtFlags & GTF_VAR_DEF) == 0; bool isDef = !isUse; - unsigned baseIndex = m_structLclToTrackedIndex[lcl->GetLclNum()]; + unsigned baseIndex = m_structLclToTrackedIndex[lcl->GetLclNum()]; var_types accessType = lcl->TypeGet(); if (accessType == TYP_STRUCT) @@ -420,7 +421,7 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre // We need an external bit set to represent dying fields/remainder on a struct use. 
BitVecTraits aggTraits(1 + (unsigned)agg->Replacements.size(), m_compiler); - BitVec aggDeaths(BitVecOps::MakeEmpty(&aggTraits)); + BitVec aggDeaths(BitVecOps::MakeEmpty(&aggTraits)); if (lcl->OperIsScalarLocal()) { // Handle remainder and all fields. @@ -447,9 +448,9 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre } else { - unsigned offs = lcl->GetLclOffs(); - unsigned size = lcl->GetLayout(m_compiler)->GetSize(); - size_t index = Promotion::BinarySearch(agg->Replacements, offs); + unsigned offs = lcl->GetLclOffs(); + unsigned size = lcl->GetLayout(m_compiler)->GetSize(); + size_t index = Promotion::BinarySearch(agg->Replacements, offs); if ((ssize_t)index < 0) { @@ -462,11 +463,12 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre while ((index < agg->Replacements.size()) && (agg->Replacements[index].Offset < offs + size)) { - unsigned varIndex = baseIndex + 1 + (unsigned)index; - Replacement& rep = agg->Replacements[index]; + unsigned varIndex = baseIndex + 1 + (unsigned)index; + Replacement& rep = agg->Replacements[index]; if (BitVecOps::IsMember(m_bvTraits, life, varIndex)) { - bool isFullFieldDef = isDef && (offs <= rep.Offset) && (offs + size >= rep.Offset + genTypeSize(rep.AccessType)); + bool isFullFieldDef = + isDef && (offs <= rep.Offset) && (offs + size >= rep.Offset + genTypeSize(rep.AccessType)); if (isFullFieldDef && !BitVecOps::IsMember(m_bvTraits, volatileVars, varIndex)) { BitVecOps::RemoveElemD(m_bvTraits, life, varIndex); @@ -485,7 +487,8 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre if (BitVecOps::IsMember(m_bvTraits, life, baseIndex)) { - bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); + bool isFullDefOfRemainder = + isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); if (isFullDefOfRemainder && !BitVecOps::IsMember(m_bvTraits, volatileVars, 
baseIndex)) { BitVecOps::RemoveElemD(m_bvTraits, life, baseIndex); @@ -508,8 +511,8 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre } else { - unsigned offs = lcl->GetLclOffs(); - size_t index = Promotion::BinarySearch(agg->Replacements, offs); + unsigned offs = lcl->GetLclOffs(); + size_t index = Promotion::BinarySearch(agg->Replacements, offs); if ((ssize_t)index < 0) { unsigned size = genTypeSize(accessType); @@ -517,7 +520,8 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre { lcl->gtFlags &= ~GTF_VAR_DEATH; - bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); + bool isFullDefOfRemainder = + isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); if (isFullDefOfRemainder && !BitVecOps::IsMember(m_bvTraits, volatileVars, baseIndex)) { BitVecOps::RemoveElemD(m_bvTraits, life, baseIndex); @@ -535,7 +539,7 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre } else { - index = ~index; + index = ~index; unsigned varIndex = baseIndex + 1 + (unsigned)index; if (BitVecOps::IsMember(m_bvTraits, life, varIndex)) From 1c7e18432661b7d9b8a4c7300eb6fcbb51d4980b Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Wed, 10 May 2023 16:14:29 +0200 Subject: [PATCH 06/16] Fix --- src/coreclr/jit/promotion.cpp | 7 ++-- src/coreclr/jit/promotion.h | 30 ++++++++-------- src/coreclr/jit/promotiondecomposition.cpp | 41 +++++++++++----------- src/coreclr/jit/promotionliveness.cpp | 4 +-- 4 files changed, 43 insertions(+), 39 deletions(-) diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index 3f51787e8fed23..c6cf9b96c97dcd 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -1215,8 +1215,11 @@ void ReplaceVisitor::ReplaceLocal(GenTree** use, GenTree* user) Replacement& rep = replacements[index]; assert(accessType == rep.AccessType); JITDUMP(" 
..replaced with promoted lcl V%02u\n", rep.LclNum); + *use = m_compiler->gtNewLclvNode(rep.LclNum, accessType); + (*use)->gtFlags |= lcl->gtFlags & GTF_VAR_DEATH; + if ((lcl->gtFlags & GTF_VAR_DEF) != 0) { (*use)->gtFlags |= GTF_VAR_DEF; // TODO-ASG: delete. @@ -1414,8 +1417,8 @@ PhaseStatus Promotion::Run() #endif // Pick promotions based on the use information we just collected. - bool anyReplacements = false; - AggregateInfo** aggregates = new (m_compiler, CMK_Promotion) AggregateInfo*[m_compiler->lvaCount]{}; + bool anyReplacements = false; + jitstd::vector aggregates(m_compiler->lvaCount, nullptr, m_compiler->getAllocator(CMK_Promotion)); for (unsigned i = 0; i < numLocals; i++) { LocalUses* uses = localsUse.GetUsesByLocal(i); diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h index 6ead4a7b6e1dd4..c93a0175ebba0d 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -190,7 +190,7 @@ class StructUseDeaths } public: - StructUseDeaths() : m_numFields(0) + StructUseDeaths() : m_deaths(BitVecOps::UninitVal()), m_numFields(0) { } @@ -200,20 +200,20 @@ class StructUseDeaths class PromotionLiveness { - Compiler* m_compiler; - AggregateInfo** m_aggregates; - BitVecTraits* m_bvTraits = nullptr; - unsigned* m_structLclToTrackedIndex = nullptr; - BasicBlockLiveness* m_bbInfo = nullptr; - bool m_hasPossibleBackEdge = false; - BitVec m_liveIn; - BitVec m_ehLiveVars; + Compiler* m_compiler; + jitstd::vector& m_aggregates; + BitVecTraits* m_bvTraits = nullptr; + unsigned* m_structLclToTrackedIndex = nullptr; + BasicBlockLiveness* m_bbInfo = nullptr; + bool m_hasPossibleBackEdge = false; + BitVec m_liveIn; + BitVec m_ehLiveVars; JitHashTable, BitVec> m_aggDeaths; friend class PromotionLivenessBitSetTraits; public: - PromotionLiveness(Compiler* compiler, AggregateInfo** aggregates) + PromotionLiveness(Compiler* compiler, jitstd::vector& aggregates) : m_compiler(compiler), m_aggregates(aggregates), 
m_aggDeaths(compiler->getAllocator(CMK_Promotion)) { } @@ -238,10 +238,10 @@ class DecompositionPlan; class ReplaceVisitor : public GenTreeVisitor { - Promotion* m_prom; - AggregateInfo** m_aggregates; - PromotionLiveness* m_liveness; - bool m_madeChanges = false; + Promotion* m_prom; + jitstd::vector& m_aggregates; + PromotionLiveness* m_liveness; + bool m_madeChanges = false; public: enum @@ -250,7 +250,7 @@ class ReplaceVisitor : public GenTreeVisitor UseExecutionOrder = true, }; - ReplaceVisitor(Promotion* prom, AggregateInfo** aggregates, PromotionLiveness* liveness) + ReplaceVisitor(Promotion* prom, jitstd::vector& aggregates, PromotionLiveness* liveness) : GenTreeVisitor(prom->m_compiler), m_prom(prom), m_aggregates(aggregates), m_liveness(liveness) { } diff --git a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index 811a6c035c7e0c..6e9336cf347dba 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -46,23 +46,23 @@ class DecompositionPlan var_types Type; }; - Compiler* m_compiler; - AggregateInfo** m_aggregates; - PromotionLiveness* m_liveness; - GenTree* m_dst; - GenTree* m_src; - bool m_dstInvolvesReplacements; - bool m_srcInvolvesReplacements; - ArrayStack m_entries; + Compiler* m_compiler; + jitstd::vector& m_aggregates; + PromotionLiveness* m_liveness; + GenTree* m_dst; + GenTree* m_src; + bool m_dstInvolvesReplacements; + bool m_srcInvolvesReplacements; + ArrayStack m_entries; public: - DecompositionPlan(Compiler* comp, - AggregateInfo** aggregates, - PromotionLiveness* liveness, - GenTree* dst, - GenTree* src, - bool dstInvolvesReplacements, - bool srcInvolvesReplacements) + DecompositionPlan(Compiler* comp, + jitstd::vector& aggregates, + PromotionLiveness* liveness, + GenTree* dst, + GenTree* src, + bool dstInvolvesReplacements, + bool srcInvolvesReplacements) : m_compiler(comp) , m_aggregates(aggregates) , m_liveness(liveness) @@ -677,6 +677,12 @@ 
class DecompositionPlan { const Entry& entry = m_entries.BottomRef(i); + if (entry.ToReplacement != nullptr) + { + entry.ToReplacement->NeedsWriteBack = true; + entry.ToReplacement->NeedsReadBack = false; + } + if (CanSkipEntry(entry, dstDeaths, remainderStrategy)) { if (entry.FromReplacement != nullptr) @@ -737,11 +743,6 @@ class DecompositionPlan } statements->AddStatement(m_compiler->gtNewAssignNode(dst, src)); - if (entry.ToReplacement != nullptr) - { - entry.ToReplacement->NeedsWriteBack = true; - entry.ToReplacement->NeedsReadBack = false; - } } if ((remainderStrategy.Type == RemainderStrategy::FullBlock) && !m_srcInvolvesReplacements) diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp index 849c10244e836f..726e8dafbe85a0 100644 --- a/src/coreclr/jit/promotionliveness.cpp +++ b/src/coreclr/jit/promotionliveness.cpp @@ -46,9 +46,9 @@ bool StructUseDeaths::IsReplacementDying(unsigned index) const void PromotionLiveness::Run() { - m_structLclToTrackedIndex = new (m_compiler, CMK_Promotion) unsigned[m_compiler->lvaTableCnt]{}; + m_structLclToTrackedIndex = new (m_compiler, CMK_Promotion) unsigned[m_aggregates.size()]{}; unsigned trackedIndex = 0; - for (unsigned lclNum = 0; lclNum < m_compiler->lvaTableCnt; lclNum++) + for (size_t lclNum = 0; lclNum < m_aggregates.size(); lclNum++) { AggregateInfo* agg = m_aggregates[lclNum]; if (agg == nullptr) From cb8ea30a735c4c6660e239ecdd361131a5688fa1 Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Wed, 10 May 2023 16:29:50 +0200 Subject: [PATCH 07/16] Fixes, run jit-format --- src/coreclr/jit/promotion.cpp | 10 +++++++++- src/coreclr/jit/promotion.h | 3 ++- src/coreclr/jit/promotiondecomposition.cpp | 7 ++++--- src/coreclr/jit/promotionliveness.cpp | 6 ++++-- 4 files changed, 19 insertions(+), 7 deletions(-) diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index c6cf9b96c97dcd..cf373973bfc05c 100644 --- a/src/coreclr/jit/promotion.cpp +++ 
b/src/coreclr/jit/promotion.cpp @@ -924,7 +924,8 @@ void StructSegments::Dump() } #endif -StructSegments Promotion::SignificantSegments(Compiler* compiler, ClassLayout* layout) +StructSegments Promotion::SignificantSegments(Compiler* compiler, + ClassLayout* layout DEBUGARG(FixedBitVect** bitVectRepr)) { COMP_HANDLE compHnd = compiler->info.compCompHnd; @@ -1002,6 +1003,13 @@ StructSegments Promotion::SignificantSegments(Compiler* compiler, ClassLayout* l } } +#ifdef DEBUG + if (bitVectRepr != nullptr) + { + *bitVectRepr = segmentBitVect; + } +#endif + // TODO-TP: Cache this per class layout, we call this for every struct // operation on a promoted local. return segments; diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h index c93a0175ebba0d..db0df7d458d8be 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -114,7 +114,8 @@ class Promotion friend class DecompositionPlan; friend class StructSegments; - static StructSegments SignificantSegments(Compiler* compiler, ClassLayout* layout); + static StructSegments SignificantSegments(Compiler* compiler, + ClassLayout* layout DEBUGARG(FixedBitVect** bitVectRepr = nullptr)); void InsertInitialReadBack(unsigned lclNum, const jitstd::vector& replacements, Statement** prevStmt); void ExplicitlyZeroInitReplacementLocals(unsigned lclNum, diff --git a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index 6e9336cf347dba..57fdf74fb6f22d 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -68,6 +68,7 @@ class DecompositionPlan , m_liveness(liveness) , m_dst(dst) , m_src(src) + , m_dstInvolvesReplacements(dstInvolvesReplacements) , m_srcInvolvesReplacements(srcInvolvesReplacements) , m_entries(comp->getAllocator(CMK_Promotion)) { @@ -253,11 +254,11 @@ class DecompositionPlan // StructSegments ComputeRemainder() { - ClassLayout* dstLayout = m_dst->GetLayout(m_compiler); - StructSegments 
segments = Promotion::SignificantSegments(m_compiler, dstLayout); + ClassLayout* dstLayout = m_dst->GetLayout(m_compiler); // Validate with "obviously correct" but less scalable fixed bit vector implementation. - INDEBUG(FixedBitVect* segmentBitVect = FixedBitVect::bitVectInit(dstLayout->GetSize(), m_compiler)); + INDEBUG(FixedBitVect * segmentBitVect); + StructSegments segments = Promotion::SignificantSegments(m_compiler, dstLayout DEBUGARG(&segmentBitVect)); for (int i = 0; i < m_entries.Height(); i++) { diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp index 726e8dafbe85a0..fc3b46de74d1d6 100644 --- a/src/coreclr/jit/promotionliveness.cpp +++ b/src/coreclr/jit/promotionliveness.cpp @@ -150,6 +150,7 @@ void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& bool isFullFieldDef = isDef && (offs <= rep.Offset) && (offs + size >= rep.Offset + genTypeSize(rep.AccessType)); MarkIndex(baseIndex + 1 + (unsigned)index, isUse, isFullFieldDef, useSet, defSet); + index++; } bool isFullDefOfRemainder = isDef && (agg->UnpromotedMin >= offs) && (agg->UnpromotedMax <= (offs + size)); @@ -233,7 +234,7 @@ bool PromotionLiveness::PerBlockLiveness(BasicBlock* block) m_hasPossibleBackEdge = true; } - bool liveInChanged = BitVecOps::Equal(m_bvTraits, bbInfo.LiveIn, m_liveIn); + bool liveInChanged = !BitVecOps::Equal(m_bvTraits, bbInfo.LiveIn, m_liveIn); if (liveInChanged) { @@ -483,6 +484,8 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre BitVecOps::AddElemD(m_bvTraits, life, varIndex); } } + + index++; } if (BitVecOps::IsMember(m_bvTraits, life, baseIndex)) @@ -539,7 +542,6 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre } else { - index = ~index; unsigned varIndex = baseIndex + 1 + (unsigned)index; if (BitVecOps::IsMember(m_bvTraits, life, varIndex)) From 68b822bbaaa454ff0e21659f8324e42117fd4dbc Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen 
Date: Fri, 12 May 2023 10:46:19 +0200 Subject: [PATCH 08/16] Handle non-remainder uses of struct local --- src/coreclr/jit/promotion.h | 5 +++++ src/coreclr/jit/promotiondecomposition.cpp | 25 +++++++++++++++++----- 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h index db0df7d458d8be..25230ecd5895fd 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -280,6 +280,11 @@ class ReplaceVisitor : public GenTreeVisitor Replacement** firstReplacement, Replacement** endReplacement = nullptr); void EliminateCommasInBlockOp(GenTreeOp* asg, DecompositionStatementList* result); + void HandlePartiallyOverlappingReplacements(Replacement** dstFirstRep, + Replacement** dstEndRep, + Replacement** srcFirstRep, + Replacement** srcEndRep, + DecompositionPlan* plan); void InitFields(GenTreeLclVarCommon* dst, Replacement* firstRep, Replacement* endRep, DecompositionPlan* plan); void CopyBetweenFields(GenTree* dst, Replacement* dstFirstRep, diff --git a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index 57fdf74fb6f22d..7181b07f8f6a56 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -54,6 +54,7 @@ class DecompositionPlan bool m_dstInvolvesReplacements; bool m_srcInvolvesReplacements; ArrayStack m_entries; + bool m_hasNonRemainderUseOfStructLocal = false; public: DecompositionPlan(Compiler* comp, @@ -168,6 +169,17 @@ class DecompositionPlan m_entries.Push(Entry{dstRep->LclNum, dstRep, BAD_VAR_NUM, nullptr, offset, dstRep->AccessType}); } + //------------------------------------------------------------------------ + // MarkNonRemainderUseOfStructLocal: + // Mark that some of the destination replacements are being handled via a + // readback. This invalidates liveness information for the remainder + // because the struct local will now also be used for the readback. 
+ // + void MarkNonRemainderUseOfStructLocal() + { + m_hasNonRemainderUseOfStructLocal = true; + } + //------------------------------------------------------------------------ // Finalize: // Create IR to perform the full decomposed struct copy as specified by @@ -320,7 +332,7 @@ class DecompositionPlan // RemainderStrategy DetermineRemainderStrategy(const StructUseDeaths& dstDeaths) { - if (m_dstInvolvesReplacements && dstDeaths.IsRemainderDying()) + if (m_dstInvolvesReplacements && !m_hasNonRemainderUseOfStructLocal && dstDeaths.IsRemainderDying()) { JITDUMP(" => Remainder strategy: do nothing (remainder dying)\n"); return RemainderStrategy(RemainderStrategy::NoRemainder); @@ -372,7 +384,7 @@ class DecompositionPlan { if (!IsInit() || CanInitPrimitive(primitiveType)) { - JITDUMP(" => Remainder strategy: %s at %03u\n", varTypeName(primitiveType), segment.Start); + JITDUMP(" => Remainder strategy: %s at +%03u\n", varTypeName(primitiveType), segment.Start); return RemainderStrategy(RemainderStrategy::Primitive, segment.Start, primitiveType); } else @@ -986,6 +998,9 @@ void ReplaceVisitor::HandleAssignment(GenTree** use, GenTree* user) DecompositionStatementList result; EliminateCommasInBlockOp(asg, &result); + DecompositionPlan plan(m_compiler, m_aggregates, m_liveness, dst, src, dstInvolvesReplacements, + srcInvolvesReplacements); + if (dstInvolvesReplacements) { unsigned dstLclOffs = dstLcl->GetLclOffs(); @@ -1005,6 +1020,7 @@ void ReplaceVisitor::HandleAssignment(GenTree** use, GenTree* user) dstFirstRep->NeedsWriteBack = false; } + plan.MarkNonRemainderUseOfStructLocal(); dstFirstRep->NeedsReadBack = true; dstFirstRep++; } @@ -1023,6 +1039,7 @@ void ReplaceVisitor::HandleAssignment(GenTree** use, GenTree* user) dstLastRep->NeedsWriteBack = false; } + plan.MarkNonRemainderUseOfStructLocal(); dstLastRep->NeedsReadBack = true; dstEndRep--; } @@ -1067,9 +1084,6 @@ void ReplaceVisitor::HandleAssignment(GenTree** use, GenTree* user) } } - DecompositionPlan 
plan(m_compiler, m_aggregates, m_liveness, dst, src, dstInvolvesReplacements, - srcInvolvesReplacements); - if (src->IsConstInitVal()) { InitFields(dst->AsLclVarCommon(), dstFirstRep, dstEndRep, &plan); @@ -1219,6 +1233,7 @@ void ReplaceVisitor::InitFields(GenTreeLclVarCommon* dst, // We will need to read this one back after initing the struct. rep->NeedsWriteBack = false; rep->NeedsReadBack = true; + plan->MarkNonRemainderUseOfStructLocal(); continue; } From 6798af53acc34aeec77773f7e952caeebfa3c54a Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Fri, 12 May 2023 14:05:18 +0200 Subject: [PATCH 09/16] Cleanup; run jit-format --- src/coreclr/jit/promotion.cpp | 60 ++++- src/coreclr/jit/promotion.h | 8 +- src/coreclr/jit/promotiondecomposition.cpp | 6 +- src/coreclr/jit/promotionliveness.cpp | 247 +++++++++++++++++---- 4 files changed, 262 insertions(+), 59 deletions(-) diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index cf373973bfc05c..2f953727bb0e99 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -1,3 +1,6 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + #include "jitpch.h" #include "promotion.h" #include "jitstd/algorithm.h" @@ -671,7 +674,17 @@ bool Replacement::Overlaps(unsigned otherStart, unsigned otherSize) const return true; } -bool StructSegments::Segment::IntersectsInclusive(const Segment& other) const +//------------------------------------------------------------------------ +// IntersectsOrAdjacent: +// Check if this segment intersects or is adjacent to another segment. +// +// Parameters: +// other - The other segment. +// +// Returns: +// True if so. 
+// +bool StructSegments::Segment::IntersectsOrAdjacent(const Segment& other) const { if (End < other.Start) { @@ -686,11 +699,28 @@ bool StructSegments::Segment::IntersectsInclusive(const Segment& other) const return true; } +//------------------------------------------------------------------------ +// Contains: +// Check if this segment contains another segment. +// +// Parameters: +// other - The other segment. +// +// Returns: +// True if so. +// bool StructSegments::Segment::Contains(const Segment& other) const { return (other.Start >= Start) && (other.End <= End); } +//------------------------------------------------------------------------ +// Merge: +// Update this segment to also contain another segment. +// +// Parameters: +// other - The other segment. +// void StructSegments::Segment::Merge(const Segment& other) { Start = min(Start, other.Start); @@ -717,7 +747,7 @@ void StructSegments::Add(const Segment& segment) size_t endIndex; for (endIndex = index + 1; endIndex < m_segments.size(); endIndex++) { - if (!m_segments[index].IntersectsInclusive(m_segments[endIndex])) + if (!m_segments[index].IntersectsOrAdjacent(m_segments[endIndex])) { break; } @@ -760,7 +790,7 @@ void StructSegments::Subtract(const Segment& segment) return; } - assert(m_segments[index].IntersectsInclusive(segment)); + assert(m_segments[index].IntersectsOrAdjacent(segment)); if (m_segments[index].Contains(segment)) { @@ -853,6 +883,16 @@ bool StructSegments::IsSingleSegment(Segment* result) return false; } +//------------------------------------------------------------------------ +// CoveringSegment: +// Compute a segment that covers all contained segments in this segment tree. +// +// Parameters: +// result - [out] The single segment. Only valid if the method returns true. +// +// Returns: +// True if this segment tree was non-empty; otherwise false. 
+// bool StructSegments::CoveringSegment(Segment* result) { if (m_segments.size() == 0) @@ -924,6 +964,20 @@ void StructSegments::Dump() } #endif +//------------------------------------------------------------------------ +// SignificantSegments: +// Compute a segment tree containing all significant (non-padding) segments +// for the specified class layout. +// +// Parameters: +// compiler - Compiler instance +// layout - The layout +// bitVectRepr - In debug, a bit vector that represents the same segments as the returned segment tree. +// Used for verification purposes. +// +// Returns: +// Segment tree containing all significant parts of the layout. +// StructSegments Promotion::SignificantSegments(Compiler* compiler, ClassLayout* layout DEBUGARG(FixedBitVect** bitVectRepr)) { diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h index 25230ecd5895fd..a81fe4be6a09f4 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -58,7 +58,7 @@ class StructSegments { } - bool IntersectsInclusive(const Segment& other) const; + bool IntersectsOrAdjacent(const Segment& other) const; bool Contains(const Segment& other) const; void Merge(const Segment& other); }; @@ -199,6 +199,7 @@ class StructUseDeaths bool IsReplacementDying(unsigned index) const; }; +// Class to compute and track liveness information pertaining to promoted structs. 
class PromotionLiveness { Compiler* m_compiler; @@ -280,11 +281,6 @@ class ReplaceVisitor : public GenTreeVisitor Replacement** firstReplacement, Replacement** endReplacement = nullptr); void EliminateCommasInBlockOp(GenTreeOp* asg, DecompositionStatementList* result); - void HandlePartiallyOverlappingReplacements(Replacement** dstFirstRep, - Replacement** dstEndRep, - Replacement** srcFirstRep, - Replacement** srcEndRep, - DecompositionPlan* plan); void InitFields(GenTreeLclVarCommon* dst, Replacement* firstRep, Replacement* endRep, DecompositionPlan* plan); void CopyBetweenFields(GenTree* dst, Replacement* dstFirstRep, diff --git a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index 7181b07f8f6a56..384a8f1b9f6251 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -1,3 +1,6 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + #include "jitpch.h" #include "promotion.h" #include "jitstd/algorithm.h" @@ -347,11 +350,10 @@ class DecompositionPlan StructSegments::Segment segment; // See if we can "plug the hole" with a single primitive. - if (remainder.IsSingleSegment(&segment)) + if (remainder.IsSingleSegment(&segment) && !m_dst->OperIs(GT_LCL_VAR)) { var_types primitiveType = TYP_UNDEF; unsigned size = segment.End - segment.Start; - // For if ((size == TARGET_POINTER_SIZE) && ((segment.Start % TARGET_POINTER_SIZE) == 0)) { ClassLayout* dstLayout = m_dst->GetLayout(m_compiler); diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp index fc3b46de74d1d6..73f09599bdbb68 100644 --- a/src/coreclr/jit/promotionliveness.cpp +++ b/src/coreclr/jit/promotionliveness.cpp @@ -1,3 +1,6 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ #include "jitpch.h" #include "promotion.h" @@ -13,37 +16,56 @@ struct BasicBlockLiveness BitVec LiveOut; }; -bool PromotionLiveness::IsReplacementLiveOut(BasicBlock* bb, unsigned structLcl, unsigned replacementIndex) -{ - BitVec liveOut = m_bbInfo[bb->bbNum].LiveOut; - unsigned baseIndex = m_structLclToTrackedIndex[structLcl]; - return BitVecOps::IsMember(m_bvTraits, liveOut, baseIndex + 1 + replacementIndex); -} - -StructUseDeaths PromotionLiveness::GetDeathsForStructLocal(GenTreeLclVarCommon* lcl) -{ - assert(lcl->OperIsLocal()); - BitVec aggDeaths; - bool found = m_aggDeaths.Lookup(lcl, &aggDeaths); - assert(found); - - unsigned lclNum = lcl->GetLclNum(); - AggregateInfo* aggInfo = m_aggregates[lclNum]; - return StructUseDeaths(aggDeaths, (unsigned)aggInfo->Replacements.size()); -} - -bool StructUseDeaths::IsRemainderDying() const -{ - BitVecTraits traits(1 + m_numFields, nullptr); - return BitVecOps::IsMember(&traits, m_deaths, 0); -} - -bool StructUseDeaths::IsReplacementDying(unsigned index) const -{ - BitVecTraits traits(1 + m_numFields, nullptr); - return BitVecOps::IsMember(&traits, m_deaths, 1 + index); -} - +//------------------------------------------------------------------------ +// Run: +// Compute liveness information pertaining to the promoted structs. +// +// Remarks: +// For each promoted aggregate we compute the liveness for its remainder and +// all of its fields. Unlike regular liveness we currently do not do any DCE +// here and so only do the dataflow computation once. +// +// The liveness information is written into the IR using the normal +// GTF_VAR_DEATH flag. 
Note that the semantics of GTF_VAR_DEATH differs from +// the rest of the JIT for a short while between the liveness is computed and +// the replacement phase has run: in particular, after this liveness pass you +// may see a node like: +// +// LCL_FLD int V16 tmp9 [+8] (last use) +// +// that indicates that this particular field (or the remainder if it wasn't +// promoted) is dying, not that V16 itself is dying. After replacement has +// run the semantics align with the rest of the JIT: in the promoted case V16 +// [+8] would be replaced by its promoted field local, and in the remainder +// case all non-remainder uses of V16 would also be. +// +// There is one catch which is struct uses of the local. These can indicate +// deaths of multiple fields and also the remainder, so this information is +// stored on the side. PromotionLiveness::GetDeathsForStructLocal is used to +// query this information. +// +// The liveness information is used by decomposition to avoid creating dead +// stores, and also to mark the replacement field uses/defs with proper +// up-to-date liveness information to be used by future phases (forward sub +// and morph, as of writing this). It is also used to avoid creating +// unnecessary read-backs; this is mostly just a TP optimization as future +// liveness passes would be expected to DCE these anyway. +// +// Avoiding the creation of dead stores to the remainder is especially +// important as these otherwise would often end up looking like partial +// definitions, and the other liveness passes handle partial definitions very +// conservatively and are not able to DCE them. +// +// Unlike the other liveness passes we keep the per-block liveness +// information on the side and we do not update BasicBlock::bbLiveIn et al. +// This relies on downstream phases not requiring/wanting to use per-basic +// block live-in/live-out/var-use/var-def sets. To be able to update these we +// would need to give the new locals "regular" tracked indices (i.e. 
allocate +// a lvVarIndex); currently the indices allocated are "dense" in the sense +// that the bit vectors only have indices for remainders and the replacement +// fields introduced by this pass (in other words, we allocate 1 + num_fields +// indices for each promoted struct local). +// void PromotionLiveness::Run() { m_structLclToTrackedIndex = new (m_compiler, CMK_Promotion) unsigned[m_aggregates.size()]{}; @@ -77,6 +99,10 @@ void PromotionLiveness::Run() FillInLiveness(); } +//------------------------------------------------------------------------ +// ComputeUseDefSets: +// Compute the use and def sets for all blocks. +// void PromotionLiveness::ComputeUseDefSets() { for (BasicBlock* block : m_compiler->Blocks()) @@ -97,6 +123,15 @@ void PromotionLiveness::ComputeUseDefSets() } } +//------------------------------------------------------------------------ +// MarkUseDef: +// Mark use/def information for a single appearance of a local. +// +// Parameters: +// lcl - The local node +// useSet - The use set to mark in. +// defSet - The def set to mark in. +// void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& useSet, BitSetShortLongRep& defSet) { AggregateInfo* agg = m_aggregates[lcl->GetLclNum()]; @@ -106,8 +141,8 @@ void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& } jitstd::vector& reps = agg->Replacements; - bool isUse = (lcl->gtFlags & GTF_VAR_DEF) == 0; - bool isDef = !isUse; + bool isDef = (lcl->gtFlags & GTF_VAR_DEF) != 0; + bool isUse = !isDef; unsigned baseIndex = m_structLclToTrackedIndex[lcl->GetLclNum()]; var_types accessType = lcl->TypeGet(); @@ -177,6 +212,17 @@ void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& } } +//------------------------------------------------------------------------ +// MarkIndex: +// Mark specific bits in use/def bit vectors depending on whether this is a use or a def. +// +// Parameters: +// index - The index of the bit to set. 
+// isUse - Whether this is a use +// isDef - Whether this is a def +// useSet - The set of uses +// defSet - The set of defs +// void PromotionLiveness::MarkIndex(unsigned index, bool isUse, bool isDef, BitVec& useSet, BitVec& defSet) { if (isUse && !BitVecOps::IsMember(m_bvTraits, defSet, index)) @@ -190,6 +236,10 @@ void PromotionLiveness::MarkIndex(unsigned index, bool isUse, bool isDef, BitVec } } +//------------------------------------------------------------------------ +// InterBlockLiveness: +// Compute the fixpoint. +// void PromotionLiveness::InterBlockLiveness() { bool changed; @@ -210,6 +260,14 @@ void PromotionLiveness::InterBlockLiveness() } while (changed); } +//------------------------------------------------------------------------ +// PerBlockLiveness: +// Compute liveness for a single block during a single iteration of the +// fixpoint computation. +// +// Parameters: +// block - The block +// bool PromotionLiveness::PerBlockLiveness(BasicBlock* block) { // We disable promotion for GT_JMP methods. @@ -244,6 +302,18 @@ bool PromotionLiveness::PerBlockLiveness(BasicBlock* block) return liveInChanged; } +//------------------------------------------------------------------------ +// AddHandlerLiveVars: +// Find variables that are live-in to handlers reachable by implicit control +// flow and add them to a specified bit vector. +// +// Parameters: +// block - The block +// ehLiveVars - The bit vector to mark in +// +// Remarks: +// Similar to Compiler::fgGetHandlerLiveVars used by regular liveness. 
+// void PromotionLiveness::AddHandlerLiveVars(BasicBlock* block, BitVec& ehLiveVars) { assert(m_compiler->ehBlockHasExnFlowDsc(block)); @@ -251,7 +321,7 @@ void PromotionLiveness::AddHandlerLiveVars(BasicBlock* block, BitVec& ehLiveVars do { - /* Either we enter the filter first or the catch/finally */ + // Either we enter the filter first or the catch/finally if (HBtab->HasFilter()) { BitVecOps::UnionD(m_bvTraits, ehLiveVars, m_bbInfo[HBtab->ebdFilter->bbNum].LiveIn); @@ -270,7 +340,7 @@ void PromotionLiveness::AddHandlerLiveVars(BasicBlock* block, BitVec& ehLiveVars BitVecOps::UnionD(m_bvTraits, ehLiveVars, m_bbInfo[HBtab->ebdHndBeg->bbNum].LiveIn); } - /* If we have nested try's edbEnclosing will provide them */ + // If we have nested try's edbEnclosing will provide them assert((HBtab->ebdEnclosingTryIndex == EHblkDsc::NO_ENCLOSING_INDEX) || (HBtab->ebdEnclosingTryIndex > m_compiler->ehGetIndex(HBtab))); @@ -350,6 +420,11 @@ void PromotionLiveness::AddHandlerLiveVars(BasicBlock* block, BitVec& ehLiveVars } } +//------------------------------------------------------------------------ +// FillInLiveness: +// Starting with the live-out set for each basic block do a backwards traversal +// marking liveness into the IR. 
+// void PromotionLiveness::FillInLiveness() { BitVec life(BitVecOps::MakeEmpty(m_bvTraits)); @@ -357,6 +432,11 @@ void PromotionLiveness::FillInLiveness() for (BasicBlock* block : m_compiler->Blocks()) { + if (block->firstStmt() == nullptr) + { + continue; + } + BasicBlockLiveness& bbInfo = m_bbInfo[block->bbNum]; BitVecOps::ClearD(m_bvTraits, volatileVars); @@ -368,34 +448,34 @@ void PromotionLiveness::FillInLiveness() BitVecOps::Assign(m_bvTraits, life, bbInfo.LiveOut); - Statement* firstStmt = block->firstStmt(); - - if (firstStmt == nullptr) - { - continue; - } - Statement* stmt = block->lastStmt(); while (true) { - Statement* prevStmt = stmt->GetPrevStmt(); - for (GenTree* cur = stmt->GetTreeListEnd(); cur != nullptr; cur = cur->gtPrev) { FillInLiveness(life, volatileVars, cur->AsLclVarCommon()); } - if (stmt == firstStmt) + if (stmt == block->firstStmt()) { break; } - stmt = prevStmt; + stmt = stmt->GetPrevStmt(); } } } +//------------------------------------------------------------------------ +// FillInLiveness: +// Fill liveness information into the specified IR node. +// +// Parameters: +// life - The current life set. Will be read and updated depending on 'lcl'. +// volatileVars - Bit vector of variables that are live always. +// lcl - The IR node to process liveness for and to mark with liveness information. +// void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTreeLclVarCommon* lcl) { AggregateInfo* agg = m_aggregates[lcl->GetLclNum()]; @@ -404,8 +484,8 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre return; } - bool isUse = (lcl->gtFlags & GTF_VAR_DEF) == 0; - bool isDef = !isUse; + bool isDef = (lcl->gtFlags & GTF_VAR_DEF) != 0; + bool isUse = !isDef; unsigned baseIndex = m_structLclToTrackedIndex[lcl->GetLclNum()]; var_types accessType = lcl->TypeGet(); @@ -462,6 +542,7 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre } } + // Handle fields. 
while ((index < agg->Replacements.size()) && (agg->Replacements[index].Offset < offs + size)) { unsigned varIndex = baseIndex + 1 + (unsigned)index; @@ -488,6 +569,7 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre index++; } + // Handle remainder. if (BitVecOps::IsMember(m_bvTraits, life, baseIndex)) { bool isFullDefOfRemainder = @@ -518,6 +600,7 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre size_t index = Promotion::BinarySearch(agg->Replacements, offs); if ((ssize_t)index < 0) { + // No replacement found, this is a use of the remainder. unsigned size = genTypeSize(accessType); if (BitVecOps::IsMember(m_bvTraits, life, baseIndex)) { @@ -542,6 +625,7 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre } else { + // Use of a field. unsigned varIndex = baseIndex + 1 + (unsigned)index; if (BitVecOps::IsMember(m_bvTraits, life, varIndex)) @@ -565,3 +649,70 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre } } } + +//------------------------------------------------------------------------ +// IsReplacementLiveOut: +// Check if a replacement field is live at the end of a basic block. +// +// Parameters: +// structLcl - The struct (base) local +// replacementIndex - Index of the replacement +// +// Returns: +// True if the field is in the live-out set. +// +bool PromotionLiveness::IsReplacementLiveOut(BasicBlock* bb, unsigned structLcl, unsigned replacementIndex) +{ + BitVec liveOut = m_bbInfo[bb->bbNum].LiveOut; + unsigned baseIndex = m_structLclToTrackedIndex[structLcl]; + return BitVecOps::IsMember(m_bvTraits, liveOut, baseIndex + 1 + replacementIndex); +} + +//------------------------------------------------------------------------ +// GetDeathsForStructLocal: +// Get a data structure that can be used to query liveness information +// for a specified local node at its position. 
+// +// Parameters: +// lcl - The node +// +// Returns: +// Liveness information. +// +StructUseDeaths PromotionLiveness::GetDeathsForStructLocal(GenTreeLclVarCommon* lcl) +{ + assert(lcl->OperIsLocal() && lcl->TypeIs(TYP_STRUCT) && (m_aggregates[lcl->GetLclNum()] != nullptr)); + BitVec aggDeaths; + bool found = m_aggDeaths.Lookup(lcl, &aggDeaths); + assert(found); + + unsigned lclNum = lcl->GetLclNum(); + AggregateInfo* aggInfo = m_aggregates[lclNum]; + return StructUseDeaths(aggDeaths, (unsigned)aggInfo->Replacements.size()); +} + +//------------------------------------------------------------------------ +// IsRemainderDying: +// Check if the remainder is dying. +// +// Returns: +// True if so. +// +bool StructUseDeaths::IsRemainderDying() const +{ + BitVecTraits traits(1 + m_numFields, nullptr); + return BitVecOps::IsMember(&traits, m_deaths, 0); +} + +//------------------------------------------------------------------------ +// IsReplacementDying: +// Check if a specific replacement is dying. +// +// Returns: +// True if so. 
+// +bool StructUseDeaths::IsReplacementDying(unsigned index) const +{ + BitVecTraits traits(1 + m_numFields, nullptr); + return BitVecOps::IsMember(&traits, m_deaths, 1 + index); +} From 70c65b362887252c191b3ab87440a6ac36c8f7f9 Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Fri, 12 May 2023 14:18:55 +0200 Subject: [PATCH 10/16] Handle QMARKs --- src/coreclr/jit/promotionliveness.cpp | 62 ++++++++++++++++++++++++--- 1 file changed, 57 insertions(+), 5 deletions(-) diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp index 73f09599bdbb68..b4349c19db5f6a 100644 --- a/src/coreclr/jit/promotionliveness.cpp +++ b/src/coreclr/jit/promotionliveness.cpp @@ -113,11 +113,40 @@ void PromotionLiveness::ComputeUseDefSets() BitVecOps::AssignNoCopy(m_bvTraits, bb.LiveIn, BitVecOps::MakeEmpty(m_bvTraits)); BitVecOps::AssignNoCopy(m_bvTraits, bb.LiveOut, BitVecOps::MakeEmpty(m_bvTraits)); - for (Statement* stmt : block->Statements()) + if (m_compiler->compQmarkUsed) { - for (GenTreeLclVarCommon* lcl : stmt->LocalsTreeList()) + for (Statement* stmt : block->Statements()) { - MarkUseDef(lcl, bb.VarUse, bb.VarDef); + GenTree* dst; + GenTree* qmark = m_compiler->fgGetTopLevelQmark(stmt->GetRootNode(), &dst); + if (qmark == nullptr) + { + for (GenTreeLclVarCommon* lcl : stmt->LocalsTreeList()) + { + MarkUseDef(lcl, bb.VarUse, bb.VarDef); + } + } + else + { + for (GenTreeLclVarCommon* lcl : stmt->LocalsTreeList()) + { + // Skip liveness updates/marking for defs; they may be conditionally executed. + if ((lcl->gtFlags & GTF_VAR_DEF) == 0) + { + MarkUseDef(lcl, bb.VarUse, bb.VarDef); + } + } + } + } + } + else + { + for (Statement* stmt : block->Statements()) + { + for (GenTreeLclVarCommon* lcl : stmt->LocalsTreeList()) + { + MarkUseDef(lcl, bb.VarUse, bb.VarDef); + } } } } @@ -162,7 +191,9 @@ void PromotionLiveness::MarkUseDef(GenTreeLclVarCommon* lcl, BitSetShortLongRep& { // Mark remainder and all fields. 
for (size_t i = 0; i <= reps.size(); i++) + { MarkIndex(baseIndex + (unsigned)i, isUse, isDef, useSet, defSet); + } } else { @@ -452,9 +483,30 @@ void PromotionLiveness::FillInLiveness() while (true) { - for (GenTree* cur = stmt->GetTreeListEnd(); cur != nullptr; cur = cur->gtPrev) + GenTree* qmark = nullptr; + if (m_compiler->compQmarkUsed) + { + GenTree* dst; + qmark = m_compiler->fgGetTopLevelQmark(stmt->GetRootNode(), &dst); + } + + if (qmark == nullptr) { - FillInLiveness(life, volatileVars, cur->AsLclVarCommon()); + for (GenTree* cur = stmt->GetTreeListEnd(); cur != nullptr; cur = cur->gtPrev) + { + FillInLiveness(life, volatileVars, cur->AsLclVarCommon()); + } + } + else + { + for (GenTree* cur = stmt->GetTreeListEnd(); cur != nullptr; cur = cur->gtPrev) + { + // Skip liveness updates/marking for defs; they may be conditionally executed. + if ((cur->gtFlags & GTF_VAR_DEF) == 0) + { + FillInLiveness(life, volatileVars, cur->AsLclVarCommon()); + } + } } if (stmt == block->firstStmt()) From 6a68b10a58e2f938779ff3ff12c72700f1b7b4d5 Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Fri, 12 May 2023 22:45:49 +0200 Subject: [PATCH 11/16] Oopsie --- src/coreclr/jit/promotiondecomposition.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index 384a8f1b9f6251..ebaf4d2bdd8173 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -350,7 +350,7 @@ class DecompositionPlan StructSegments::Segment segment; // See if we can "plug the hole" with a single primitive. 
- if (remainder.IsSingleSegment(&segment) && !m_dst->OperIs(GT_LCL_VAR)) + if (remainder.IsSingleSegment(&segment)) { var_types primitiveType = TYP_UNDEF; unsigned size = segment.End - segment.Start; From 585fbb60ab9529b3145a2eb8ae7def039dadcd16 Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Sat, 13 May 2023 12:39:50 +0200 Subject: [PATCH 12/16] More JITDUMPing, mark more locals as dying --- src/coreclr/jit/compiler.h | 3 + src/coreclr/jit/gentree.cpp | 8 +- src/coreclr/jit/lclvars.cpp | 1 + src/coreclr/jit/promotion.cpp | 47 ++++++++- src/coreclr/jit/promotion.h | 19 +++- src/coreclr/jit/promotiondecomposition.cpp | 67 ++++++++++--- src/coreclr/jit/promotionliveness.cpp | 106 ++++++++++++++++++++- 7 files changed, 230 insertions(+), 21 deletions(-) diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h index 53deac240305c9..b235d7726a32b0 100644 --- a/src/coreclr/jit/compiler.h +++ b/src/coreclr/jit/compiler.h @@ -513,6 +513,9 @@ class LclVarDsc { return lvTracked && lvType != TYP_STRUCT; } +#ifdef DEBUG + unsigned char lvTrackedWithoutIndex : 1; // Tracked but has no lvVarIndex (i.e. only valid GTF_VAR_DEATH flags) +#endif unsigned char lvPinned : 1; // is this a pinned variable? 
unsigned char lvMustInit : 1; // must be initialized diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp index c0065e8605d9db..5c77f5b5ac8f81 100644 --- a/src/coreclr/jit/gentree.cpp +++ b/src/coreclr/jit/gentree.cpp @@ -12007,9 +12007,13 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack) } else // a normal not-promoted lclvar { - if (varDsc->lvTracked && fgLocalVarLivenessDone && ((tree->gtFlags & GTF_VAR_DEATH) != 0)) + if ((tree->gtFlags & GTF_VAR_DEATH) != 0) { - printf(" (last use)"); + if ((varDsc->lvTracked || varDsc->lvTrackedWithoutIndex) && fgLocalVarLivenessDone && + ((tree->gtFlags & GTF_VAR_DEATH) != 0)) + { + printf(" (last use)"); + } } } } diff --git a/src/coreclr/jit/lclvars.cpp b/src/coreclr/jit/lclvars.cpp index 71b3aa031162e1..be4fd8c865bf18 100644 --- a/src/coreclr/jit/lclvars.cpp +++ b/src/coreclr/jit/lclvars.cpp @@ -3564,6 +3564,7 @@ void Compiler::lvaSortByRefCount() // Start by assuming that the variable will be tracked. varDsc->lvTracked = 1; + INDEBUG(varDsc->lvTrackedWithoutIndex = 0); if (varDsc->lvRefCnt(lvaRefCountState) == 0) { diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index 2f953727bb0e99..a76d6aa2bf759a 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -319,7 +319,7 @@ class LocalUses if (*aggregateInfo == nullptr) { - *aggregateInfo = new (comp, CMK_Promotion) AggregateInfo(comp->getAllocator(CMK_Promotion)); + *aggregateInfo = new (comp, CMK_Promotion) AggregateInfo(comp->getAllocator(CMK_Promotion), lclNum); } (*aggregateInfo) @@ -1194,6 +1194,11 @@ void ReplaceVisitor::LoadStoreAroundCall(GenTreeCall* call, GenTree* user) { unsigned size = argNodeLcl->GetLayout(m_compiler)->GetSize(); WriteBackBefore(&arg.EarlyNodeRef(), argNodeLcl->GetLclNum(), argNodeLcl->GetLclOffs(), size); + + if ((m_aggregates[argNodeLcl->GetLclNum()] != nullptr) && IsPromotedStructLocalDying(argNodeLcl)) + { + argNodeLcl->gtFlags |= GTF_VAR_DEATH; + } } 
} @@ -1208,6 +1213,46 @@ void ReplaceVisitor::LoadStoreAroundCall(GenTreeCall* call, GenTree* user) } } +//------------------------------------------------------------------------ +// IsPromotedStructLocalDying: +// Check if a promoted struct local is dying at its current position. +// +// Parameters: +// lcl - The local +// +// Returns: +// True if so. +// +// Remarks: +// This effectively translates our precise liveness information for struct +// uses into the liveness information that the rest of the JIT expects. +// +// If the remainder of the struct local is dying, then we expect that this +// entire struct local is now dying, since all field accesses are going to be +// replaced with other locals. The exception is if there is a queued read +// back for any of the fields. +// +bool ReplaceVisitor::IsPromotedStructLocalDying(GenTreeLclVarCommon* lcl) +{ + StructUseDeaths deaths = m_liveness->GetDeathsForStructLocal(lcl); + if (!deaths.IsRemainderDying()) + { + return false; + } + + AggregateInfo* agg = m_aggregates[lcl->GetLclNum()]; + + for (size_t i = 0; i < agg->Replacements.size(); i++) + { + if (agg->Replacements[i].NeedsReadBack) + { + return false; + } + } + + return true; +} + //------------------------------------------------------------------------ // ReplaceLocal: // Handle a local that may need to be replaced. diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h index a81fe4be6a09f4..977d5f8df254dd 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -87,12 +87,13 @@ class StructSegments struct AggregateInfo { jitstd::vector Replacements; + unsigned LclNum; // Min offset in the struct local of the unpromoted part. unsigned UnpromotedMin = 0; // Max offset in the struct local of the unpromoted part. 
unsigned UnpromotedMax = 0; - AggregateInfo(CompAllocator alloc) : Replacements(alloc) + AggregateInfo(CompAllocator alloc, unsigned lclNum) : Replacements(alloc), LclNum(lclNum) { } @@ -181,7 +182,7 @@ struct BasicBlockLiveness; class StructUseDeaths { BitVec m_deaths; - unsigned m_numFields; + unsigned m_numFields = 0; friend class PromotionLiveness; @@ -191,12 +192,16 @@ class StructUseDeaths } public: - StructUseDeaths() : m_deaths(BitVecOps::UninitVal()), m_numFields(0) + StructUseDeaths() : m_deaths(BitVecOps::UninitVal()) { } bool IsRemainderDying() const; bool IsReplacementDying(unsigned index) const; + +#ifdef DEBUG + void Dump(); +#endif }; // Class to compute and track liveness information pertaining promoted structs. @@ -206,6 +211,7 @@ class PromotionLiveness jitstd::vector& m_aggregates; BitVecTraits* m_bvTraits = nullptr; unsigned* m_structLclToTrackedIndex = nullptr; + unsigned m_numVars = 0; BasicBlockLiveness* m_bbInfo = nullptr; bool m_hasPossibleBackEdge = false; BitVec m_liveIn; @@ -233,6 +239,9 @@ class PromotionLiveness void AddHandlerLiveVars(BasicBlock* block, BitVec& ehLiveVars); void FillInLiveness(); void FillInLiveness(BitVec& life, BitVec volatileVars, GenTreeLclVarCommon* lcl); +#ifdef DEBUG + void DumpVarSet(BitVec set, BitVec allVars); +#endif }; class DecompositionStatementList; @@ -271,6 +280,7 @@ class ReplaceVisitor : public GenTreeVisitor private: void LoadStoreAroundCall(GenTreeCall* call, GenTree* user); + bool IsPromotedStructLocalDying(GenTreeLclVarCommon* structLcl); void ReplaceLocal(GenTree** use, GenTree* user); void StoreBeforeReturn(GenTreeUnOp* ret); void WriteBackBefore(GenTree** use, unsigned lcl, unsigned offs, unsigned size); @@ -290,6 +300,9 @@ class ReplaceVisitor : public GenTreeVisitor Replacement* srcEndRep, DecompositionStatementList* statements, DecompositionPlan* plan); +#ifdef DEBUG + const char* LastUseString(GenTreeLclVarCommon* lcl, Replacement* rep); +#endif }; #endif diff --git 
a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index ebaf4d2bdd8173..130be822d92c7a 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -688,6 +688,12 @@ class DecompositionPlan statements->AddStatement(indir); } + StructUseDeaths srcDeaths; + if (m_srcInvolvesReplacements) + { + srcDeaths = m_liveness->GetDeathsForStructLocal(m_src->AsLclVarCommon()); + } + for (int i = 0; i < m_entries.Height(); i++) { const Entry& entry = m_entries.BottomRef(i); @@ -741,6 +747,19 @@ class DecompositionPlan if (entry.FromLclNum != BAD_VAR_NUM) { src = m_compiler->gtNewLclvNode(entry.FromLclNum, entry.Type); + + if (entry.FromReplacement != nullptr) + { + AggregateInfo* srcAgg = m_aggregates[m_src->AsLclVarCommon()->GetLclNum()]; + Replacement* firstRep = srcAgg->Replacements.data(); + assert((entry.FromReplacement >= firstRep) && + (entry.FromReplacement < (firstRep + srcAgg->Replacements.size()))); + size_t replacementIndex = entry.FromReplacement - firstRep; + if (srcDeaths.IsReplacementDying((unsigned)replacementIndex)) + { + src->gtFlags |= GTF_VAR_DEATH; + } + } } else if (m_src->OperIs(GT_LCL_VAR, GT_LCL_FLD)) { @@ -1244,6 +1263,25 @@ void ReplaceVisitor::InitFields(GenTreeLclVarCommon* dst, } } +#ifdef DEBUG +const char* ReplaceVisitor::LastUseString(GenTreeLclVarCommon* lcl, Replacement* rep) +{ + StructUseDeaths deaths = m_liveness->GetDeathsForStructLocal(lcl); + AggregateInfo* agg = m_aggregates[lcl->GetLclNum()]; + assert(agg != nullptr); + Replacement* firstRep = agg->Replacements.data(); + assert((rep >= firstRep) && (rep < firstRep + agg->Replacements.size())); + + size_t replacementIndex = rep - firstRep; + if (deaths.IsReplacementDying((unsigned)replacementIndex)) + { + return " (last use)"; + } + + return ""; +} +#endif + //------------------------------------------------------------------------ // CopyBetweenFields: // Copy between two struct locals that may 
involve replacements. @@ -1301,7 +1339,8 @@ void ReplaceVisitor::CopyBetweenFields(GenTree* dst, // Write it directly to the destination struct local. unsigned offs = srcRep->Offset - srcBaseOffs; plan->CopyFromReplacement(srcRep, offs); - JITDUMP(" dst+%03u <- V%02u (%s)\n", offs, srcRep->LclNum, srcRep->Description); + JITDUMP(" dst+%03u <- V%02u (%s)%s\n", offs, srcRep->LclNum, srcRep->Description, + LastUseString(srcLcl, srcRep)); srcRep++; continue; } @@ -1312,7 +1351,8 @@ void ReplaceVisitor::CopyBetweenFields(GenTree* dst, // Read it directly from the source struct local. unsigned offs = dstRep->Offset - dstBaseOffs; plan->CopyToReplacement(dstRep, offs); - JITDUMP(" V%02u (%s) <- src+%03u\n", dstRep->LclNum, dstRep->Description, offs); + JITDUMP(" V%02u (%s)%s <- src+%03u\n", dstRep->LclNum, dstRep->Description, + LastUseString(dstLcl, dstRep), offs); dstRep++; continue; } @@ -1323,8 +1363,9 @@ void ReplaceVisitor::CopyBetweenFields(GenTree* dst, (dstRep->AccessType == srcRep->AccessType)) { plan->CopyBetweenReplacements(dstRep, srcRep, dstRep->Offset - dstBaseOffs); - JITDUMP(" V%02u (%s) <- V%02u (%s)\n", dstRep->LclNum, dstRep->Description, srcRep->LclNum, - srcRep->Description); + JITDUMP(" V%02u (%s)%s <- V%02u (%s)%s\n", dstRep->LclNum, dstRep->Description, + LastUseString(dstLcl, dstRep), srcRep->LclNum, srcRep->Description, + LastUseString(srcLcl, srcRep)); dstRep++; srcRep++; @@ -1335,8 +1376,9 @@ void ReplaceVisitor::CopyBetweenFields(GenTree* dst, // will handle the destination replacement in a future // iteration of the loop. statements->AddStatement(Promotion::CreateWriteBack(m_compiler, srcLcl->GetLclNum(), *srcRep)); - JITDUMP(" Partial overlap of V%02u (%s) <- V%02u (%s). Will read source back before copy\n", - dstRep->LclNum, dstRep->Description, srcRep->LclNum, srcRep->Description); + JITDUMP(" Partial overlap of V%02u (%s)%s <- V%02u (%s)%s. 
Will read source back before copy\n", + dstRep->LclNum, dstRep->Description, LastUseString(dstLcl, dstRep), srcRep->LclNum, + srcRep->Description, LastUseString(srcLcl, srcRep)); srcRep++; continue; } @@ -1356,7 +1398,8 @@ void ReplaceVisitor::CopyBetweenFields(GenTree* dst, if (dsc->lvType == dstRep->AccessType) { plan->CopyBetweenReplacements(dstRep, fieldLcl, offs); - JITDUMP(" V%02u (%s) <- V%02u (%s)\n", dstRep->LclNum, dstRep->Description, dsc->lvReason); + JITDUMP(" V%02u (%s)%s <- V%02u (%s)\n", dstRep->LclNum, dstRep->Description, + LastUseString(dstLcl, dstRep), dsc->lvReason); dstRep++; continue; } @@ -1368,7 +1411,8 @@ void ReplaceVisitor::CopyBetweenFields(GenTree* dst, // directly to the destination's struct local and mark the // overlapping fields as needing read back to avoid this DNER. plan->CopyToReplacement(dstRep, offs); - JITDUMP(" V%02u (%s) <- src+%03u\n", dstRep->LclNum, dstRep->Description, offs); + JITDUMP(" V%02u (%s)%s <- src+%03u\n", dstRep->LclNum, dstRep->Description, LastUseString(dstLcl, dstRep), + offs); dstRep++; } else @@ -1386,8 +1430,8 @@ void ReplaceVisitor::CopyBetweenFields(GenTree* dst, if (dsc->lvType == srcRep->AccessType) { plan->CopyBetweenReplacements(fieldLcl, srcRep, offs); - JITDUMP(" V%02u (%s) <- V%02u (%s)\n", fieldLcl, dsc->lvReason, srcRep->LclNum, - srcRep->Description); + JITDUMP(" V%02u (%s) <- V%02u (%s)%s\n", fieldLcl, dsc->lvReason, srcRep->LclNum, + srcRep->Description, LastUseString(srcLcl, srcRep)); srcRep++; continue; } @@ -1395,7 +1439,8 @@ void ReplaceVisitor::CopyBetweenFields(GenTree* dst, } plan->CopyFromReplacement(srcRep, offs); - JITDUMP(" dst+%03u <- V%02u (%s)\n", offs, srcRep->LclNum, srcRep->Description); + JITDUMP(" dst+%03u <- V%02u (%s)%s\n", offs, srcRep->LclNum, srcRep->Description, + LastUseString(srcLcl, srcRep)); srcRep++; } } diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp index b4349c19db5f6a..b0836c47ab5e18 100644 --- 
a/src/coreclr/jit/promotionliveness.cpp +++ b/src/coreclr/jit/promotionliveness.cpp @@ -79,19 +79,33 @@ void PromotionLiveness::Run() } m_structLclToTrackedIndex[lclNum] = trackedIndex; + // TODO: We need a scalability limit on these, we cannot always track + // the remainder and all fields. // Remainder. - // TODO-TP: We can avoid allocating an index for this when agg->UnpromotedMin == agg->UnpromotedMax, - // but it makes liveness use/def marking less uniform. trackedIndex++; - // Fields + // Fields. trackedIndex += (unsigned)agg->Replacements.size(); + +#ifdef DEBUG + // Mark the struct local (remainder) and fields as tracked for DISPTREE to properly + // show last use information. + m_compiler->lvaGetDesc((unsigned)lclNum)->lvTrackedWithoutIndex = true; + for (size_t i = 0; i < agg->Replacements.size(); i++) + { + m_compiler->lvaGetDesc(agg->Replacements[i].LclNum)->lvTrackedWithoutIndex = true; + } +#endif } - m_bvTraits = new (m_compiler, CMK_Promotion) BitVecTraits(trackedIndex, m_compiler); + m_numVars = trackedIndex; + + m_bvTraits = new (m_compiler, CMK_Promotion) BitVecTraits(m_numVars, m_compiler); m_bbInfo = m_compiler->fgAllocateTypeForEachBlk(CMK_Promotion); BitVecOps::AssignNoCopy(m_bvTraits, m_liveIn, BitVecOps::MakeEmpty(m_bvTraits)); BitVecOps::AssignNoCopy(m_bvTraits, m_ehLiveVars, BitVecOps::MakeEmpty(m_bvTraits)); + JITDUMP("Computing liveness for %u vars\n", m_numVars); + ComputeUseDefSets(); InterBlockLiveness(); @@ -149,6 +163,18 @@ void PromotionLiveness::ComputeUseDefSets() } } } + +#ifdef DEBUG + if (m_compiler->verbose) + { + BitVec allVars(BitVecOps::Union(m_bvTraits, bb.VarUse, bb.VarDef)); + printf(FMT_BB " USE(%u)=", block->bbNum, BitVecOps::Count(m_bvTraits, bb.VarUse)); + DumpVarSet(bb.VarUse, allVars); + printf("\n" FMT_BB " DEF(%u)=", block->bbNum, BitVecOps::Count(m_bvTraits, bb.VarDef)); + DumpVarSet(bb.VarDef, allVars); + printf("\n\n"); + } +#endif } } @@ -289,6 +315,22 @@ void PromotionLiveness::InterBlockLiveness() break; 
} } while (changed); + +#ifdef DEBUG + if (m_compiler->verbose) + { + for (BasicBlock* block : m_compiler->Blocks()) + { + BasicBlockLiveness& bbInfo = m_bbInfo[block->bbNum]; + BitVec allVars(BitVecOps::Union(m_bvTraits, bbInfo.LiveIn, bbInfo.LiveOut)); + printf(FMT_BB " IN (%u)=", block->bbNum, BitVecOps::Count(m_bvTraits, bbInfo.LiveIn)); + DumpVarSet(bbInfo.LiveIn, allVars); + printf("\n" FMT_BB "OUT(%u)=", block->bbNum, BitVecOps::Count(m_bvTraits, bbInfo.LiveOut)); + DumpVarSet(bbInfo.LiveOut, allVars); + printf("\n\n"); + } + } +#endif } //------------------------------------------------------------------------ @@ -768,3 +810,59 @@ bool StructUseDeaths::IsReplacementDying(unsigned index) const BitVecTraits traits(1 + m_numFields, nullptr); return BitVecOps::IsMember(&traits, m_deaths, 1 + index); } + +//------------------------------------------------------------------------ +// IsReplacementDying: +// Check if a specific replacement is dying. +// +// Returns: +// True if so. +// +void StructUseDeaths::Dump() +{ +} + +#ifdef DEBUG +void PromotionLiveness::DumpVarSet(BitVec set, BitVec allVars) +{ + printf("{"); + + const char* sep = ""; + for (size_t i = 0; i < m_aggregates.size(); i++) + { + AggregateInfo* agg = m_aggregates[i]; + if (agg == nullptr) + { + continue; + } + + for (size_t j = 0; j <= agg->Replacements.size(); j++) + { + unsigned index = (unsigned)(m_structLclToTrackedIndex[i] + j); + + if (BitVecOps::IsMember(m_bvTraits, set, index)) + { + printf("%s", sep); + if (j == 0) + { + printf("%sV%02u (remainder)", sep, (unsigned)i); + } + else + { + const Replacement& rep = agg->Replacements[j - 1]; + printf("%sV%02u[%03u..%03u)", sep, (unsigned)i, rep.Offset, + rep.Offset + genTypeSize(rep.AccessType)); + } + } + else if (BitVecOps::IsMember(m_bvTraits, allVars, index)) + { + { + printf("%s ", sep); + } + } + } + } + + printf("}"); +} +#endif From 8d73b0ee5aefb532414fcbc3bff5a2d37a704c0b Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Sat, 
13 May 2023 13:21:48 +0200 Subject: [PATCH 13/16] Revert, run jit-format --- src/coreclr/jit/compiler.h | 3 ++- src/coreclr/jit/gentree.cpp | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h index b0ef3db5f927cf..6d1aa6928ad684 100644 --- a/src/coreclr/jit/compiler.h +++ b/src/coreclr/jit/compiler.h @@ -514,7 +514,8 @@ class LclVarDsc return lvTracked && lvType != TYP_STRUCT; } #ifdef DEBUG - unsigned char lvTrackedWithoutIndex : 1; // Tracked but has no lvVarIndex (i.e. only valid GTF_VAR_DEATH flags) + unsigned char lvTrackedWithoutIndex : 1; // Tracked but has no lvVarIndex (i.e. only valid GTF_VAR_DEATH flags, used + // by physical promotion) #endif unsigned char lvPinned : 1; // is this a pinned variable? diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp index 5e51563c68663a..6468c8bc6acb42 100644 --- a/src/coreclr/jit/gentree.cpp +++ b/src/coreclr/jit/gentree.cpp @@ -11936,7 +11936,6 @@ void Compiler::gtDispLocal(GenTreeLclVarCommon* tree, IndentStack* indentStack) printf(" "); fieldVarDsc->PrintVarReg(); } -(varDsc->lvTracked || varDsc->lvTrackedWithoutIndex) if (fieldVarDsc->lvTracked && fgLocalVarLivenessDone && tree->IsLastUse(index)) { printf(" (last use)"); @@ -11946,7 +11945,8 @@ void Compiler::gtDispLocal(GenTreeLclVarCommon* tree, IndentStack* indentStack) } else // a normal not-promoted lclvar { - if ((varDsc->lvTracked || varDsc->lvTrackedWithoutIndex) && fgLocalVarLivenessDone && ((tree->gtFlags & GTF_VAR_DEATH) != 0)) + if ((varDsc->lvTracked || varDsc->lvTrackedWithoutIndex) && fgLocalVarLivenessDone && + ((tree->gtFlags & GTF_VAR_DEATH) != 0)) { printf(" (last use)"); } From 5f0f264c077fd75d2cf91470742f81c0e07b0333 Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Sat, 13 May 2023 14:07:51 +0200 Subject: [PATCH 14/16] Fix some logging --- src/coreclr/jit/promotion.cpp | 1 + src/coreclr/jit/promotionliveness.cpp | 17 +++++++++-------- 2 files 
changed, 10 insertions(+), 8 deletions(-) diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index a76d6aa2bf759a..0baea26d97977d 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -1583,6 +1583,7 @@ PhaseStatus Promotion::Run() PromotionLiveness liveness(m_compiler, aggregates); liveness.Run(); + JITDUMP("Making replacements\n\n"); // Make all replacements we decided on. ReplaceVisitor replacer(this, aggregates, &liveness); for (BasicBlock* bb : m_compiler->Blocks()) diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp index b0836c47ab5e18..952720d2390388 100644 --- a/src/coreclr/jit/promotionliveness.cpp +++ b/src/coreclr/jit/promotionliveness.cpp @@ -104,7 +104,7 @@ void PromotionLiveness::Run() BitVecOps::AssignNoCopy(m_bvTraits, m_liveIn, BitVecOps::MakeEmpty(m_bvTraits)); BitVecOps::AssignNoCopy(m_bvTraits, m_ehLiveVars, BitVecOps::MakeEmpty(m_bvTraits)); - JITDUMP("Computing liveness for %u vars\n", m_numVars); + JITDUMP("Computing liveness for %u remainders/fields\n\n", m_numVars); ComputeUseDefSets(); @@ -325,7 +325,7 @@ void PromotionLiveness::InterBlockLiveness() BitVec allVars(BitVecOps::Union(m_bvTraits, bbInfo.LiveIn, bbInfo.LiveOut)); printf(FMT_BB " IN (%u)=", block->bbNum, BitVecOps::Count(m_bvTraits, bbInfo.LiveIn)); DumpVarSet(bbInfo.LiveIn, allVars); - printf("\n" FMT_BB "OUT(%u)=", block->bbNum, BitVecOps::Count(m_bvTraits, bbInfo.LiveOut)); + printf("\n" FMT_BB " OUT(%u)=", block->bbNum, BitVecOps::Count(m_bvTraits, bbInfo.LiveOut)); DumpVarSet(bbInfo.LiveOut, allVars); printf("\n\n"); } @@ -842,23 +842,24 @@ void PromotionLiveness::DumpVarSet(BitVec set, BitVec allVars) if (BitVecOps::IsMember(m_bvTraits, set, index)) { - printf("%s", sep); if (j == 0) { - printf("%sV%02u (remainder)", sep, (unsigned)i); + // 14 chars + printf("%sV%02u(remainder)", sep, (unsigned)i); } else { const Replacement& rep = agg->Replacements[j - 1]; - 
printf("%sV%02u[%03u..%03u)", sep, (unsigned)i, rep.Offset, + // 14 chars + printf("%sV%02u.[%03u..%03u)", sep, (unsigned)i, rep.Offset, rep.Offset + genTypeSize(rep.AccessType)); } + sep = " "; } else if (BitVecOps::IsMember(m_bvTraits, allVars, index)) { - { - printf("%s ", sep); - } + printf("%s ", sep); + sep = " "; } } } From c78f76e72268f7650046d96e181ddfdf35a634af Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Sat, 13 May 2023 14:21:13 +0200 Subject: [PATCH 15/16] Fix build --- src/coreclr/jit/promotion.h | 4 ---- src/coreclr/jit/promotiondecomposition.cpp | 12 ++++++++++++ src/coreclr/jit/promotionliveness.cpp | 16 +++++++--------- 3 files changed, 19 insertions(+), 13 deletions(-) diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h index 977d5f8df254dd..55bd2c1c33f834 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -198,10 +198,6 @@ class StructUseDeaths bool IsRemainderDying() const; bool IsReplacementDying(unsigned index) const; - -#ifdef DEBUG - void Dump(); -#endif }; // Class to compute and track liveness information pertaining promoted structs. diff --git a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index 130be822d92c7a..848865cbbf895c 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -1264,6 +1264,18 @@ void ReplaceVisitor::InitFields(GenTreeLclVarCommon* dst, } #ifdef DEBUG +//------------------------------------------------------------------------ +// LastUseString: +// Return a string indicating whether a replacement is a last use, for +// JITDUMP purposes. +// +// Parameters: +// lcl - A struct local +// rep - A replacement of that struct local +// +// Returns: +// " (last use)" if it is, and otherwise "". 
+// const char* ReplaceVisitor::LastUseString(GenTreeLclVarCommon* lcl, Replacement* rep) { StructUseDeaths deaths = m_liveness->GetDeathsForStructLocal(lcl); diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp index 952720d2390388..7f87093a06bc2f 100644 --- a/src/coreclr/jit/promotionliveness.cpp +++ b/src/coreclr/jit/promotionliveness.cpp @@ -811,18 +811,16 @@ bool StructUseDeaths::IsReplacementDying(unsigned index) const return BitVecOps::IsMember(&traits, m_deaths, 1 + index); } +#ifdef DEBUG //------------------------------------------------------------------------ -// IsReplacementDying: -// Check if a specific replacement is dying. +// DumpVarSet: +// Dump a var set to jitstdout. // -// Returns: -// True if so. +// Parameters: +// set - The set to dump +// allVars - Set of all variables to print whitespace for if not in 'set'. +// Used for alignment. // -void StructUseDeaths::Dump() -{ -} - -#ifdef DEBUG void PromotionLiveness::DumpVarSet(BitVec set, BitVec allVars) { printf("{"); From 361ab86e40f10d804713268970e567a418ae3efc Mon Sep 17 00:00:00 2001 From: Jakob Botsch Nielsen Date: Sat, 13 May 2023 21:35:13 +0200 Subject: [PATCH 16/16] Rename, fix some comments --- src/coreclr/jit/promotion.cpp | 61 ++++++++++++++-------- src/coreclr/jit/promotion.h | 15 +++--- src/coreclr/jit/promotiondecomposition.cpp | 16 +++--- src/coreclr/jit/promotionliveness.cpp | 38 +++++++------- 4 files changed, 74 insertions(+), 56 deletions(-) diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index 0baea26d97977d..7e62b68058578a 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -129,6 +129,19 @@ inline AccessKindFlags& operator&=(AccessKindFlags& a, AccessKindFlags b) return a = (AccessKindFlags)((uint32_t)a & (uint32_t)b); } +//------------------------------------------------------------------------ +// OverlappingReplacements: +// Find replacements that overlap the specified 
[offset..offset+size) interval. +// +// Parameters: +// offset - Starting offset of interval +// size - Size of interval +// firstReplacement - [out] The first replacement that overlaps +// endReplacement - [out, optional] One past the last replacement that overlaps +// +// Returns: +// True if any replacement overlaps; otherwise false. +// bool AggregateInfo::OverlappingReplacements(unsigned offset, unsigned size, Replacement** firstReplacement, @@ -1234,7 +1247,7 @@ void ReplaceVisitor::LoadStoreAroundCall(GenTreeCall* call, GenTree* user) // bool ReplaceVisitor::IsPromotedStructLocalDying(GenTreeLclVarCommon* lcl) { - StructUseDeaths deaths = m_liveness->GetDeathsForStructLocal(lcl); + StructDeaths deaths = m_liveness->GetDeathsForStructLocal(lcl); if (!deaths.IsRemainderDying()) { return false; @@ -1322,7 +1335,6 @@ void ReplaceVisitor::ReplaceLocal(GenTree** use, GenTree* user) Replacement& rep = replacements[index]; assert(accessType == rep.AccessType); JITDUMP(" ..replaced with promoted lcl V%02u\n", rep.LclNum); - *use = m_compiler->gtNewLclvNode(rep.LclNum, accessType); (*use)->gtFlags |= lcl->gtFlags & GTF_VAR_DEATH; @@ -1543,6 +1555,18 @@ PhaseStatus Promotion::Run() jitstd::vector& reps = aggregates[i]->Replacements; + assert(reps.size() > 0); + anyReplacements = true; +#ifdef DEBUG + JITDUMP("V%02u promoted with %d replacements\n", i, (int)reps.size()); + for (const Replacement& rep : reps) + { + JITDUMP(" [%03u..%03u) promoted as %s V%02u\n", rep.Offset, rep.Offset + genTypeSize(rep.AccessType), + varTypeName(rep.AccessType), rep.LclNum); + } +#endif + + JITDUMP("Computing unpromoted remainder for V%02u\n", i); StructSegments unpromotedParts = SignificantSegments(m_compiler, m_compiler->lvaGetDesc(i)->GetLayout()); for (size_t i = 0; i < reps.size(); i++) { @@ -1550,6 +1574,10 @@ PhaseStatus Promotion::Run() StructSegments::Segment(reps[i].Offset, reps[i].Offset + genTypeSize(reps[i].AccessType))); } + JITDUMP(" Remainder: "); + 
DBEXEC(m_compiler->verbose, unpromotedParts.Dump()); + JITDUMP("\n\n"); + StructSegments::Segment unpromotedSegment; if (unpromotedParts.CoveringSegment(&unpromotedSegment)) { @@ -1561,17 +1589,6 @@ PhaseStatus Promotion::Run() { // Aggregate is fully promoted, leave UnpromotedMin == UnpromotedMax to indicate this. } - - assert(reps.size() > 0); - anyReplacements = true; -#ifdef DEBUG - JITDUMP("V%02u promoted with %d replacements\n", i, (int)reps.size()); - for (const Replacement& rep : reps) - { - JITDUMP(" [%03u..%03u) promoted as %s V%02u\n", rep.Offset, rep.Offset + genTypeSize(rep.AccessType), - varTypeName(rep.AccessType), rep.LclNum); - } -#endif } if (!anyReplacements) @@ -1579,7 +1596,7 @@ PhaseStatus Promotion::Run() return PhaseStatus::MODIFIED_NOTHING; } - // Do a liveness pass to refine liveness for the promotions we picked. + // Compute liveness for the fields and remainders. PromotionLiveness liveness(m_compiler, aggregates); liveness.Run(); @@ -1616,14 +1633,7 @@ PhaseStatus Promotion::Run() assert(!rep.NeedsReadBack || !rep.NeedsWriteBack); if (rep.NeedsReadBack) { - if (!liveness.IsReplacementLiveOut(bb, i, (unsigned)j)) - { - JITDUMP( - "Skipping reading back dead replacement V%02u.[%03u..%03u) -> V%02u near the end of " FMT_BB - "\n", - i, rep.Offset, rep.Offset + genTypeSize(rep.AccessType), rep.LclNum, bb->bbNum); - } - else + if (liveness.IsReplacementLiveOut(bb, i, (unsigned)j)) { JITDUMP("Reading back replacement V%02u.[%03u..%03u) -> V%02u near the end of " FMT_BB ":\n", i, rep.Offset, rep.Offset + genTypeSize(rep.AccessType), rep.LclNum, bb->bbNum); @@ -1633,6 +1643,13 @@ PhaseStatus Promotion::Run() DISPSTMT(stmt); m_compiler->fgInsertStmtNearEnd(bb, stmt); } + else + { + JITDUMP( + "Skipping reading back dead replacement V%02u.[%03u..%03u) -> V%02u near the end of " FMT_BB + "\n", + i, rep.Offset, rep.Offset + genTypeSize(rep.AccessType), rep.LclNum, bb->bbNum); + } rep.NeedsReadBack = false; } diff --git a/src/coreclr/jit/promotion.h 
b/src/coreclr/jit/promotion.h index 55bd2c1c33f834..5c4e26383b3990 100644 --- a/src/coreclr/jit/promotion.h +++ b/src/coreclr/jit/promotion.h @@ -177,9 +177,8 @@ class Promotion PhaseStatus Run(); }; -struct BasicBlockLiveness; - -class StructUseDeaths +// Class to represent liveness information for a struct local's fields and remainder. +class StructDeaths { BitVec m_deaths; unsigned m_numFields = 0; @@ -187,12 +186,12 @@ class StructUseDeaths friend class PromotionLiveness; private: - StructUseDeaths(BitVec deaths, unsigned numFields) : m_deaths(deaths), m_numFields(numFields) + StructDeaths(BitVec deaths, unsigned numFields) : m_deaths(deaths), m_numFields(numFields) { } public: - StructUseDeaths() : m_deaths(BitVecOps::UninitVal()) + StructDeaths() : m_deaths(BitVecOps::UninitVal()) { } @@ -200,6 +199,8 @@ class StructUseDeaths bool IsReplacementDying(unsigned index) const; }; +struct BasicBlockLiveness; + // Class to compute and track liveness information pertaining promoted structs. 
class PromotionLiveness { @@ -214,8 +215,6 @@ class PromotionLiveness BitVec m_ehLiveVars; JitHashTable, BitVec> m_aggDeaths; - friend class PromotionLivenessBitSetTraits; - public: PromotionLiveness(Compiler* compiler, jitstd::vector& aggregates) : m_compiler(compiler), m_aggregates(aggregates), m_aggDeaths(compiler->getAllocator(CMK_Promotion)) @@ -224,7 +223,7 @@ class PromotionLiveness void Run(); bool IsReplacementLiveOut(BasicBlock* bb, unsigned structLcl, unsigned replacement); - StructUseDeaths GetDeathsForStructLocal(GenTreeLclVarCommon* use); + StructDeaths GetDeathsForStructLocal(GenTreeLclVarCommon* use); private: void MarkUseDef(GenTreeLclVarCommon* lcl, BitVec& useSet, BitVec& defSet); diff --git a/src/coreclr/jit/promotiondecomposition.cpp b/src/coreclr/jit/promotiondecomposition.cpp index 848865cbbf895c..165607bb586507 100644 --- a/src/coreclr/jit/promotiondecomposition.cpp +++ b/src/coreclr/jit/promotiondecomposition.cpp @@ -333,7 +333,7 @@ class DecompositionPlan // the rest of the remainder); or by handling a specific 'hole' as a // primitive. 
// - RemainderStrategy DetermineRemainderStrategy(const StructUseDeaths& dstDeaths) + RemainderStrategy DetermineRemainderStrategy(const StructDeaths& dstDeaths) { if (m_dstInvolvesReplacements && !m_hasNonRemainderUseOfStructLocal && dstDeaths.IsRemainderDying()) { @@ -409,8 +409,8 @@ class DecompositionPlan // void FinalizeInit(DecompositionStatementList* statements) { - uint8_t initPattern = GetInitPattern(); - StructUseDeaths deaths = m_liveness->GetDeathsForStructLocal(m_dst->AsLclVarCommon()); + uint8_t initPattern = GetInitPattern(); + StructDeaths deaths = m_liveness->GetDeathsForStructLocal(m_dst->AsLclVarCommon()); AggregateInfo* agg = m_aggregates[m_dst->AsLclVarCommon()->GetLclNum()]; assert((agg != nullptr) && (agg->Replacements.size() > 0)); @@ -463,7 +463,7 @@ class DecompositionPlan { assert(m_dst->OperIs(GT_LCL_VAR, GT_LCL_FLD, GT_BLK) && m_src->OperIs(GT_LCL_VAR, GT_LCL_FLD, GT_BLK)); - StructUseDeaths dstDeaths; + StructDeaths dstDeaths; if (m_dstInvolvesReplacements) { dstDeaths = m_liveness->GetDeathsForStructLocal(m_dst->AsLclVarCommon()); @@ -688,7 +688,7 @@ class DecompositionPlan statements->AddStatement(indir); } - StructUseDeaths srcDeaths; + StructDeaths srcDeaths; if (m_srcInvolvesReplacements) { srcDeaths = m_liveness->GetDeathsForStructLocal(m_src->AsLclVarCommon()); @@ -831,7 +831,7 @@ class DecompositionPlan // entry - The init/copy entry // remainderStrategy - The strategy we are using for the remainder // - bool CanSkipEntry(const Entry& entry, const StructUseDeaths& deaths, const RemainderStrategy& remainderStrategy) + bool CanSkipEntry(const Entry& entry, const StructDeaths& deaths, const RemainderStrategy& remainderStrategy) { if (entry.ToReplacement != nullptr) { @@ -1278,8 +1278,8 @@ void ReplaceVisitor::InitFields(GenTreeLclVarCommon* dst, // const char* ReplaceVisitor::LastUseString(GenTreeLclVarCommon* lcl, Replacement* rep) { - StructUseDeaths deaths = m_liveness->GetDeathsForStructLocal(lcl); - AggregateInfo* agg = 
m_aggregates[lcl->GetLclNum()]; +    StructDeaths deaths = m_liveness->GetDeathsForStructLocal(lcl); +    AggregateInfo* agg = m_aggregates[lcl->GetLclNum()]; assert(agg != nullptr); Replacement* firstRep = agg->Replacements.data(); assert((rep >= firstRep) && (rep < firstRep + agg->Replacements.size())); diff --git a/src/coreclr/jit/promotionliveness.cpp b/src/coreclr/jit/promotionliveness.cpp index 7f87093a06bc2f..c936478fdee655 100644 --- a/src/coreclr/jit/promotionliveness.cpp +++ b/src/coreclr/jit/promotionliveness.cpp @@ -12,7 +12,9 @@ struct BasicBlockLiveness // Note that this differs from our normal liveness: partial definitions are // NOT marked but they are also not considered uses. BitVec VarDef; + // Variables live-in to this basic block. BitVec LiveIn; + // Variables live-out of this basic block. BitVec LiveOut; }; @@ -61,10 +63,12 @@ struct BasicBlockLiveness // This relies on downstream phases not requiring/wanting to use per-basic // block live-in/live-out/var-use/var-def sets. To be able to update these we // would need to give the new locals "regular" tracked indices (i.e. allocate -// a lvVarIndex); currently the indices allocated are "dense" in the sense -// that the bit vectors only have indices for remainders and the replacement -// fields introduced by this pass uin other words, we allocate 1 + num_fields -// indices for each promoted struct local). +// a lvVarIndex). +// +// The indices allocated and used internally within the liveness computation +// are "dense" in the sense that the bit vectors only have indices for +// remainders and the replacement fields introduced by this pass. In other +// words, we allocate 1 + num_fields indices for each promoted struct local. +// void PromotionLiveness::Run() { @@ -586,14 +590,6 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre if (accessType == TYP_STRUCT) { - if (lcl->OperIs(GT_LCL_ADDR)) - { - // Retbuf -- these are definitions but we do not know of how much.
- // We never mark them as dead and we never treat them as killing anything. - assert(isDef); - return; - } - // We need an external bit set to represent dying fields/remainder on a struct use. BitVecTraits aggTraits(1 + (unsigned)agg->Replacements.size(), m_compiler); BitVec aggDeaths(BitVecOps::MakeEmpty(&aggTraits)); @@ -690,6 +686,14 @@ void PromotionLiveness::FillInLiveness(BitVec& life, BitVec volatileVars, GenTre } else { + if (lcl->OperIs(GT_LCL_ADDR)) + { + // Retbuf -- these are definitions but we do not know of how much. + // We never mark them as dead and we never treat them as killing anything. + assert(isDef); + return; + } + unsigned offs = lcl->GetLclOffs(); size_t index = Promotion::BinarySearch(agg->Replacements, offs); if ((ssize_t)index < 0) @@ -773,7 +777,7 @@ bool PromotionLiveness::IsReplacementLiveOut(BasicBlock* bb, unsigned structLcl, // Returns: // Liveness information. // -StructUseDeaths PromotionLiveness::GetDeathsForStructLocal(GenTreeLclVarCommon* lcl) +StructDeaths PromotionLiveness::GetDeathsForStructLocal(GenTreeLclVarCommon* lcl) { assert(lcl->OperIsLocal() && lcl->TypeIs(TYP_STRUCT) && (m_aggregates[lcl->GetLclNum()] != nullptr)); BitVec aggDeaths; @@ -782,7 +786,7 @@ StructUseDeaths PromotionLiveness::GetDeathsForStructLocal(GenTreeLclVarCommon* unsigned lclNum = lcl->GetLclNum(); AggregateInfo* aggInfo = m_aggregates[lclNum]; - return StructUseDeaths(aggDeaths, (unsigned)aggInfo->Replacements.size()); + return StructDeaths(aggDeaths, (unsigned)aggInfo->Replacements.size()); } //------------------------------------------------------------------------ @@ -792,7 +796,7 @@ StructUseDeaths PromotionLiveness::GetDeathsForStructLocal(GenTreeLclVarCommon* // Returns: // True if so. 
// -bool StructUseDeaths::IsRemainderDying() const +bool StructDeaths::IsRemainderDying() const { BitVecTraits traits(1 + m_numFields, nullptr); return BitVecOps::IsMember(&traits, m_deaths, 0); @@ -805,7 +809,7 @@ bool StructUseDeaths::IsRemainderDying() const // Returns: // True if so. // -bool StructUseDeaths::IsReplacementDying(unsigned index) const +bool StructDeaths::IsReplacementDying(unsigned index) const { BitVecTraits traits(1 + m_numFields, nullptr); return BitVecOps::IsMember(&traits, m_deaths, 1 + index); @@ -842,13 +846,11 @@ void PromotionLiveness::DumpVarSet(BitVec set, BitVec allVars) { if (j == 0) { - // 14 chars printf("%sV%02u(remainder)", sep, (unsigned)i); } else { const Replacement& rep = agg->Replacements[j - 1]; - // 14 chars printf("%sV%02u.[%03u..%03u)", sep, (unsigned)i, rep.Offset, rep.Offset + genTypeSize(rep.AccessType)); }