//===-- AssignmentTrackingAnalysis.cpp ------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include <unordered_map>

#define DEBUG_TYPE "debug-ata"

STATISTIC(NumDefsScanned, "Number of dbg locs that get scanned for removal");
STATISTIC(NumDefsRemoved, "Number of dbg locs removed");
STATISTIC(NumWedgesScanned, "Number of dbg wedges scanned");
STATISTIC(NumWedgesChanged, "Number of dbg wedges changed");
    cl::desc("Maximum num basic blocks before debug info dropped"),

/// Option for debugging the pass, determines if the memory location fragment
/// filling happens after generating the variable locations.

/// Print the results of the analysis. Respects -filter-print-funcs.

/// Coalesce adjacent dbg locs describing memory locations that have contiguous
/// fragments. This reduces the cost of LiveDebugValues which does SSA
/// construction for each explicitly stated variable fragment.

// Implicit conversions are disabled for enum class types, so unfortunately we
// need to create a DenseMapInfo wrapper around the specified underlying type.
    return static_cast<VariableID>(Wrapped::getEmptyKey());

    return static_cast<VariableID>(Wrapped::getTombstoneKey());

    return Wrapped::getHashValue(static_cast<unsigned>(Val));

/// Helper class to build FunctionVarLocs, since that class isn't easy to
/// modify. TODO: There's not a great deal of value in the split, it could be
/// worth merging the two classes.
  // Use an unordered_map so we don't invalidate iterators after
  // insert/modifications.
  std::unordered_map<VarLocInsertPt, SmallVector<VarLocInfo>> VarLocsBeforeInst;

  /// Find or insert \p V and return the ID.

  /// Get a variable from its \p ID.
    return Variables[static_cast<unsigned>(ID)];

  /// Return ptr to wedge of defs or nullptr if no defs come just before \p
  /// Before.
    auto R = VarLocsBeforeInst.find(Before);
    if (R == VarLocsBeforeInst.end())

  /// Replace the defs that come just before \p Before with \p Wedge.
    VarLocsBeforeInst[Before] = std::move(Wedge);

  /// Add a def for a variable that is valid for its lifetime.

  /// Add a def to the wedge of defs just before \p Before.
    VarLocsBeforeInst[Before].emplace_back(VarLoc);
  // Print the variable table first. TODO: Sorting by variable could make the
  // output more stable?
  unsigned Counter = -1;
  OS << "=== Variables ===\n";
    // Skip first entry because it is a dummy entry.
    OS << "[" << Counter << "] " << V.getVariable()->getName();
    if (auto F = V.getFragment())
      OS << " bits [" << F->OffsetInBits << ", "
         << F->OffsetInBits + F->SizeInBits << ")";
    if (const auto *IA = V.getInlinedAt())
      OS << " inlined-at " << *IA;

    OS << "DEF Var=[" << (unsigned)Loc.VariableID << "]"
       << " Expr=" << *Loc.Expr << " Values=(";
    for (auto *Op : Loc.Values.location_ops()) {
      errs() << Op->getName() << " ";

  // Print the single location variables.
  OS << "=== Single location vars ===\n";

  // Print the non-single-location defs in line with IR.
  OS << "=== In-line variable defs ===";
    OS << "\n" << BB.getName() << ":\n";
  // Add the single-location variables first.
  for (const auto &VarLoc : Builder.SingleLocVars)

  // Mark the end of the section.
  SingleVarLocEnd = VarLocRecords.size();

  // Insert a contiguous block of VarLocInfos for each instruction, mapping it
  // to the start and end position in the vector with VarLocsBeforeInst. This
  // block includes VarLocs for any DbgVariableRecords attached to that
  for (auto &P : Builder.VarLocsBeforeInst) {
    // Process VarLocs attached to a DbgRecord alongside their marker
    if (isa<const DbgRecord *>(P.first))

    unsigned BlockStart = VarLocRecords.size();
    // Any VarLocInfos attached to a DbgRecord should now be remapped to their
    // marker Instruction, in order of DbgRecord appearance and prior to any
    // VarLocInfos attached directly to that instruction.

      // Even though DVR defines a variable location, VarLocsBeforeInst can
      // still be empty if that VarLoc was redundant.
      auto It = Builder.VarLocsBeforeInst.find(&DVR);
      if (It == Builder.VarLocsBeforeInst.end())

    unsigned BlockEnd = VarLocRecords.size();
    // Record the start and end indices.
    if (BlockEnd != BlockStart)
      VarLocsBeforeInst[I] = {BlockStart, BlockEnd};

  // Copy the Variables vector from the builder's UniqueVector.
  assert(Variables.empty() && "Expect clear before init");
  // UniqueVector's IDs are one-based (which means the VarLocInfo VarID values
  // are one-based) so reserve an extra and insert a dummy.
  Variables.reserve(Builder.Variables.size() + 1);
  Variables.push_back(DebugVariable(nullptr, std::nullopt, nullptr));
  Variables.append(Builder.Variables.begin(), Builder.Variables.end());

  VarLocRecords.clear();
  VarLocsBeforeInst.clear();
/// Walk backwards along constant GEPs and bitcasts to the base storage from \p
/// Start as far as possible. Prepend \p Expression with the offset and append
/// it with a DW_OP_deref that has been implicit until now. Returns the
/// walked-to value and modified expression.
static std::pair<Value *, DIExpression *>

  APInt OffsetInBytes(DL.getTypeSizeInBits(Start->getType()), false);
  Start->stripAndAccumulateInBoundsConstantOffsets(DL, OffsetInBytes);
    Ops = {dwarf::DW_OP_plus_uconst, OffsetInBytes.getZExtValue()};
                                        Expression, Ops, /*StackValue=*/false,
                                        /*EntryValue=*/false);

/// Extract the offset used in \p DIExpr. Returns std::nullopt if the
/// expression doesn't explicitly describe a memory location with DW_OP_deref
/// or if the expression is too complex to interpret.
static std::optional<int64_t>
  unsigned ExpectedDerefIdx = 0;
  // Extract the offset.
  if (NumElements > 2 && Elements[0] == dwarf::DW_OP_plus_uconst) {
    ExpectedDerefIdx = 2;
  } else if (NumElements > 3 && Elements[0] == dwarf::DW_OP_constu) {
    ExpectedDerefIdx = 3;
    if (Elements[2] == dwarf::DW_OP_plus)
    else if (Elements[2] == dwarf::DW_OP_minus)

  // If that's all there is it means there's no deref.
  if (ExpectedDerefIdx >= NumElements)

  // Check the next element is DW_OP_deref - otherwise this is too complex or
  // isn't a deref expression.
  if (Elements[ExpectedDerefIdx] != dwarf::DW_OP_deref)

  // Check the final operation is either the DW_OP_deref or is a fragment.
  if (NumElements == ExpectedDerefIdx + 1)
    return Offset; // Ends with deref.
  unsigned ExpectedFragFirstIdx = ExpectedDerefIdx + 1;
  unsigned ExpectedFragFinalIdx = ExpectedFragFirstIdx + 2;
  if (NumElements == ExpectedFragFinalIdx + 1 &&
    return Offset; // Ends with deref + fragment.

  // Don't bother trying to interpret anything more complex.
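// Illustrative examples (not from the original source) of how the checks
// above interpret a DIExpression's elements; the exact offsets are assumptions
// for illustration only:
//   {DW_OP_deref}                                -> offset 0
//   {DW_OP_plus_uconst, 8, DW_OP_deref}          -> offset 8
//   {DW_OP_constu, 4, DW_OP_minus, DW_OP_deref}  -> offset -4
//   {DW_OP_constu, 4, DW_OP_mul, DW_OP_deref}    -> rejected as too complex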
/// A whole (unfragmented) source variable.

// Enabling fragment coalescing reduces compiler run time when instruction
// referencing is enabled. However, it may cause LiveDebugVariables to create
// incorrect locations. Since instruction-referencing mode effectively
// bypasses LiveDebugVariables we only enable coalescing if the cl::opt flag
// has not been explicitly set and instruction-referencing is turned on.
      Triple(F.getParent()->getTargetTriple()));
/// In dwarf emission, the following sequence
///    1. dbg.value ... Fragment(0, 64)
///    2. dbg.value ... Fragment(0, 32)
/// effectively sets Fragment(32, 32) to undef (each def sets all bits not in
/// the intersection of the fragments to having "no location"). This makes
/// sense for implicit location values because splitting the computed values
/// could be troublesome, and is probably quite uncommon. When we convert
/// dbg.assigns to dbg.value+deref this kind of thing is common, and describing
/// a location (memory) rather than a value means we don't need to worry about
/// splitting any values, so we try to recover the rest of the fragment
///
/// This class performs a(nother) dataflow analysis over the function, adding
/// variable locations so that any bits of a variable with a memory location
/// have that location explicitly reinstated at each subsequent variable
/// location definition that doesn't overwrite those bits. i.e. after a
/// variable location def, insert new defs for the memory location with
/// fragments for the difference of "all bits currently in memory" and "the
/// fragment of the second def".
class MemLocFragmentFill {

  bool CoalesceAdjacentFragments;

  // 0 = no memory location.
                       OffsetInBitsTy, BaseAddress,
  FragsInMemMap::Allocator IntervalMapAlloc;

  /// IDs for memory location base addresses in maps. Use 0 to indicate that
  /// there's no memory location.
    unsigned OffsetInBits;

  /// BBInsertBeforeMap holds a description for the set of location defs to be
  /// inserted after the analysis is complete. It is updated during the
  /// dataflow and the entry for a block is CLEARED each time it is
  /// (re-)visited. After the dataflow is complete, each block entry will
  /// contain the set of defs calculated during the final (fixed-point)
  /// iteration.

  static bool intervalMapsAreEqual(
                                   const FragsInMemMap &A,
                                   const FragsInMemMap &B) {
    auto AIt = A.begin(), AEnd = A.end();
    auto BIt = B.begin(), BEnd = B.end();
    for (; AIt != AEnd; ++AIt, ++BIt) {
        return false; // B has fewer elements than A.
      if (AIt.start() != BIt.start() || AIt.stop() != BIt.stop())
        return false; // Interval is different.
        return false; // Value at interval is different.
    // AIt == AEnd. Check BIt is also now at end.
  static bool varFragMapsAreEqual(const VarFragMap &A, const VarFragMap &B) {
    if (A.size() != B.size())
    for (const auto &APair : A) {
      auto BIt = B.find(APair.first);
      if (!intervalMapsAreEqual(APair.second, BIt->second))

  /// Return a string for the value that \p BaseID represents.
  std::string toString(unsigned BaseID) {
    return Bases[BaseID].getVariableLocationOp(0)->getName().str();

  /// Format string describing a FragsInMemMap (IntervalMap) interval.
  std::string toString(FragsInMemMap::const_iterator It, bool Newline = true) {
    std::stringstream S(String);
      S << "[" << It.start() << ", " << It.stop()
      S << "invalid iterator (end)";

  FragsInMemMap meetFragments(const FragsInMemMap &A, const FragsInMemMap &B) {
    FragsInMemMap Result(IntervalMapAlloc);
    for (auto AIt = A.begin(), AEnd = A.end(); AIt != AEnd; ++AIt) {
      // This is basically copied from process() and inverted (process is
      // performing something like a union whereas this is more of an

      // There's no work to do if interval `a` overlaps no fragments in map `B`.
      if (!B.overlaps(AIt.start(), AIt.stop()))

      // Does StartBit intersect an existing fragment?
      auto FirstOverlap = B.find(AIt.start());
      bool IntersectStart = FirstOverlap.start() < AIt.start();
                 << ", IntersectStart: " << IntersectStart << "\n");

      // Does EndBit intersect an existing fragment?
      auto LastOverlap = B.find(AIt.stop());
          LastOverlap != B.end() && LastOverlap.start() < AIt.stop();
                 << ", IntersectEnd: " << IntersectEnd << "\n");

      // Check if both ends of `a` intersect the same interval `b`.
      if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
        // Insert `a` (`a` is contained in `b`) if the values match.
        if (*AIt && *AIt == *FirstOverlap)
          Result.insert(AIt.start(), AIt.stop(), *AIt);
        // There's an overlap but `a` is not fully contained within
        // `b`. Shorten any end-point intersections.
        auto Next = FirstOverlap;
        if (*AIt && *AIt == *FirstOverlap)
          Result.insert(AIt.start(), FirstOverlap.stop(), *AIt);
        if (*AIt && *AIt == *LastOverlap)
          Result.insert(LastOverlap.start(), AIt.stop(), *AIt);
        // Insert all intervals in map `B` that are contained within interval
        // `a` where the values match.
        while (Next != B.end() && Next.start() < AIt.stop() &&
               Next.stop() <= AIt.stop()) {
                     << "- insert intersection of a and " << toString(Next));
          if (*AIt && *AIt == *Next)
            Result.insert(Next.start(), Next.stop(), *Next);
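    // Illustrative example (not from the original source) of this
    // intersect-like meet: meeting A = {[0,32) -> base1, [32,64) -> base1}
    // with B = {[0,64) -> base1} keeps {[0,32) -> base1, [32,64) -> base1},
    // since both maps agree on the base value for those bits; any interval
    // whose value differs between A and B is dropped from the result.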
  /// Meet \p A and \p B, storing the result in \p A.
  void meetVars(VarFragMap &A, const VarFragMap &B) {
    // Result = meet(a, b) for a in A, b in B where Var(a) == Var(b)
    for (auto It = A.begin(), End = A.end(); It != End; ++It) {
      unsigned AVar = It->first;
      FragsInMemMap &AFrags = It->second;
      auto BIt = B.find(AVar);
        continue; // Var has no bits defined in B.
      AFrags = meetFragments(AFrags, BIt->second);
574// LiveIn locs for BB is the meet of the already-processed preds' LiveOut 577// Ignore preds that haven't been processed yet. This is essentially the 578// same as initialising all variables to implicit top value (⊤) which is 579// the identity value for the meet operation. 580if (!Visited.
count(Pred))
583auto PredLiveOut = LiveOut.
find(Pred);
588 BBLiveIn = PredLiveOut->second;
591LLVM_DEBUG(
dbgs() <<
"BBLiveIn = meet BBLiveIn, " << Pred->getName()
593 meetVars(BBLiveIn, PredLiveOut->second);
596// An empty set is ⊥ for the intersect-like meet operation. If we've 597// already got ⊥ there's no need to run the code - we know the result is 598// ⊥ since `meet(a, ⊥) = ⊥`. 599if (BBLiveIn.size() == 0)
603auto CurrentLiveInEntry = LiveIn.
find(&BB);
604// If there's no LiveIn entry for the block yet, add it. 605if (CurrentLiveInEntry == LiveIn.
end()) {
608 LiveIn[&BB] = std::move(BBLiveIn);
609return/*Changed=*/true;
612// If the LiveIn set has changed (expensive check) update it and return 614if (!varFragMapsAreEqual(BBLiveIn, CurrentLiveInEntry->second)) {
616 CurrentLiveInEntry->second = std::move(BBLiveIn);
617return/*Changed=*/true;
621return/*Changed=*/false;
625unsigned StartBit,
unsigned EndBit,
unsignedBase,
627assert(StartBit < EndBit &&
"Cannot create fragment of size <= 0");
632 Loc.OffsetInBits = StartBit;
633 Loc.SizeInBits = EndBit - StartBit;
634assert(
Base &&
"Expected a non-zero ID for Base address");
637 BBInsertBeforeMap[&BB][
Before].push_back(Loc);
639 <<
" bits [" << StartBit <<
", " << EndBit <<
")\n");
642 /// Inserts a new dbg def if the interval found when looking up \p StartBit 643 /// in \p FragMap starts before \p StartBit or ends after \p EndBit (which 644 /// indicates - assuming StartBit->EndBit has just been inserted - that the 645 /// slice has been coalesced in the map). 647unsigned StartBit,
unsigned EndBit,
unsignedBase,
649if (!CoalesceAdjacentFragments)
651// We've inserted the location into the map. The map will have coalesced 652// adjacent intervals (variable fragments) that describe the same memory 653// location. Use this knowledge to insert a debug location that describes 654// that coalesced fragment. This may eclipse other locs we've just 655// inserted. This is okay as redundant locs will be cleaned up later. 656auto CoalescedFrag = FragMap.find(StartBit);
657// Bail if no coalescing has taken place. 658if (CoalescedFrag.start() == StartBit && CoalescedFrag.stop() == EndBit)
661LLVM_DEBUG(
dbgs() <<
"- Insert loc for bits " << CoalescedFrag.start()
662 <<
" to " << CoalescedFrag.stop() <<
"\n");
663 insertMemLoc(BB,
Before, Var, CoalescedFrag.start(), CoalescedFrag.stop(),
668 VarFragMap &LiveSet) {
    // Don't bother doing anything for this variable if we know it's fully
    // promoted. We're only interested in variables that (sometimes) live on

    // [StartBit: EndBit) are the bits affected by this def.
      StartBit = Frag->OffsetInBits;
686 EndBit = StartBit + Frag->SizeInBits;
693// We will only fill fragments for simple memory-describing dbg.value 694// intrinsics. If the fragment offset is the same as the offset from the 695// base pointer, do The Thing, otherwise fall back to normal dbg.value 696// behaviour. AssignmentTrackingLowering has generated DIExpressions 697// written in terms of the base pointer. 698// TODO: Remove this condition since the fragment offset doesn't always 699// equal the offset from base pointer (e.g. for a SROA-split variable). 702 DerefOffsetInBytes && *DerefOffsetInBytes * 8 == StartBit
709// First of all, any locs that use mem that are disrupted need reinstating. 710// Unfortunately, IntervalMap doesn't let us insert intervals that overlap 711// with existing intervals so this code involves a lot of fiddling around 712// with intervals to do that manually. 713auto FragIt = LiveSet.find(Var);
715// Check if the variable does not exist in the map. 716if (FragIt == LiveSet.end()) {
717// Add this variable to the BB map. 718autoP = LiveSet.try_emplace(Var, FragsInMemMap(IntervalMapAlloc));
719assert(
P.second &&
"Var already in map?");
720// Add the interval to the fragment map. 721P.first->second.insert(StartBit, EndBit,
Base);
724// The variable has an entry in the map. 726 FragsInMemMap &FragMap = FragIt->second;
727// First check the easy case: the new fragment `f` doesn't overlap with any 729if (!FragMap.overlaps(StartBit, EndBit)) {
731 FragMap.insert(StartBit, EndBit,
Base);
732 coalesceFragments(BB,
Before, Var, StartBit, EndBit,
Base, VarLoc.
DL,
736// There is at least one overlap. 738// Does StartBit intersect an existing fragment? 739auto FirstOverlap = FragMap.find(StartBit);
740assert(FirstOverlap != FragMap.end());
741bool IntersectStart = FirstOverlap.start() < StartBit;
743// Does EndBit intersect an existing fragment? 744auto LastOverlap = FragMap.find(EndBit);
745bool IntersectEnd = LastOverlap.valid() && LastOverlap.start() < EndBit;
747// Check if both ends of `f` intersect the same interval `i`. 748if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
750// Shorten `i` so that there's space to insert `f`. 756// Save values for use after inserting a new interval. 757auto EndBitOfOverlap = FirstOverlap.stop();
758unsigned OverlapValue = FirstOverlap.value();
760// Shorten the overlapping interval. 761 FirstOverlap.setStop(StartBit);
762 insertMemLoc(BB,
Before, Var, FirstOverlap.start(), StartBit,
763 OverlapValue, VarLoc.
DL);
765// Insert a new interval to represent the end part. 766 FragMap.insert(EndBit, EndBitOfOverlap, OverlapValue);
767 insertMemLoc(BB,
Before, Var, EndBit, EndBitOfOverlap, OverlapValue,
770// Insert the new (middle) fragment now there is space. 771 FragMap.insert(StartBit, EndBit,
Base);
773// There's an overlap but `f` may not be fully contained within 774// `i`. Shorten any end-point intersections so that we can then 780// Shorten any end-point intersections. 783// Split off at the intersection. 784 FirstOverlap.setStop(StartBit);
785 insertMemLoc(BB,
Before, Var, FirstOverlap.start(), StartBit,
786 *FirstOverlap, VarLoc.
DL);
794// Split off at the intersection. 795 LastOverlap.setStart(EndBit);
796 insertMemLoc(BB,
Before, Var, EndBit, LastOverlap.stop(), *LastOverlap,
801// FirstOverlap and LastOverlap have been shortened such that they're 802// no longer overlapping with [StartBit, EndBit). Delete any overlaps 803// that remain (these will be fully contained within `f`). 805// [ - i - ] } Intersection shortening that has happened above. 809// [i2 ] } Intervals fully contained within `f` get erased. 811// [ - f - ][ i ] } Completed insertion. 812auto It = FirstOverlap;
814 ++It;
// IntersectStart: first overlap has been shortened. 815while (It.valid() && It.start() >= StartBit && It.stop() <= EndBit) {
817 It.erase();
// This increments It after removing the interval. 819// We've dealt with all the overlaps now! 820assert(!FragMap.overlaps(StartBit, EndBit));
822 FragMap.insert(StartBit, EndBit,
Base);
825 coalesceFragments(BB,
Before, Var, StartBit, EndBit,
Base, VarLoc.
DL,
831void process(
BasicBlock &BB, VarFragMap &LiveSet) {
832 BBInsertBeforeMap[&BB].
clear();
835if (
constauto *Locs = FnVarLocs->
getWedge(&DVR)) {
837 addDef(Loc, &DVR, *
I.getParent(), LiveSet);
841if (
constauto *Locs = FnVarLocs->
getWedge(&
I)) {
843 addDef(Loc, &
I, *
I.getParent(), LiveSet);
852bool CoalesceAdjacentFragments)
853 : Fn(Fn), VarsWithStackSlot(VarsWithStackSlot),
854 CoalesceAdjacentFragments(CoalesceAdjacentFragments) {}
856 /// Add variable locations to \p FnVarLocs so that any bits of a variable 857 /// with a memory location have that location explicitly reinstated at each 858 /// subsequent variable location definition that that doesn't overwrite those 859 /// bits. i.e. after a variable location def, insert new defs for the memory 860 /// location with fragments for the difference of "all bits currently in 861 /// memory" and "the fragment of the second def". e.g. 865 /// var x bits 0 to 63: value in memory 866 /// more instructions 867 /// var x bits 0 to 31: value is %0 871 /// var x bits 0 to 63: value in memory 872 /// more instructions 873 /// var x bits 0 to 31: value is %0 874 /// var x bits 32 to 61: value in memory ; <-- new loc def 880 this->FnVarLocs = FnVarLocs;
882// Prepare for traversal. 885 std::priority_queue<unsigned int, std::vector<unsigned int>,
886 std::greater<unsigned int>>
888 std::priority_queue<unsigned int, std::vector<unsigned int>,
889 std::greater<unsigned int>>
893 {
// Init OrderToBB and BBToOrder. 894unsignedint RPONumber = 0;
896 OrderToBB[RPONumber] = BB;
897 BBToOrder[BB] = RPONumber;
898 Worklist.push(RPONumber);
901 LiveIn.
init(RPONumber);
902 LiveOut.
init(RPONumber);
905// Perform the traversal. 907// This is a standard "intersect of predecessor outs" dataflow problem. To 908// solve it, we perform meet() and process() using the two worklist method 909// until the LiveIn data for each block becomes unchanging. 911// This dataflow is essentially working on maps of sets and at each meet we 912// intersect the maps and the mapped sets. So, initialized live-in maps 913// monotonically decrease in value throughout the dataflow. 915while (!Worklist.empty() || !Pending.empty()) {
916// We track what is on the pending worklist to avoid inserting the same 917// thing twice. We could avoid this with a custom priority queue, but 918// this is probably not worth it. 921while (!Worklist.empty()) {
925bool InChanged = meet(*BB, Visited);
926// Always consider LiveIn changed on the first visit. 927 InChanged |= Visited.
insert(BB).second;
930 << BB->
getName() <<
" has new InLocs, process it\n");
931// Mutate a copy of LiveIn while processing BB. Once we've processed 932// the terminator LiveSet is the LiveOut set for BB. 933// This is an expensive copy! 934 VarFragMap LiveSet = LiveIn[BB];
936// Process the instructions in the block. 937 process(*BB, LiveSet);
939// Relatively expensive check: has anything changed in LiveOut for BB? 940if (!varFragMapsAreEqual(LiveOut[BB], LiveSet)) {
942 <<
" has new OutLocs, add succs to worklist: [ ");
943 LiveOut[BB] = std::move(LiveSet);
945if (OnPending.
insert(Succ).second) {
947 Pending.push(BBToOrder[Succ]);
954 Worklist.swap(Pending);
955// At this point, pending must be empty, since it was just the empty 957assert(Pending.empty() &&
"Pending should be empty");
960// Insert new location defs. 961for (
auto &Pair : BBInsertBeforeMap) {
962 InsertMap &
Map = Pair.second;
963for (
auto &Pair : Map) {
964auto InsertBefore = Pair.first;
965assert(InsertBefore &&
"should never be null");
966auto FragMemLocs = Pair.second;
969for (
auto &FragMemLoc : FragMemLocs) {
971if (FragMemLoc.SizeInBits !=
972 *
Aggregates[FragMemLoc.Var].first->getSizeInBits())
974 Expr, FragMemLoc.OffsetInBits, FragMemLoc.SizeInBits);
976 FragMemLoc.OffsetInBits / 8);
978 FragMemLoc.DL.getInlinedAt());
979 FnVarLocs->
addVarLoc(InsertBefore, Var, Expr, FragMemLoc.DL,
980 Bases[FragMemLoc.Base]);
987/// AssignmentTrackingLowering encapsulates a dataflow analysis over a function 988/// that interprets assignment tracking debug info metadata and stores in IR to 989/// create a map of variable locations. 990classAssignmentTrackingLowering {
  /// The kind of location in use for a variable, where Mem is the stack home,
  /// Val is an SSA value or const, and None means that there is not one single
  /// kind (either because there are multiple or because there is none; it may
  /// prove useful to split this into two values in the future).
  ///
  /// LocKind is a join-semilattice with the partial order:
  ///
  ///   join(Mem, Mem) = Mem
  ///   join(Val, Val) = Val
  ///   join(Mem, Val) = None
  ///   join(None, Mem) = None
  ///   join(None, Val) = None
  ///   join(None, None) = None
  ///
  /// Note: the order is not `None > Val > Mem` because we're using DIAssignID
  /// to name assignments and are not tracking the actual stored values.
  /// Therefore currently there's no way to ensure that Mem values and Val
  /// values are the same. This could be a future extension, though it's not
  /// clear that many additional locations would be recovered that way in
  /// practice as the likelihood of this situation arising naturally seems
  enum class LocKind { Mem, Val, None };
1017 /// An abstraction of the assignment of a value to a variable or memory 1020 /// An Assignment is Known or NoneOrPhi. A Known Assignment means we have a 1021 /// DIAssignID ptr that represents it. NoneOrPhi means that we don't (or 1022 /// can't) know the ID of the last assignment that took place. 1024 /// The Status of the Assignment (Known or NoneOrPhi) is another 1025 /// join-semilattice. The partial order is: 1026 /// NoneOrPhi > Known {id_0, id_1, ...id_N} 1028 /// i.e. for all values x and y where x != y: 1030 /// join(x, y) = NoneOrPhi 1033enum S { Known, NoneOrPhi }
Status;
1034 /// ID of the assignment. nullptr if Status is not Known. 1036 /// The dbg.assign that marks this dbg-def. Mem-defs don't use this field. 1040bool isSameSourceAssignment(
const Assignment &
Other)
const{
1041// Don't include Source in the equality check. Assignments are 1042// defined by their ID, not debug intrinsic(s). 1046staticconstchar *
LUT[] = {
"Known",
"NoneOrPhi"};
1055elseif (
constauto *DAI = dyn_cast<DbgAssignIntrinsic *>(Source))
1058 OS << cast<DbgVariableRecord *>(Source);
1063return Assignment(Known,
ID, Source);
1067"Cannot make an assignment from a non-assign DbgVariableRecord");
1068return Assignment(Known,
ID, Source);
1071return Assignment(Known,
ID, Source);
1074return Assignment(Known,
ID);
1076static Assignment makeNoneOrPhi() {
return Assignment(NoneOrPhi,
nullptr); }
1077// Again, need a Top value? 1078 Assignment() :
Status(NoneOrPhi),
ID(nullptr) {}
// Can we delete this? 1080// If the Status is Known then we expect there to be an assignment ID. 1085// If the Status is Known then we expect there to be an assignment ID. 1090// If the Status is Known then we expect there to be an assignment ID. 1095// If the Status is Known then we expect there to be an assignment ID. 1103usingUntaggedStoreAssignmentMap =
1108 /// The highest numbered VariableID for partially promoted variables plus 1, 1109 /// the values for which start at 1. 1110unsigned TrackedVariablesVectorSize = 0;
1111 /// Map a variable to the set of variables that it fully contains. 1113 /// Map untagged stores to the variable fragments they assign to. Used by 1114 /// processUntaggedInstruction. 1115 UntaggedStoreAssignmentMap UntaggedStoreVars;
1117// Machinery to defer inserting dbg.values. 1119 InstInsertMap InsertBeforeMap;
1120 /// Clear the location definitions currently cached for insertion after /p 1125// emitDbgValue can be called with: 1126// Source=[AssignRecord|DbgValueInst*|DbgAssignIntrinsic*|DbgVariableRecord*] 1127// Since AssignRecord can be cast to one of the latter two types, and all 1128// other types have a shared interface, we use a template to handle the latter 1129// three types, and an explicit overload for AssignRecord that forwards to 1130// the template version with the right type. 1132template <
typename T>
1135staticbool mapsAreEqual(
constBitVector &Mask,
const AssignmentMap &
A,
1136const AssignmentMap &
B) {
1138 return A[VarID].isSameSourceAssignment(B[VarID]);
1142 /// Represents the stack and debug assignments in a block. Used to describe 1143 /// the live-in and live-out values for blocks, as well as the "current" 1144 /// value as we process each instruction in a block. 1146 /// The set of variables (VariableID) being tracked in this block. 1148 /// Dominating assignment to memory for each variable, indexed by 1150 AssignmentMap StackHomeValue;
1151 /// Dominating assignemnt to each variable, indexed by VariableID. 1152 AssignmentMap DebugValue;
1153 /// Location kind for each variable. LiveLoc indicates whether the 1154 /// dominating assignment in StackHomeValue (LocKind::Mem), DebugValue 1155 /// (LocKind::Val), or neither (LocKind::None) is valid, in that order of 1156 /// preference. This cannot be derived by inspecting DebugValue and 1157 /// StackHomeValue due to the fact that there's no distinction in 1158 /// Assignment (the class) between whether an assignment is unknown or a 1159 /// merge of multiple assignments (both are Status::NoneOrPhi). In other 1160 /// words, the memory location may well be valid while both DebugValue and 1161 /// StackHomeValue contain Assignments that have a Status of NoneOrPhi. 1162 /// Indexed by VariableID. 1167const AssignmentMap &getAssignmentMap(AssignmentKind Kind)
const{
1170return StackHomeValue;
1176 AssignmentMap &getAssignmentMap(AssignmentKind Kind) {
1177returnconst_cast<AssignmentMap &
>(
1178const_cast<constBlockInfo *
>(
this)->getAssignmentMap(Kind));
1182return VariableIDsInBlock[
static_cast<unsigned>(Var)];
1185const Assignment &getAssignment(AssignmentKind Kind,
VariableID Var)
const{
1186assert(isVariableTracked(Var) &&
"Var not tracked in block");
1187return getAssignmentMap(Kind)[
static_cast<unsigned>(Var)];
1191assert(isVariableTracked(Var) &&
"Var not tracked in block");
1192return LiveLoc[
static_cast<unsigned>(Var)];
1195 /// Set LocKind for \p Var only: does not set LocKind for VariableIDs of 1196 /// fragments contained win \p Var. 1198 VariableIDsInBlock.
set(
static_cast<unsigned>(Var));
1199 LiveLoc[
static_cast<unsigned>(Var)] = K;
1202 /// Set the assignment in the \p Kind assignment map for \p Var only: does 1203 /// not set the assignment for VariableIDs of fragments contained win \p 1205void setAssignment(AssignmentKind Kind,
VariableID Var,
1206const Assignment &AV) {
1207 VariableIDsInBlock.
set(
static_cast<unsigned>(Var));
1208 getAssignmentMap(Kind)[
static_cast<unsigned>(Var)] = AV;
1211 /// Return true if there is an assignment matching \p AV in the \p Kind 1212 /// assignment map. Does consider assignments for VariableIDs of fragments 1213 /// contained win \p Var. 1214bool hasAssignment(AssignmentKind Kind,
VariableID Var,
1215const Assignment &AV)
const{
1216if (!isVariableTracked(Var))
1218return AV.isSameSourceAssignment(getAssignment(Kind, Var));
1221 /// Compare every element in each map to determine structural equality 1224return VariableIDsInBlock ==
Other.VariableIDsInBlock &&
1225 LiveLoc ==
Other.LiveLoc &&
1226 mapsAreEqual(VariableIDsInBlock, StackHomeValue,
1227Other.StackHomeValue) &&
1228 mapsAreEqual(VariableIDsInBlock, DebugValue,
Other.DebugValue);
1232return LiveLoc.size() == DebugValue.size() &&
1233 LiveLoc.size() == StackHomeValue.size();
1236 /// Clear everything and initialise with ⊤-values for all variables. 1237voidinit(
int NumVars) {
1238 StackHomeValue.clear();
1242 StackHomeValue.insert(StackHomeValue.begin(), NumVars,
1243 Assignment::makeNoneOrPhi());
1244 DebugValue.insert(DebugValue.begin(), NumVars,
1245 Assignment::makeNoneOrPhi());
1246 LiveLoc.
insert(LiveLoc.
begin(), NumVars, LocKind::None);
1249 /// Helper for join. 1250template <
typename ElmtType,
typename FnInputType>
1254 ElmtType (*Fn)(FnInputType, FnInputType)) {
1258 /// See comment for AssignmentTrackingLowering::joinBlockInfo. 1259static BlockInfo join(
const BlockInfo &
A,
const BlockInfo &
B,
int NumVars) {
1262// Intersect = join(a, b) for a in A, b in B where Var(a) == Var(b) 1263// Difference = join(x, ⊤) for x where Var(x) is in A xor B 1264// Join = Intersect ∪ Difference 1266// This is achieved by performing a join on elements from A and B with 1267// variables common to both A and B (join elements indexed by var 1268// intersect), then adding ⊤-value elements for vars in A xor B. The 1269// latter part is equivalent to performing join on elements with variables 1270// in A xor B with the ⊤-value for the map element since join(x, ⊤) = ⊤. 1271// BlockInfo::init initializes all variable entries to the ⊤ value so we 1272// don't need to explicitly perform that step as Join.VariableIDsInBlock 1273// is set to the union of the variables in A and B at the end of this 1279 Intersect &=
B.VariableIDsInBlock;
1282 joinElmt(
VarID, Join.LiveLoc,
A.LiveLoc,
B.LiveLoc, joinKind);
1283 joinElmt(
VarID, Join.DebugValue,
A.DebugValue,
B.DebugValue,
1285 joinElmt(
VarID, Join.StackHomeValue,
A.StackHomeValue,
B.StackHomeValue,
1289 Join.VariableIDsInBlock =
A.VariableIDsInBlock;
1290 Join.VariableIDsInBlock |=
B.VariableIDsInBlock;
1303 /// Helper for process methods to track variables touched each frame. 1306 /// The set of variables that sometimes are not located in their stack home. 1313 /// Join the LiveOut values of preds that are contained in \p Visited into 1314 /// LiveIn[BB]. Return True if LiveIn[BB] has changed as a result. LiveIn[BB] 1315 /// values monotonically increase. See the @link joinMethods join methods 1316 /// @endlink documentation for more info. 1318 ///@name joinMethods 1319 /// Functions that implement `join` (the least upper bound) for the 1320 /// join-semilattice types used in the dataflow. There is an explicit bottom 1321 /// value (⊥) for some types and and explicit top value (⊤) for all types. 1324 /// Join(A, B) >= A && Join(A, B) >= B 1328 /// These invariants are important for monotonicity. 1330 /// For the map-type functions, all unmapped keys in an empty map are 1331 /// associated with a bottom value (⊥). This represents their values being 1332 /// unknown. Unmapped keys in non-empty maps (joining two maps with a key 1333 /// only present in one) represents either a variable going out of scope or 1334 /// dropped debug info. It is assumed the key is associated with a top value 1335 /// (⊤) in this case (unknown location / assignment). 1337static LocKind joinKind(LocKind
A, LocKind
B);
1338static Assignment joinAssignment(
const Assignment &
A,
const Assignment &
B);
1339 BlockInfo joinBlockInfo(
const BlockInfo &
A,
const BlockInfo &
B);
1342 /// Process the instructions in \p BB updating \p LiveSet along the way. \p 1343 /// LiveSet must be initialized with the current live-in locations before 1345void process(
BasicBlock &BB, BlockInfo *LiveSet);
1346 ///@name processMethods 1347 /// Methods to process instructions in order to update the LiveSet (current 1348 /// location information). 1350void processNonDbgInstruction(
Instruction &
I, BlockInfo *LiveSet);
1352 /// Update \p LiveSet after encountering an instruction with a DIAssignID 1353 /// attachment, \p I. 1354void processTaggedInstruction(
Instruction &
I, BlockInfo *LiveSet);
1355 /// Update \p LiveSet after encountering an instruciton without a DIAssignID 1356 /// attachment, \p I. 1357void processUntaggedInstruction(
Instruction &
I, BlockInfo *LiveSet);
1358void processDbgAssign(AssignRecord Assign, BlockInfo *LiveSet);
1360void processDbgValue(
1362 BlockInfo *LiveSet);
1363 /// Add an assignment to memory for the variable /p Var. 1364void addMemDef(BlockInfo *LiveSet,
VariableID Var,
const Assignment &AV);
1365 /// Add an assignment to the variable /p Var. 1366void addDbgDef(BlockInfo *LiveSet,
VariableID Var,
const Assignment &AV);
1369 /// Set the LocKind for \p Var. 1370void setLocKind(BlockInfo *LiveSet,
VariableID Var, LocKind K);
1371 /// Get the live LocKind for a \p Var. Requires addMemDef or addDbgDef to 1372 /// have been called for \p Var first. 1373 LocKind getLocKind(BlockInfo *LiveSet,
VariableID Var);
1374 /// Return true if \p Var has an assignment in \p M matching \p AV. 1375bool hasVarWithAssignment(BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
1377 /// Return the set of VariableIDs corresponding the fragments contained fully 1378 /// within the variable/fragment \p Var. 1381 /// Mark \p Var as having been touched this frame. Note, this applies only 1382 /// to the exact fragment \p Var and not to any fragments contained within. 1385 /// Emit info for variables that are fully promoted. 1391 : Fn(Fn), Layout(Layout), VarsWithStackSlot(VarsWithStackSlot) {}
1392 /// Run the analysis, adding variable location info to \p FnVarLocs. Returns 1393 /// true if any variable locations have been added to FnVarLocs. 1399AssignmentTrackingLowering::getContainedFragments(
VariableID Var)
const{
1400autoR = VarContains.
find(Var);
1401if (R == VarContains.
end())
1406void AssignmentTrackingLowering::touchFragment(
VariableID Var) {
1407 VarsTouchedThisFrame.insert(Var);
1410void AssignmentTrackingLowering::setLocKind(BlockInfo *LiveSet,
VariableID Var,
1412auto SetKind = [
this](BlockInfo *LiveSet,
VariableID Var, LocKind
K) {
1413 LiveSet->setLocKind(Var, K);
1416 SetKind(LiveSet, Var, K);
1418// Update the LocKind for all fragments contained within Var. 1419for (
VariableID Frag : getContainedFragments(Var))
1420 SetKind(LiveSet, Frag, K);
1423AssignmentTrackingLowering::LocKind
1424AssignmentTrackingLowering::getLocKind(BlockInfo *LiveSet,
VariableID Var) {
1425return LiveSet->getLocKind(Var);
1428void AssignmentTrackingLowering::addMemDef(BlockInfo *LiveSet,
VariableID Var,
1429const Assignment &AV) {
1430 LiveSet->setAssignment(BlockInfo::Stack, Var, AV);
1432// Use this assigment for all fragments contained within Var, but do not 1433// provide a Source because we cannot convert Var's value to a value for the 1435 Assignment FragAV = AV;
1436 FragAV.Source =
nullptr;
1437for (
VariableID Frag : getContainedFragments(Var))
1438 LiveSet->setAssignment(BlockInfo::Stack, Frag, FragAV);
1441void AssignmentTrackingLowering::addDbgDef(BlockInfo *LiveSet,
VariableID Var,
1442const Assignment &AV) {
1443 LiveSet->setAssignment(BlockInfo::Debug, Var, AV);
1445// Use this assigment for all fragments contained within Var, but do not 1446// provide a Source because we cannot convert Var's value to a value for the 1448 Assignment FragAV = AV;
1449 FragAV.Source =
nullptr;
1450for (
VariableID Frag : getContainedFragments(Var))
1451 LiveSet->setAssignment(BlockInfo::Debug, Frag, FragAV);
1455return cast<DIAssignID>(
I.getMetadata(LLVMContext::MD_DIAssignID));
1464"Cannot get a DIAssignID from a non-assign DbgVariableRecord!");
1468/// Return true if \p Var has an assignment in \p M matching \p AV. 1469bool AssignmentTrackingLowering::hasVarWithAssignment(
1470 BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
VariableID Var,
1471const Assignment &AV) {
1472if (!LiveSet->hasAssignment(Kind, Var, AV))
1475// Check all the frags contained within Var as these will have all been 1476// mapped to AV at the last store to Var. 1477for (
VariableID Frag : getContainedFragments(Var))
1478if (!LiveSet->hasAssignment(Kind, Frag, AV))
1484constchar *
locStr(AssignmentTrackingLowering::LocKind Loc) {
1485usingLocKind = AssignmentTrackingLowering::LocKind;
1511if (isa<const Instruction *>(InsertPt))
1512returngetNextNode(cast<const Instruction *>(InsertPt));
1513returngetNextNode(cast<const DbgRecord *>(InsertPt));
1517return cast<DbgAssignIntrinsic>(DVI);
1522"Attempted to cast non-assign DbgVariableRecord to DVRAssign.");
1526void AssignmentTrackingLowering::emitDbgValue(
1527 AssignmentTrackingLowering::LocKind Kind,
1529if (isa<DbgAssignIntrinsic *>(Source))
1530 emitDbgValue(Kind, cast<DbgAssignIntrinsic *>(Source),
After);
1532 emitDbgValue(Kind, cast<DbgVariableRecord *>(Source),
After);
1534template <
typename T>
1535void AssignmentTrackingLowering::emitDbgValue(
1536 AssignmentTrackingLowering::LocKind Kind,
constT Source,
1546// Find a suitable insert point. 1548assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1556// Insert it into the map for later. 1557 InsertBeforeMap[InsertBefore].
push_back(VarLoc);
1560// NOTE: This block can mutate Kind. 1561if (Kind == LocKind::Mem) {
1563// Check the address hasn't been dropped (e.g. the debug uses may not have 1564// been replaced before deleting a Value). 1565if (
Assign->isKillAddress()) {
1566// The address isn't valid so treat this as a non-memory def. 1572"fragment info should be stored in value-expression only");
1573// Copy the fragment info over from the value-expression to the new 1575if (
auto OptFragInfo =
Source->getExpression()->getFragmentInfo()) {
1576auto FragInfo = *OptFragInfo;
1578 Expr, FragInfo.OffsetInBits, FragInfo.SizeInBits);
1580// The address-expression has an implicit deref, add it now. 1581 std::tie(Val, Expr) =
1588if (Kind == LocKind::Val) {
1589 Emit(
Source->getRawLocation(),
Source->getExpression());
1593if (Kind == LocKind::None) {
1594 Emit(
nullptr,
Source->getExpression());
1599void AssignmentTrackingLowering::processNonDbgInstruction(
1600Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1601if (
I.hasMetadata(LLVMContext::MD_DIAssignID))
1602 processTaggedInstruction(
I, LiveSet);
1604 processUntaggedInstruction(
I, LiveSet);
1607void AssignmentTrackingLowering::processUntaggedInstruction(
1608Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1609// Interpret stack stores that are not tagged as an assignment in memory for 1610// the variables associated with that address. These stores may not be tagged 1611// because a) the store cannot be represented using dbg.assigns (non-const 1612// length or offset) or b) the tag was accidentally dropped during 1613// optimisations. For these stores we fall back to assuming that the stack 1614// home is a valid location for the variables. The benefit is that this 1615// prevents us missing an assignment and therefore incorrectly maintaining 1616// earlier location definitions, and in many cases it should be a reasonable 1617// assumption. However, this will occasionally lead to slight 1618// inaccuracies. The value of a hoisted untagged store will be visible 1619// "early", for example. 1620assert(!
I.hasMetadata(LLVMContext::MD_DIAssignID));
1621auto It = UntaggedStoreVars.find(&
I);
1622if (It == UntaggedStoreVars.end())
1623return;
// No variables associated with the store destination. 1627// Iterate over the variables that this store affects, add a NoneOrPhi dbg 1628// and mem def, set lockind to Mem, and emit a location def for each. 1629for (
auto [Var, Info] : It->second) {
1630// This instruction is treated as both a debug and memory assignment, 1631// meaning the memory location should be used. We don't have an assignment 1632// ID though so use Assignment::makeNoneOrPhi() to create an imaginary one. 1633 addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1634 addDbgDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1635 setLocKind(LiveSet, Var, LocKind::Mem);
1638// Build the dbg location def to insert. 1640// DIExpression: Add fragment and offset. 1643if (
auto Frag =
V.getFragment()) {
1646assert(R &&
"unexpected createFragmentExpression failure");
1650if (
Info.OffsetInBits)
1651 Ops = {dwarf::DW_OP_plus_uconst,
Info.OffsetInBits / 8};
1654/*EntryValue=*/false);
1655// Find a suitable insert point, before the next instruction or DbgRecord 1658assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1660// Get DILocation for this unrecorded assignment. 1663 Fn.
getContext(), 0, 0,
V.getVariable()->getScope(), InlinedAt);
1671// 3. Insert it into the map for later. 1672 InsertBeforeMap[InsertBefore].push_back(VarLoc);
1676void AssignmentTrackingLowering::processTaggedInstruction(
1677Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1680// No dbg.assign intrinsics linked. 1681// FIXME: All vars that have a stack slot this store modifies that don't have 1682// a dbg.assign linked to it should probably treat this like an untagged 1684if (Linked.empty() && LinkedDPAssigns.empty())
1688auto ProcessLinkedAssign = [&](
auto *
Assign) {
1690// Something has gone wrong if VarsWithStackSlot doesn't contain a variable 1691// that is linked to a store. 1693"expected Assign's variable to have stack slot");
1696 addMemDef(LiveSet, Var, AV);
1702// The last assignment to the stack is now AV. Check if the last debug 1703// assignment has a matching Assignment. 1704if (hasVarWithAssignment(LiveSet, BlockInfo::Debug, Var, AV)) {
1705// The StackHomeValue and DebugValue for this variable match so we can 1706// emit a stack home location here. 1710 LiveSet->DebugValue[
static_cast<unsigned>(Var)].
dump(
dbgs());
1712 setLocKind(LiveSet, Var, LocKind::Mem);
1713 emitDbgValue(LocKind::Mem, Assign, &
I);
1717// The StackHomeValue and DebugValue for this variable do not match. I.e. 1718// The value currently stored in the stack is not what we'd expect to 1719// see, so we cannot use emit a stack home location here. Now we will 1720// look at the live LocKind for the variable and determine an appropriate 1721// dbg.value to emit. 1722 LocKind PrevLoc = getLocKind(LiveSet, Var);
1725// The value in memory in memory has changed but we're not currently 1726// using the memory location. Do nothing. 1728 setLocKind(LiveSet, Var, LocKind::Val);
1731// There's been an assignment to memory that we were using as a 1732// location for this variable, and the Assignment doesn't match what 1733// we'd expect to see in memory. 1734 Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
1735if (DbgAV.Status == Assignment::NoneOrPhi) {
1736// We need to terminate any previously open location now. 1738 setLocKind(LiveSet, Var, LocKind::None);
1739 emitDbgValue(LocKind::None, Assign, &
I);
1741// The previous DebugValue Value can be used here. 1743 setLocKind(LiveSet, Var, LocKind::Val);
1745 emitDbgValue(LocKind::Val, DbgAV.Source, &
I);
1747// PrevAV.Source is nullptr so we must emit undef here. 1748 emitDbgValue(LocKind::None, Assign, &
I);
1752case LocKind::None: {
1753// There's been an assignment to memory and we currently are 1754// not tracking a location for the variable. Do not emit anything. 1756 setLocKind(LiveSet, Var, LocKind::None);
1761 ProcessLinkedAssign(DAI);
1763 ProcessLinkedAssign(DVR);
1766void AssignmentTrackingLowering::processDbgAssign(AssignRecord Assign,
1767 BlockInfo *LiveSet) {
1768auto ProcessDbgAssignImpl = [&](
auto *DbgAssign) {
1769// Only bother tracking variables that are at some point stack homed. Other 1770// variables can be dealt with trivially later. 1775 Assignment AV = Assignment::make(
getIDFromMarker(*DbgAssign), DbgAssign);
1776 addDbgDef(LiveSet, Var, AV);
1778LLVM_DEBUG(
dbgs() <<
"processDbgAssign on " << *DbgAssign <<
"\n";);
1782// Check if the DebugValue and StackHomeValue both hold the same 1784if (hasVarWithAssignment(LiveSet, BlockInfo::Stack, Var, AV)) {
1785// They match. We can use the stack home because the debug intrinsics 1786// state that an assignment happened here, and we know that specific 1787// assignment was the last one to take place in memory for this variable. 1789if (DbgAssign->isKillAddress()) {
1792 <<
"Val, Stack matches Debug program but address is killed\n";);
1798 setLocKind(LiveSet, Var, Kind);
1799 emitDbgValue(Kind, DbgAssign, DbgAssign);
1801// The last assignment to the memory location isn't the one that we want 1802// to show to the user so emit a dbg.value(Value). Value may be undef. 1804 setLocKind(LiveSet, Var, LocKind::Val);
1805 emitDbgValue(LocKind::Val, DbgAssign, DbgAssign);
1808if (isa<DbgVariableRecord *>(Assign))
1809return ProcessDbgAssignImpl(cast<DbgVariableRecord *>(Assign));
1810return ProcessDbgAssignImpl(cast<DbgAssignIntrinsic *>(Assign));
1813void AssignmentTrackingLowering::processDbgValue(
1815 BlockInfo *LiveSet) {
1816auto ProcessDbgValueImpl = [&](
auto *
DbgValue) {
1817// Only other tracking variables that are at some point stack homed. 1818// Other variables can be dealt with trivally later. 1823// We have no ID to create an Assignment with so we mark this assignment as 1824// NoneOrPhi. Note that the dbg.value still exists, we just cannot determine 1825// the assignment responsible for setting this value. 1826// This is fine; dbg.values are essentially interchangable with unlinked 1827// dbg.assigns, and some passes such as mem2reg and instcombine add them to 1828// PHIs for promoted variables. 1829 Assignment AV = Assignment::makeNoneOrPhi();
1830 addDbgDef(LiveSet, Var, AV);
1834 <<
" -> Val, dbg.value override");
1836 setLocKind(LiveSet, Var, LocKind::Val);
1839if (isa<DbgVariableRecord *>(DbgValueRecord))
1840return ProcessDbgValueImpl(cast<DbgVariableRecord *>(DbgValueRecord));
1841return ProcessDbgValueImpl(cast<DbgValueInst *>(DbgValueRecord));
1845if (
autoF =
DbgValue.getExpression()->getFragmentInfo())
1846returnF->SizeInBits == 0;
1850void AssignmentTrackingLowering::processDbgInstruction(
1852auto *DVI = dyn_cast<DbgVariableIntrinsic>(&
I);
1856// Ignore assignments to zero bits of the variable. 1860if (
auto *DAI = dyn_cast<DbgAssignIntrinsic>(&
I))
1861 processDbgAssign(DAI, LiveSet);
1862elseif (
auto *DVI = dyn_cast<DbgValueInst>(&
I))
1863 processDbgValue(DVI, LiveSet);
1865void AssignmentTrackingLowering::processDbgVariableRecord(
1867// Ignore assignments to zero bits of the variable. 1872 processDbgAssign(&DVR, LiveSet);
1874 processDbgValue(&DVR, LiveSet);
1878assert(!
After.isTerminator() &&
"Can't insert after a terminator");
1880if (R == InsertBeforeMap.end())
1886if (R == InsertBeforeMap.end())
1891void AssignmentTrackingLowering::process(
BasicBlock &BB, BlockInfo *LiveSet) {
1892// If the block starts with DbgRecords, we need to process those DbgRecords as 1893// their own frame without processing any instructions first. 1894bool ProcessedLeadingDbgRecords = !BB.
begin()->hasDbgRecords();
1896assert(VarsTouchedThisFrame.empty());
1897// Process the instructions in "frames". A "frame" includes a single 1898// non-debug instruction followed any debug instructions before the 1899// next non-debug instruction. 1901// Skip the current instruction if it has unprocessed DbgRecords attached 1902// (see comment above `ProcessedLeadingDbgRecords`). 1903if (ProcessedLeadingDbgRecords) {
1904// II is now either a debug intrinsic, a non-debug instruction with no 1905// attached DbgRecords, or a non-debug instruction with attached processed 1907// II has not been processed. 1908if (!isa<DbgInfoIntrinsic>(&*
II)) {
1909if (
II->isTerminator())
1911 resetInsertionPoint(*
II);
1912 processNonDbgInstruction(*
II, LiveSet);
1913assert(LiveSet->isValid());
1917// II is now either a debug intrinsic, a non-debug instruction with no 1918// attached DbgRecords, or a non-debug instruction with attached unprocessed 1920if (
II != EI &&
II->hasDbgRecords()) {
1921// Skip over non-variable debug records (i.e., labels). They're going to 1922// be read from IR (possibly re-ordering them within the debug record 1923// range) rather than from the analysis results. 1925 resetInsertionPoint(DVR);
1926 processDbgVariableRecord(DVR, LiveSet);
1927assert(LiveSet->isValid());
1930 ProcessedLeadingDbgRecords =
true;
1932auto *
Dbg = dyn_cast<DbgInfoIntrinsic>(&*
II);
1935 resetInsertionPoint(*
II);
1936 processDbgInstruction(*Dbg, LiveSet);
1937assert(LiveSet->isValid());
1940// II is now a non-debug instruction either with no attached DbgRecords, or 1941// with attached processed DbgRecords. II has not been processed, and all 1942// debug instructions or DbgRecords in the frame preceding II have been 1945// We've processed everything in the "frame". Now determine which variables 1946// cannot be represented by a dbg.declare. 1947for (
auto Var : VarsTouchedThisFrame) {
1948 LocKind Loc = getLocKind(LiveSet, Var);
1949// If a variable's LocKind is anything other than LocKind::Mem then we 1950// must note that it cannot be represented with a dbg.declare. 1951// Note that this check is enough without having to check the result of 1952// joins() because for join to produce anything other than Mem after 1953// we've already seen a Mem we'd be joining None or Val with Mem. In that 1954// case, we've already hit this codepath when we set the LocKind to Val 1955// or None in that block. 1956if (Loc != LocKind::Mem) {
1959 NotAlwaysStackHomed.insert(Aggr);
1962 VarsTouchedThisFrame.clear();
1966AssignmentTrackingLowering::LocKind
1967AssignmentTrackingLowering::joinKind(LocKind
A, LocKind
B) {
1970returnA ==
B ?
A : LocKind::None;
1973AssignmentTrackingLowering::Assignment
1974AssignmentTrackingLowering::joinAssignment(
const Assignment &
A,
1975const Assignment &
B) {
1977// NoneOrPhi(null, null) > Known(v, ?s) 1979// If either are NoneOrPhi the join is NoneOrPhi. 1980// If either value is different then the result is 1981// NoneOrPhi (joining two values is a Phi). 1982if (!
A.isSameSourceAssignment(
B))
1983return Assignment::makeNoneOrPhi();
1984if (
A.Status == Assignment::NoneOrPhi)
1985return Assignment::makeNoneOrPhi();
1987// Source is used to lookup the value + expression in the debug program if 1988// the stack slot gets assigned a value earlier than expected. Because 1989// we're only tracking the one dbg.assign, we can't capture debug PHIs. 1990// It's unlikely that we're losing out on much coverage by avoiding that 1992// The Source may differ in this situation: 1994// dbg.assign i32 0, ..., !1, ... 1996// dbg.assign i32 1, ..., !1, ... 1997// Here the same assignment (!1) was performed in both preds in the source, 1998// but we can't use either one unless they are identical (e.g. .we don't 1999// want to arbitrarily pick between constant values). 2000auto JoinSource = [&]() -> AssignRecord {
2001if (
A.Source ==
B.Source)
2003if (!
A.Source || !
B.Source)
2004return AssignRecord();
2005assert(isa<DbgVariableRecord *>(
A.Source) ==
2006 isa<DbgVariableRecord *>(
B.Source));
2007if (isa<DbgVariableRecord *>(
A.Source) &&
2008 cast<DbgVariableRecord *>(
A.Source)->isEquivalentTo(
2009 *cast<DbgVariableRecord *>(
B.Source)))
2011if (isa<DbgAssignIntrinsic *>(
A.Source) &&
2012 cast<DbgAssignIntrinsic *>(
A.Source)->isIdenticalTo(
2013 cast<DbgAssignIntrinsic *>(
B.Source)))
2015return AssignRecord();
2017 AssignRecord
Source = JoinSource();
2018assert(
A.Status ==
B.Status &&
A.Status == Assignment::Known);
2020return Assignment::make(
A.ID, Source);
2023AssignmentTrackingLowering::BlockInfo
2024AssignmentTrackingLowering::joinBlockInfo(
const BlockInfo &
A,
2025const BlockInfo &
B) {
2026return BlockInfo::join(
A,
B, TrackedVariablesVectorSize);
2029bool AssignmentTrackingLowering::join(
2033// Ignore backedges if we have not visited the predecessor yet. As the 2034// predecessor hasn't yet had locations propagated into it, most locations 2035// will not yet be valid, so treat them as all being uninitialized and 2036// potentially valid. If a location guessed to be correct here is 2037// invalidated later, we will remove it when we revisit this block. This 2038// is essentially the same as initialising all LocKinds and Assignments to 2039// an implicit ⊥ value which is the identity value for the join operation. 2041if (Visited.
count(Pred))
2045// No preds visited yet. 2046if (VisitedPreds.
empty()) {
2048bool DidInsert = It.second;
2050 It.first->second.init(TrackedVariablesVectorSize);
2051return/*Changed*/ DidInsert;
2054// Exactly one visited pred. Copy the LiveOut from that pred into BB LiveIn. 2055if (VisitedPreds.
size() == 1) {
2056const BlockInfo &PredLiveOut = LiveOut.
find(VisitedPreds[0])->second;
2057auto CurrentLiveInEntry = LiveIn.
find(&BB);
2059// Check if there isn't an entry, or there is but the LiveIn set has 2060// changed (expensive check). 2061if (CurrentLiveInEntry == LiveIn.
end())
2062 LiveIn.
insert(std::make_pair(&BB, PredLiveOut));
2063elseif (PredLiveOut != CurrentLiveInEntry->second)
2064 CurrentLiveInEntry->second = PredLiveOut;
2066return/*Changed*/false;
2067return/*Changed*/true;
2070// More than one pred. Join LiveOuts of blocks 1 and 2. 2072const BlockInfo &PredLiveOut0 = LiveOut.
find(VisitedPreds[0])->second;
2073const BlockInfo &PredLiveOut1 = LiveOut.
find(VisitedPreds[1])->second;
2074 BlockInfo BBLiveIn = joinBlockInfo(PredLiveOut0, PredLiveOut1);
2076// Join the LiveOuts of subsequent blocks. 2079constauto &PredLiveOut = LiveOut.
find(Pred);
2081"block should have been processed already");
2082 BBLiveIn = joinBlockInfo(std::move(BBLiveIn), PredLiveOut->second);
2085// Save the joined result for BB. 2086auto CurrentLiveInEntry = LiveIn.
find(&BB);
2087// Check if there isn't an entry, or there is but the LiveIn set has changed 2088// (expensive check). 2089if (CurrentLiveInEntry == LiveIn.
end())
2091elseif (BBLiveIn != CurrentLiveInEntry->second)
2092 CurrentLiveInEntry->second = std::move(BBLiveIn);
2094return/*Changed*/false;
2095return/*Changed*/true;
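// Illustrative walk-through of join(): on the first visit to a loop header
// only the preheader has been visited, so LiveIn is a straight copy of the
// preheader's LiveOut (the unvisited latch contributes the implicit bottom
// value). Once the latch has been processed, join() recomputes LiveIn as the
// lattice join of both predecessors' LiveOuts and reports whether anything
// changed, which determines whether the header is revisited.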
/// Return true if A fully contains B.
static bool fullyContains(DIExpression::FragmentInfo A,
                          DIExpression::FragmentInfo B) {
  auto ALeft = A.OffsetInBits;
  auto BLeft = B.OffsetInBits;
  if (BLeft < ALeft)
    return false;

  auto ARight = ALeft + A.SizeInBits;
  auto BRight = BLeft + B.SizeInBits;
  if (BRight > ARight)
    return false;
  return true;
}
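// For example: fragment A = (OffsetInBits=0, SizeInBits=64) fully contains
// B = (16, 32) because [16, 48) lies within [0, 64), but it does not fully
// contain C = (48, 32) because C extends to bit 80. Partial overlaps like C
// are deliberately not recorded by the overlap map built below.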
static std::optional<at::AssignmentInfo>
getUntaggedStoreAssignmentInfo(const Instruction &I,
                               const DataLayout &Layout) {
  // Don't bother checking if this is an AllocaInst. We know this
  // instruction has no tag which means there are no variables associated
  // with it.
  if (const auto *SI = dyn_cast<StoreInst>(&I))
    return at::getAssignmentInfo(Layout, SI);
  if (const auto *MI = dyn_cast<MemIntrinsic>(&I))
    return at::getAssignmentInfo(Layout, MI);
  // Alloca or non-store-like inst.
  return std::nullopt;
}

DbgDeclareInst *DynCastToDbgDeclare(DbgVariableIntrinsic *DVI) {
  return dyn_cast<DbgDeclareInst>(DVI);
}
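// For example (illustrative): a plain "store i32 %v, ptr %alloca" that has no
// DIAssignID attachment is an untagged store. getUntaggedStoreAssignmentInfo()
// above recovers the stored-to base, offset and size from the store (or memory
// intrinsic) so that such stores can still clobber and update stack locations
// for the variables that live in that alloca.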
/// Build a map of {Variable x: Variables y} where all variable fragments
/// contained within the variable fragment x are in set y. This means that
/// y does not contain all overlaps because partial overlaps are excluded.
///
/// While we're iterating over the function, add single location defs for
/// dbg.declares to \p FnVarLocs.
///
/// Variables that are interesting to this pass are added to
/// FnVarLocs->Variables first. TrackedVariablesVectorSize is set to the ID of
/// the last interesting variable plus 1, meaning variables with ID 1
/// (inclusive) to TrackedVariablesVectorSize (exclusive) are interesting. The
/// subsequent variables are either stack homed or fully promoted.
///
/// Finally, populate UntaggedStoreVars with a mapping of untagged stores to
/// the stored-to variable fragments.
///
/// These tasks are bundled together to reduce the number of times we need
/// to iterate over the function as they can be achieved together in one pass.
static AssignmentTrackingLowering::OverlapMap buildOverlapMapAndRecordDeclares(
    Function &Fn, FunctionVarLocsBuilder *FnVarLocs,
    const DenseSet<DebugAggregate> &VarsWithStackSlot,
    AssignmentTrackingLowering::UntaggedStoreAssignmentMap &UntaggedStoreVars,
    unsigned &TrackedVariablesVectorSize) {
  DenseSet<DebugVariable> Seen;
  // Map of Variable: [Fragments].
  DenseMap<DebugAggregate, SmallVector<DebugVariable, 8>> FragmentMap;
  // Iterate over all instructions:
  // - dbg.declare    -> add single location variable record
  // - dbg.*          -> Add fragments to FragmentMap
  // - untagged store -> Add fragments to FragmentMap and update
  //                     UntaggedStoreVars.
  // We need to add fragments for untagged stores too so that we can correctly
  // clobber overlapped fragment locations later.
  SmallVector<DbgDeclareInst *> InstDeclares;
  SmallVector<DbgVariableRecord *> DPDeclares;
  auto ProcessDbgRecord = [&](auto *Record, auto &DeclareList) {
    if (auto *Declare = DynCastToDbgDeclare(Record)) {
      DeclareList.push_back(Declare);
      return;
    }
    DebugVariable DV = DebugVariable(Record);
    DebugAggregate DA = {DV.getVariable(), DV.getInlinedAt()};
    if (!VarsWithStackSlot.contains(DA))
      return;
    if (Seen.insert(DV).second)
      FragmentMap[DA].push_back(DV);
  };
  for (auto &BB : Fn) {
    for (auto &I : BB) {
      for (DbgVariableRecord &DVR : filterDbgVars(I.getDbgRecordRange()))
        ProcessDbgRecord(&DVR, DPDeclares);
      if (auto *DII = dyn_cast<DbgVariableIntrinsic>(&I)) {
        ProcessDbgRecord(DII, InstDeclares);
      } else if (auto Info =
                     getUntaggedStoreAssignmentInfo(I, Fn.getDataLayout())) {
        // Find markers linked to this alloca.
        auto HandleDbgAssignForStore = [&](auto *Assign) {
          std::optional<DIExpression::FragmentInfo> FragInfo;

          // Skip this assignment if the affected bits are outside of the
          // variable fragment.
          if (!at::calculateFragmentIntersect(
                  I.getDataLayout(), Info->Base, Info->OffsetInBits,
                  Info->SizeInBits, Assign, FragInfo) ||
              (FragInfo && FragInfo->SizeInBits == 0))
            return;

          // FragInfo from calculateFragmentIntersect is nullopt if the
          // resultant fragment matches DAI's fragment or entire variable - in
          // which case copy the fragment info from DAI. If FragInfo is still
          // nullopt after the copy it means "no fragment info" instead, which
          // is how it is usually interpreted.
          if (!FragInfo)
            FragInfo = Assign->getExpression()->getFragmentInfo();
          DebugVariable DV =
              DebugVariable(Assign->getVariable(), FragInfo,
                            Assign->getDebugLoc().getInlinedAt());
          DebugAggregate DA = {DV.getVariable(), DV.getInlinedAt()};
          if (!VarsWithStackSlot.contains(DA))
            return;

          // Cache this info for later.
          UntaggedStoreVars[&I].push_back(
              {FnVarLocs->insertVariable(DV), *Info});

          if (Seen.insert(DV).second)
            FragmentMap[DA].push_back(DV);
        };
        for (DbgAssignIntrinsic *DAI : at::getAssignmentMarkers(Info->Base))
          HandleDbgAssignForStore(DAI);
        for (DbgVariableRecord *DVR : at::getDVRAssignmentMarkers(Info->Base))
          HandleDbgAssignForStore(DVR);
      }
    }
  }

  // Sort the fragment map for each DebugAggregate in ascending
  // order of fragment size - there should be no duplicates.
  for (auto &Pair : FragmentMap) {
    SmallVector<DebugVariable, 8> &Frags = Pair.second;
    std::sort(Frags.begin(), Frags.end(),
              [](const DebugVariable &Next, const DebugVariable &Elmt) {
                return Elmt.getFragmentOrDefault().SizeInBits >
                       Next.getFragmentOrDefault().SizeInBits;
              });
    // Check for duplicates.
    assert(std::adjacent_find(Frags.begin(), Frags.end()) == Frags.end());
  }

  // Build the map.
  AssignmentTrackingLowering::OverlapMap Map;
  for (auto &Pair : FragmentMap) {
    auto &Frags = Pair.second;
    for (auto It = Frags.begin(), IEnd = Frags.end(); It != IEnd; ++It) {
      DIExpression::FragmentInfo Frag = It->getFragmentOrDefault();
      // Find the frags that this is contained within.
      //
      // Because Frags is sorted by size and none have the same offset and
      // size, we know that this frag can only be contained by subsequent
      // elements.
      auto OtherIt = It;
      ++OtherIt;
      VariableID ThisVar = FnVarLocs->insertVariable(*It);
      for (; OtherIt != IEnd; ++OtherIt) {
        DIExpression::FragmentInfo OtherFrag = OtherIt->getFragmentOrDefault();
        VariableID OtherVar = FnVarLocs->insertVariable(*OtherIt);
        if (fullyContains(OtherFrag, Frag))
          Map[OtherVar].push_back(ThisVar);
      }
    }
  }

  // VariableIDs are 1-based so the variable-tracking bitvector needs
  // NumVariables plus 1 bits.
  TrackedVariablesVectorSize = FnVarLocs->getNumVariables() + 1;

  // Finally, insert the declares afterwards, so the first IDs are all
  // partially stack homed vars.
  for (auto *DDI : InstDeclares)
    FnVarLocs->addSingleLocVar(DebugVariable(DDI), DDI->getExpression(),
                               DDI->getDebugLoc(), DDI->getWrappedLocation());
  for (auto *DVR : DPDeclares)
    FnVarLocs->addSingleLocVar(DebugVariable(DVR), DVR->getExpression(),
                               DVR->getDebugLoc(),
                               RawLocationWrapper(DVR->getRawLocation()));
  return Map;
}
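// Illustrative example of the resulting OverlapMap: for a variable with
// fragments (0, 64), (0, 32) and (32, 32), the entry for (0, 64) contains the
// IDs of (0, 32) and (32, 32) because both are fully contained within it. A
// partially overlapping fragment such as (16, 32) would not be recorded
// against either 32-bit fragment.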
bool AssignmentTrackingLowering::run(FunctionVarLocsBuilder *FnVarLocsBuilder) {
  if (Fn.size() > MaxNumBlocks) {
    LLVM_DEBUG(dbgs() << "[AT]: Dropping var locs in: " << Fn.getName()
                      << ": too many blocks (" << Fn.size() << ")\n");
    at::deleteAll(&Fn);
    return false;
  }

  FnVarLocs = FnVarLocsBuilder;

  // The general structure here is inspired by VarLocBasedImpl.cpp
  // (LiveDebugValues).

  // Build the variable fragment overlap map.
  // Note that this pass doesn't handle partial overlaps correctly (FWIW
  // neither does LiveDebugVariables) because that is difficult to do and
  // appears to be a rare occurrence.
  VarContains = buildOverlapMapAndRecordDeclares(
      Fn, FnVarLocs, *VarsWithStackSlot, UntaggedStoreVars,
      TrackedVariablesVectorSize);
  // Prepare for traversal.
  ReversePostOrderTraversal<Function *> RPOT(&Fn);
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Worklist;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Pending;

  { // Init OrderToBB and BBToOrder.
    unsigned int RPONumber = 0;
    for (BasicBlock *BB : RPOT) {
      OrderToBB[RPONumber] = BB;
      BBToOrder[BB] = RPONumber;
      Worklist.push(RPONumber);
      ++RPONumber;
    }
    LiveIn.init(RPONumber);
    LiveOut.init(RPONumber);
  }
  // Perform the traversal.
  //
  // This is a standard "union of predecessor outs" dataflow problem. To solve
  // it, we perform join() and process() using the two worklist method until
  // the LiveIn data for each block becomes unchanging. The "proof" that this
  // terminates can be put together by looking at the comments around LocKind,
  // Assignment, and the various join methods, which show that all the elements
  // involved are made up of join-semilattices; LiveIn(n) can only
  // monotonically increase in value throughout the dataflow.
  SmallPtrSet<BasicBlock *, 16> Visited;
  while (!Worklist.empty()) {
    // We track what is on the pending worklist to avoid inserting the same
    // thing twice.
    SmallPtrSet<BasicBlock *, 16> OnPending;
    while (!Worklist.empty()) {
      BasicBlock *BB = OrderToBB[Worklist.top()];
      Worklist.pop();
      bool InChanged = join(*BB, Visited);
      // Always consider LiveIn changed on the first visit.
      InChanged |= Visited.insert(BB).second;
      if (InChanged) {
        // Mutate a copy of LiveIn while processing BB. After calling process
        // LiveSet is the LiveOut set for BB.
        BlockInfo LiveSet = LiveIn[BB];

        // Process the instructions in the block.
        process(*BB, &LiveSet);

        // Relatively expensive check: has anything changed in LiveOut for BB?
        if (LiveOut[BB] != LiveSet) {
          LLVM_DEBUG(dbgs() << BB->getName()
                            << " has new OutLocs, add succs to worklist: [ ");
          LiveOut[BB] = std::move(LiveSet);
          for (BasicBlock *Succ : successors(BB)) {
            if (OnPending.insert(Succ).second) {
              LLVM_DEBUG(dbgs() << Succ->getName() << " ");
              Pending.push(BBToOrder[Succ]);
            }
          }
          LLVM_DEBUG(dbgs() << "]\n");
        }
      }
    }
    Worklist.swap(Pending);
    // At this point, pending must be empty, since it was just the empty
    // worklist.
    assert(Pending.empty() && "Pending should be empty");
  }
  // That's the hard part over. Now we just have some admin to do.

  // Record whether we inserted any intrinsics.
  bool InsertedAnyIntrinsics = false;

  // Identify and add defs for single location variables.
  //
  // Go through all of the defs that we plan to add. If the aggregate variable
  // it's a part of is not in the NotAlwaysStackHomed set we can emit a single
  // location def and omit the rest. Add an entry to AlwaysStackHomed so that
  // we can identify those unneeded defs later.
  DenseSet<DebugAggregate> AlwaysStackHomed;
  for (const auto &Pair : InsertBeforeMap) {
    auto &Vec = Pair.second;
    for (VarLocInfo VarLoc : Vec) {
      DebugVariable Var = FnVarLocs->getVariable(VarLoc.VariableID);
      DebugAggregate Aggr{Var.getVariable(), Var.getInlinedAt()};

      // Skip this Var if it's not always stack homed.
      if (NotAlwaysStackHomed.contains(Aggr))
        continue;

      // Skip complex cases such as when different fragments of a variable have
      // been split into different allocas. Skipping in this case means falling
      // back to using a list of defs (which could reduce coverage, but is no
      // less correct).
      bool Simple =
          VarLoc.Expr->getNumElements() == 1 && VarLoc.Expr->startsWithDeref();
      if (!Simple) {
        NotAlwaysStackHomed.insert(Aggr);
        continue;
      }

      // All source assignments to this variable remain and all stores to any
      // part of the variable store to the same address (with varying
      // offsets). We can just emit a single location for the whole variable.
      //
      // Unless we've already done so, create the single location def now.
      if (AlwaysStackHomed.insert(Aggr).second) {
        // TODO: When more complex cases are handled VarLoc.Expr should be
        // built appropriately rather than always using an empty DIExpression.
        // The assert below is a reminder.
        assert(Simple);
        VarLoc.Expr = DIExpression::get(Fn.getContext(), {});
        FnVarLocs->addSingleLocVar(Var, VarLoc.Expr, VarLoc.DL, VarLoc.Values);
        InsertedAnyIntrinsics = true;
      }
    }
  }

  // Insert the other DEFs.
  for (const auto &[InsertBefore, Vec] : InsertBeforeMap) {
    SmallVector<VarLocInfo> NewDefs;
    for (const VarLocInfo &VarLoc : Vec) {
      DebugVariable Var = FnVarLocs->getVariable(VarLoc.VariableID);
      DebugAggregate Aggr{Var.getVariable(), Var.getInlinedAt()};
      // If this variable is always stack homed then we have already inserted a
      // dbg.declare and deleted this dbg.value.
      if (AlwaysStackHomed.contains(Aggr))
        continue;
      NewDefs.push_back(VarLoc);
      InsertedAnyIntrinsics = true;
    }

    FnVarLocs->setWedge(InsertBefore, std::move(NewDefs));
  }

  InsertedAnyIntrinsics |= emitPromotedVarLocs(FnVarLocs);

  return InsertedAnyIntrinsics;
}
bool AssignmentTrackingLowering::emitPromotedVarLocs(
    FunctionVarLocsBuilder *FnVarLocs) {
  bool InsertedAnyIntrinsics = false;
  // Go through every block, translating debug intrinsics for fully promoted
  // variables into FnVarLocs location defs. No analysis required for these.
  auto TranslateDbgRecord = [&](auto *Record) {
    // Skip variables that haven't been promoted - we've dealt with those
    // already.
    if (VarsWithStackSlot->contains(getAggregate(Record)))
      return;
    auto InsertBefore = getNextNode(Record);
    assert(InsertBefore && "Unexpected: debug intrinsics after a terminator");
    FnVarLocs->addVarLoc(InsertBefore, DebugVariable(Record),
                         Record->getExpression(), Record->getDebugLoc(),
                         RawLocationWrapper(Record->getRawLocation()));
    InsertedAnyIntrinsics = true;
  };
  for (auto &BB : Fn) {
    for (auto &I : BB) {
      // Skip instructions other than dbg.values and dbg.assigns.
      for (DbgVariableRecord &DVR : filterDbgVars(I.getDbgRecordRange()))
        if (DVR.isDbgValue() || DVR.isDbgAssign())
          TranslateDbgRecord(&DVR);
      auto *DVI = dyn_cast<DbgValueInst>(&I);
      if (DVI)
        TranslateDbgRecord(DVI);
    }
  }
  return InsertedAnyIntrinsics;
}
/// Remove redundant definitions within sequences of consecutive location defs.
/// This is done using a backward scan to keep the last def describing a
/// specific variable/fragment.
///
/// This implements removeRedundantDbgInstrsUsingBackwardScan from
/// lib/Transforms/Utils/BasicBlockUtils.cpp for locations described with
/// FunctionVarLocsBuilder instead of with intrinsics.
static bool
removeRedundantDbgLocsUsingBackwardScan(const BasicBlock *BB,
                                        FunctionVarLocsBuilder &FnVarLocs) {
  bool Changed = false;
  DenseMap<DebugAggregate, BitVector> VariableDefinedBytes;
  // Scan over the entire block, not just over the instructions mapped by
  // FnVarLocs, because wedges in FnVarLocs may only be separated by debug
  // instructions.
  for (const Instruction &I : reverse(*BB)) {
    if (!isa<DbgVariableIntrinsic>(I)) {
      // Sequence of consecutive defs ended. Clear map for the next one.
      VariableDefinedBytes.clear();
    }

    auto HandleLocsForWedge = [&](auto *WedgePosition) {
      // Get the location defs that start just before this instruction.
      const auto *Locs = FnVarLocs.getWedge(WedgePosition);
      if (!Locs)
        return;

      NumWedgesScanned++;
      bool ChangedThisWedge = false;
      // The new pruned set of defs, reversed because we're scanning backwards.
      SmallVector<VarLocInfo> NewDefsReversed;

      // Iterate over the existing defs in reverse.
      for (auto RIt = Locs->rbegin(), REnd = Locs->rend(); RIt != REnd; ++RIt) {
        NumDefsScanned++;
        DebugVariable Var = FnVarLocs.getVariable(RIt->VariableID);
        DebugAggregate Aggr{Var.getVariable(), Var.getInlinedAt()};
        uint64_t SizeInBits = Aggr.first->getSizeInBits().value_or(0);
        uint64_t SizeInBytes = divideCeil(SizeInBits, 8);

        // Cutoff for large variables to prevent expensive bitvector operations.
        const uint64_t MaxSizeBytes = 2048;

        if (SizeInBytes == 0 || SizeInBytes > MaxSizeBytes) {
          // If the size is unknown (0) then keep this location def to be safe.
          // Do the same for defs of large variables, which would be expensive
          // to represent with a BitVector.
          NewDefsReversed.push_back(*RIt);
          continue;
        }

        // Only keep this location definition if it is not fully eclipsed by
        // other definitions in this wedge that come after it.

        // Insert the bytes the location definition defines.
        auto InsertResult =
            VariableDefinedBytes.try_emplace(Aggr, BitVector(SizeInBytes));
        bool FirstDefinition = InsertResult.second;
        BitVector &DefinedBytes = InsertResult.first->second;

        DIExpression::FragmentInfo Fragment =
            RIt->Expr->getFragmentInfo().value_or(
                DIExpression::FragmentInfo(SizeInBits, 0));
        bool InvalidFragment = Fragment.endInBits() > SizeInBits;
        uint64_t StartInBytes = Fragment.startInBits() / 8;
        uint64_t EndInBytes = divideCeil(Fragment.endInBits(), 8);

        // If this defines any previously undefined bytes, keep it.
        if (FirstDefinition || InvalidFragment ||
            DefinedBytes.find_first_unset_in(StartInBytes, EndInBytes) != -1) {
          if (!InvalidFragment)
            DefinedBytes.set(StartInBytes, EndInBytes);
          NewDefsReversed.push_back(*RIt);
          continue;
        }

        // Redundant def found: throw it away. Since the wedge of defs is being
        // rebuilt, doing nothing is the same as deleting an entry.
        NumDefsRemoved++;
        ChangedThisWedge = true;
      }

      // Un-reverse the defs and replace the wedge with the pruned version.
      if (ChangedThisWedge) {
        std::reverse(NewDefsReversed.begin(), NewDefsReversed.end());
        FnVarLocs.setWedge(WedgePosition, std::move(NewDefsReversed));
        NumWedgesChanged++;
        Changed = true;
      }
    };
    HandleLocsForWedge(&I);
    for (DbgVariableRecord &DVR : reverse(filterDbgVars(I.getDbgRecordRange())))
      HandleLocsForWedge(&DVR);
  }

  return Changed;
}
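// For example (illustrative): if a wedge defines fragment (0, 32) of a
// variable and then fragment (0, 64) of the same variable, the backward scan
// visits the 64-bit def first, marks bytes [0, 8) of the aggregate as
// defined, and drops the 32-bit def because it defines no previously
// undefined bytes.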
/// Remove redundant location defs using a forward scan. This can remove a
/// location definition that is redundant due to indicating that a variable has
/// the same value as is already being indicated by an earlier def.
///
/// This implements removeRedundantDbgInstrsUsingForwardScan from
/// lib/Transforms/Utils/BasicBlockUtils.cpp for locations described with
/// FunctionVarLocsBuilder instead of with intrinsics.
static bool
removeRedundantDbgLocsUsingForwardScan(const BasicBlock *BB,
                                       FunctionVarLocsBuilder &FnVarLocs) {
  bool Changed = false;
  DenseMap<DebugVariable, std::pair<RawLocationWrapper, DIExpression *>>
      VariableMap;

  // Scan over the entire block, not just over the instructions mapped by
  // FnVarLocs, because wedges in FnVarLocs may only be separated by debug
  // instructions.
  for (const Instruction &I : *BB) {
    // Get the defs that come just before this instruction.
    auto HandleLocsForWedge = [&](auto *WedgePosition) {
      const auto *Locs = FnVarLocs.getWedge(WedgePosition);
      if (!Locs)
        return;

      NumWedgesScanned++;
      bool ChangedThisWedge = false;
      // The new pruned set of defs.
      SmallVector<VarLocInfo> NewDefs;

      // Iterate over the existing defs.
      for (const VarLocInfo &Loc : *Locs) {
        NumDefsScanned++;
        DebugVariable Key(FnVarLocs.getVariable(Loc.VariableID).getVariable(),
                          std::nullopt, Loc.DL.getInlinedAt());
        auto VMI = VariableMap.find(Key);

        // Update the map if we found a new value/expression describing the
        // variable, or if the variable wasn't mapped already.
        if (VMI == VariableMap.end() || VMI->second.first != Loc.Values ||
            VMI->second.second != Loc.Expr) {
          VariableMap[Key] = {Loc.Values, Loc.Expr};
          NewDefs.push_back(Loc);
          continue;
        }

        // Did not insert this Loc, which is the same as removing it.
        NumDefsRemoved++;
        ChangedThisWedge = true;
      }

      // Replace the existing wedge with the pruned version.
      if (ChangedThisWedge) {
        FnVarLocs.setWedge(WedgePosition, std::move(NewDefs));
        NumWedgesChanged++;
        Changed = true;
      }
    };

    for (DbgVariableRecord &DVR : filterDbgVars(I.getDbgRecordRange()))
      HandleLocsForWedge(&DVR);
    HandleLocsForWedge(&I);
  }

  return Changed;
}
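// For example (illustrative): two consecutive wedges that both say a variable
// lives in the same value with the same expression describe no state change;
// the forward scan keeps the first def and removes the second. A def that
// changes either the value or the expression resets the mapping and is kept.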
static bool removeUndefDbgLocsFromEntryBlock(const BasicBlock *BB,
                                             FunctionVarLocsBuilder &FnVarLocs) {
  // Do extra work to ensure that we remove semantically unimportant undefs.
  //
  // This is to work around the fact that SelectionDAG will hoist dbg.values
  // using argument values to the top of the entry block. That can move arg
  // dbg.values before undef and constant dbg.values which they previously
  // followed. The easiest thing to do is to just try to feed SelectionDAG
  // input it's happy with.
  //
  // Map of {Variable x: Fragments y} where the fragments y of variable x have
  // at least one non-undef location defined already. Don't use directly,
  // instead call DefineBits and HasDefinedBits.
  DenseMap<DebugAggregate, SmallDenseSet<DIExpression::FragmentInfo>>
      VarsWithDef;
  // Specify that V (a fragment of A) has a non-undef location.
  auto DefineBits = [&VarsWithDef](DebugAggregate A, DebugVariable V) {
    VarsWithDef[A].insert(V.getFragmentOrDefault());
  };
  // Return true if a non-undef location has been defined for V (a fragment of
  // A). Doesn't imply that the location is currently non-undef, just that a
  // non-undef location has been seen previously.
  auto HasDefinedBits = [&VarsWithDef](DebugAggregate A, DebugVariable V) {
    auto FragsIt = VarsWithDef.find(A);
    if (FragsIt == VarsWithDef.end())
      return false;
    return llvm::any_of(FragsIt->second, [V](auto Frag) {
      return DIExpression::fragmentsOverlap(Frag, V.getFragmentOrDefault());
    });
  };

  bool Changed = false;
  // Scan over the entire block, not just over the instructions mapped by
  // FnVarLocs, because wedges in FnVarLocs may only be separated by debug
  // instructions.
  for (const Instruction &I : *BB) {
    // Get the defs that come just before this instruction.
    auto HandleLocsForWedge = [&](auto *WedgePosition) {
      const auto *Locs = FnVarLocs.getWedge(WedgePosition);
      if (!Locs)
        return;

      NumWedgesScanned++;
      bool ChangedThisWedge = false;
      // The new pruned set of defs.
      SmallVector<VarLocInfo> NewDefs;

      // Iterate over the existing defs.
      for (const VarLocInfo &Loc : *Locs) {
        NumDefsScanned++;
        DebugVariable Var = FnVarLocs.getVariable(Loc.VariableID);
        DebugAggregate Aggr{Var.getVariable(), Loc.DL.getInlinedAt()};

        // Remove undef entries that are encountered before any non-undef
        // intrinsics from the entry block.
        if (Loc.Values.isKillLocation(Loc.Expr) && !HasDefinedBits(Aggr, Var)) {
          // Did not insert this Loc, which is the same as removing it.
          NumDefsRemoved++;
          ChangedThisWedge = true;
          continue;
        }

        DefineBits(Aggr, Var);
        NewDefs.push_back(Loc);
      }

      // Replace the existing wedge with the pruned version.
      if (ChangedThisWedge) {
        FnVarLocs.setWedge(WedgePosition, std::move(NewDefs));
        NumWedgesChanged++;
        Changed = true;
      }
    };
    for (DbgVariableRecord &DVR : filterDbgVars(I.getDbgRecordRange()))
      HandleLocsForWedge(&DVR);
    HandleLocsForWedge(&I);
  }

  return Changed;
}
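// For example (illustrative): in the entry block, SelectionDAG may be fed
//   dbg.value(undef, !x, ...)   ; kill location for x
//   dbg.value(%arg, !x, ...)    ; real location for x
// The undef def precedes any non-undef def of x, so it is semantically
// unimportant and removed; once a non-undef def for (part of) x has been
// seen, later undef defs are kept because they now carry meaning.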
static bool removeRedundantDbgLocs(const BasicBlock *BB,
                                   FunctionVarLocsBuilder &FnVarLocs) {
  bool MadeChanges = false;
  MadeChanges |= removeRedundantDbgLocsUsingBackwardScan(BB, FnVarLocs);
  if (BB->isEntryBlock())
    MadeChanges |= removeUndefDbgLocsFromEntryBlock(BB, FnVarLocs);
  MadeChanges |= removeRedundantDbgLocsUsingForwardScan(BB, FnVarLocs);
  return MadeChanges;
}

static DenseSet<DebugAggregate> findVarsWithStackSlot(Function &Fn) {
  DenseSet<DebugAggregate> Result;
  for (auto &BB : Fn) {
    for (auto &I : BB) {
      // Any variable linked to an instruction is considered
      // interesting. Ideally we only need to check Allocas, however, a
      // DIAssignID might get dropped from an alloca but not stores. In that
      // case, we need to consider the variable interesting for NFC behaviour
      // with this change. TODO: Consider only looking at allocas.
      for (DbgAssignIntrinsic *DAI : at::getAssignmentMarkers(&I))
        Result.insert({DAI->getVariable(), DAI->getDebugLoc().getInlinedAt()});
      for (DbgVariableRecord *DVR : at::getDVRAssignmentMarkers(&I))
        Result.insert({DVR->getVariable(), DVR->getDebugLoc().getInlinedAt()});
    }
  }
  return Result;
}
static void analyzeFunction(Function &Fn, const DataLayout &Layout,
                            FunctionVarLocsBuilder *FnVarLocs) {
  // The analysis will generate location definitions for all variables, but we
  // only need to perform a dataflow on the set of variables which have a stack
  // slot. Find those now.
  DenseSet<DebugAggregate> VarsWithStackSlot = findVarsWithStackSlot(Fn);

  bool Changed = false;
  // Use a scope block to clean up AssignmentTrackingLowering before running
  // MemLocFragmentFill to reduce peak memory consumption.
  {
    AssignmentTrackingLowering Pass(Fn, Layout, &VarsWithStackSlot);
    Changed = Pass.run(FnVarLocs);
  }

  if (Changed && EnableMemLocFragFill) {
    MemLocFragmentFill Pass(Fn, &VarsWithStackSlot,
                            shouldCoalesceFragments(Fn));
    Pass.run(FnVarLocs);
  }

  // Remove redundant entries. As well as reducing memory consumption and
  // avoiding waiting cycles later by burning some now, this has another
  // important job. That is to work around some SelectionDAG quirks. See
  // removeRedundantDbgLocsUsingForwardScan comments for more info on that.
  for (auto &BB : Fn)
    removeRedundantDbgLocs(&BB, *FnVarLocs);
}

FunctionVarLocs
DebugAssignmentTrackingAnalysis::run(Function &F,
                                     FunctionAnalysisManager &FAM) {
  auto &DL = F.getDataLayout();

  FunctionVarLocsBuilder Builder;
  analyzeFunction(F, DL, &Builder);

  // Save these results.
  FunctionVarLocs Results;
  Results.init(Builder);
  return Results;
}

bool AssignmentTrackingAnalysis::runOnFunction(Function &F) {
  // Clear previous results.
  Results->clear();

  FunctionVarLocsBuilder Builder;
  analyzeFunction(F, F.getDataLayout(), &Builder);

  // Save these results.
  Results->init(Builder);

  if (PrintResults && isFunctionInPrintList(F.getName()))
    Results->print(errs(), F);

  // Return false because this pass does not modify the function.
  return false;
}

INITIALIZE_PASS(AssignmentTrackingAnalysis, DEBUG_TYPE,
                "Assignment Tracking Analysis", false, true)