//===- DependencyGraph.cpp ------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

  // If it's a DGNode then we dereference the operand iterator.
  if (!isa<MemDGNode>(N)) {
    assert(OpIt != OpItE && "Can't dereference end iterator!");

  // It's a MemDGNode, so we check if we return either the use-def operand,
  // or a mem predecessor.

  // It's a MemDGNode with OpIt == end, so we need to use MemIt.
  assert(MemIt != cast<MemDGNode>(N)->MemPreds.end() &&
         "Can't dereference end iterator!");
  // If it's a DGNode then we increment the use-def iterator.
  if (!isa<MemDGNode>(N)) {
    assert(OpIt != OpItE && "Already at end!");
    // Skip operands that are not instructions.

  // It's a MemDGNode, so if we are not at the end of the use-def iterator we
  // need to first increment that.
    // Skip operands that are not instructions.

  // It's a MemDGNode with OpIt == end, so we need to increment MemIt.
  assert(MemIt != cast<MemDGNode>(N)->MemPreds.end() && "Already at end!");
  assert(DAG == Other.DAG && "Iterators of different DAGs!");
  assert(N == Other.N && "Iterators of different nodes!");
  return OpIt == Other.OpIt && MemIt == Other.MemIt;
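// A minimal usage sketch (assumed caller-side code, not part of this file):
// PredIterator lets a client visit both the use-def operands and the MemPreds
// of a node with a single loop; the preds() accessor name is an assumption.
//   DGNode *N = DAG.getNode(I);
//   for (DGNode *PredN : N->preds(DAG)) {
//     // PredN is either a use-def operand's node or a memory predecessor.
//   }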
  SB->eraseFromBundle(this);
  static constexpr const unsigned Indent = 4;
  for (auto *Pred : MemPreds)
    OS.indent(Indent) << "<-" << *Pred->getInstruction() << "\n";
  // Walk down the chain looking for a mem-dep candidate instruction.
  return cast<MemDGNode>(DAG.getNode(I));

  // Walk up the chain looking for a mem-dep candidate instruction.
  return cast<MemDGNode>(DAG.getNode(I));
  // If we couldn't find a mem node in range TopN - BotN then it's empty.
  if (TopMemN == nullptr)
    return {};
  assert(BotMemN != nullptr && "TopMemN should be null too!");
  // Now that we have the mem-dep nodes, create and return the range.
  return Interval<MemDGNode>(TopMemN, BotMemN);
}

DependencyGraph::DependencyType
DependencyGraph::getRoughDepType(Instruction *FromI, Instruction *ToI) {
  // TODO: Perhaps compile-time improvement by skipping if neither is mem?
  if (FromI->mayWriteToMemory()) {
    if (ToI->mayReadFromMemory())
      return DependencyType::ReadAfterWrite;
    if (ToI->mayWriteToMemory())
      return DependencyType::WriteAfterWrite;
  }
  if (FromI->mayReadFromMemory() && ToI->mayWriteToMemory())
    return DependencyType::WriteAfterRead;
  if (isa<sandboxir::PHINode>(FromI) || isa<sandboxir::PHINode>(ToI))
    return DependencyType::Control;
  if (ToI->isTerminator())
    return DependencyType::Control;
  if (DGNode::isFenceLike(FromI) || DGNode::isFenceLike(ToI))
    return DependencyType::Other;
  return DependencyType::None;
}
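// Illustrative classification (example IR, not taken from this file), assuming
// the usual semantics of mayWriteToMemory()/mayReadFromMemory():
//   store i32 %v, ptr %p    ; FromI writes memory
//   %x = load i32, ptr %p   ; ToI reads memory    -> ReadAfterWrite
//   %y = load i32, ptr %q   ; two loads only read -> falls through to None
// Whether a rough RAW/WAW/WAR becomes an actual dependency is decided later by
// alias() and hasDep() below.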
static bool isOrdered(Instruction *I) {
  auto IsOrdered = [](Instruction *I) {
    if (auto *LI = dyn_cast<LoadInst>(I))
      return !LI->isUnordered();
    if (auto *SI = dyn_cast<StoreInst>(I))
      return !SI->isUnordered();

  };
  bool Is = IsOrdered(I);
  assert((!Is || DGNode::isMemDepCandidate(I)) &&
         "An ordered instruction must be a MemDepCandidate!");
  return Is;
}
bool DependencyGraph::alias(Instruction *SrcI, Instruction *DstI,
                            DependencyType DepType) {
  std::optional<MemoryLocation> DstLocOpt =
      Utils::memoryLocationGetOrNone(DstI);

         "Expected a mem instr");
  // TODO: Check AABudget
  ModRefInfo SrcModRef = isOrdered(SrcI)
      ? ModRefInfo::ModRef
      : Utils::aliasAnalysisGetModRefInfo(BatchAA, SrcI, *DstLocOpt);
  switch (DepType) {
  case DependencyType::ReadAfterWrite:
  case DependencyType::WriteAfterWrite:
    return isModSet(SrcModRef);
  case DependencyType::WriteAfterRead:
    return isRefSet(SrcModRef);
bool DependencyGraph::hasDep(Instruction *SrcI, Instruction *DstI) {
  DependencyType RoughDepType = getRoughDepType(SrcI, DstI);
  switch (RoughDepType) {
  case DependencyType::ReadAfterWrite:
  case DependencyType::WriteAfterWrite:
  case DependencyType::WriteAfterRead:
    return alias(SrcI, DstI, RoughDepType);
  case DependencyType::Control:
    // Adding actual dep edges from PHIs/to terminator would just create too
    // many edges, which would be bad for compile-time.
    // So we ignore them in the DAG formation but handle them in the
    // scheduler, while sorting the ready list.
    return false;
  case DependencyType::Other:
    return true;
  case DependencyType::None:
    return false;
  }
}
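// Example query flow (hedged sketch): for SrcI = `store i32 0, ptr %p` and
// DstI = `%v = load i32, ptr %p`, getRoughDepType() reports ReadAfterWrite and
// alias() then asks AA whether the two locations may overlap. Within this
// class the result is consumed roughly as:
//   if (hasDep(SrcI, DstI))     // may-alias RAW/WAW/WAR, or fence-like
//     DstN.addMemPred(&SrcN);   // mirrors scanAndAddDeps() below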
void DependencyGraph::scanAndAddDeps(MemDGNode &DstN,
                                     const Interval<MemDGNode> &SrcScanRange) {
  assert(isa<MemDGNode>(DstN) &&
         "DstN is the mem dep destination, so it must be mem");
  Instruction *DstI = DstN.getInstruction();
  // Walk up the instruction chain from ScanRange bottom to top, looking for
  // memory instrs that may alias.
  for (MemDGNode &SrcN : reverse(SrcScanRange)) {
    Instruction *SrcI = SrcN.getInstruction();
    if (hasDep(SrcI, DstI))
      DstN.addMemPred(&SrcN);
  }
}
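// Scan-direction sketch (illustrative instructions, not from this file): for a
// destination DstN the sources are visited bottom-up, i.e. from the
// instruction closest to DstN towards the top of SrcScanRange:
//   SrcScanRange (program order):  store %a,  load %b,  store %c
//   visit order:                   store %c -> load %b -> store %a
// Every SrcI for which hasDep(SrcI, DstI) holds becomes a mem pred of DstN.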
void DependencyGraph::setDefUseUnscheduledSuccs(
    const Interval<Instruction> &NewInterval) {
  // Set the intra-interval counters in NewInterval.
  for (Instruction &I : NewInterval) {
    for (Value *Op : I.operands()) {
      auto *OpI = dyn_cast<Instruction>(Op);
      if (OpI == nullptr)
        continue;
      // TODO: For now don't cross BBs.
      if (OpI->getParent() != I.getParent())
        continue;
      if (!NewInterval.contains(OpI))
        continue;

      ++OpN->UnscheduledSuccs;
    }
  }
  // Now handle the cross-interval edges.
  bool NewIsAbove = DAGInterval.empty() || NewInterval.comesBefore(DAGInterval);
  const auto &TopInterval = NewIsAbove ? NewInterval : DAGInterval;
  const auto &BotInterval = NewIsAbove ? DAGInterval : NewInterval;

  // Walk over all instructions in "BotInterval" and update the counter
  // of operands that are in "TopInterval".
  for (Instruction &BotI : BotInterval) {
    // Skip scheduled nodes.
    if (BotN->scheduled())
      continue;
    for (Value *Op : BotI.operands()) {
      auto *OpI = dyn_cast<Instruction>(Op);
      if (!TopInterval.contains(OpI))
        continue;

      ++OpN->UnscheduledSuccs;
    }
  }
}
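// Counter semantics (sketch, based on the loops above): UnscheduledSuccs of an
// operand's node OpN counts its in-interval users that have not been scheduled
// yet. For example, if %a is used by %b and %c inside the interval:
//   %a = add ...       ; UnscheduledSuccs(%a) == 2
//   %b = mul %a, ...   ; scheduling %b drops it to 1
//   %c = sub %a, ...   ; scheduling %c drops it to 0, i.e. %a becomes ready
// The decrement on scheduling is assumed to happen in the scheduler, not here.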
void DependencyGraph::createNewNodes(const Interval<Instruction> &NewInterval) {
  // Create Nodes only for the new sections of the DAG.
  MemDGNode *LastMemN = dyn_cast<MemDGNode>(LastN);

  // Build the Mem node chain.
  if (auto *MemN = dyn_cast<MemDGNode>(N)) {
    MemN->setPrevNode(LastMemN);

  // Link new MemDGNode chain with the old one, if any.
  if (!DAGInterval.empty()) {
    bool NewIsAbove = NewInterval.comesBefore(DAGInterval);
    const auto &TopInterval = NewIsAbove ? NewInterval : DAGInterval;
    const auto &BotInterval = NewIsAbove ? DAGInterval : NewInterval;

    assert((LinkTopN == nullptr || LinkBotN == nullptr ||
            LinkTopN->comesBefore(LinkBotN)));
    if (LinkTopN != nullptr && LinkBotN != nullptr) {
      LinkTopN->setNextNode(LinkBotN);

  // TODO: Remove this once we've done enough testing.
  // Check that the chain is well formed.
  auto UnionIntvl = DAGInterval.getUnionInterval(NewInterval);
  if (ChainTopN != nullptr && ChainBotN != nullptr) {
    for (auto *N = ChainTopN->getNextNode(), *LastN = ChainTopN; N != nullptr;
         LastN = N, N = N->getNextNode()) {
      assert(N == LastN->getNextNode() && "Bad chain!");
      assert(N->getPrevNode() == LastN && "Bad chain!");
    }
  }

  setDefUseUnscheduledSuccs(NewInterval);
}
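// Resulting shape (illustrative): after createNewNodes() the MemDGNodes of the
// union interval form a single prev/next chain in program order, e.g. for
//   %ld = load ... ; %add = add ... ; store ...
// the chain is  load <-> store  (the add gets a plain DGNode, not a MemDGNode),
// and MemDGNodeIntervalBuilder::make() can later carve sub-ranges out of it.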
MemDGNode *DependencyGraph::getMemDGNodeBefore(DGNode *N, bool IncludingN,
                                               MemDGNode *SkipN) const {
  auto *I = N->getInstruction();
  for (auto *PrevI = IncludingN ? I : I->getPrevNode(); PrevI != nullptr;
       PrevI = PrevI->getPrevNode()) {
    auto *PrevMemN = dyn_cast<MemDGNode>(PrevN);
    if (PrevMemN != nullptr && PrevMemN != SkipN)
      return PrevMemN;
  }
  return nullptr;
}
MemDGNode *DependencyGraph::getMemDGNodeAfter(DGNode *N, bool IncludingN,
                                              MemDGNode *SkipN) const {
  auto *I = N->getInstruction();
  for (auto *NextI = IncludingN ? I : I->getNextNode(); NextI != nullptr;
       NextI = NextI->getNextNode()) {
    auto *NextMemN = dyn_cast<MemDGNode>(NextN);
    if (NextMemN != nullptr && NextMemN != SkipN)
      return NextMemN;
  }
  return nullptr;
}
void DependencyGraph::notifyCreateInstr(Instruction *I) {
  // TODO: Update the dependencies for the new node.

  // Update the MemDGNode chain if this is a memory node.
  if (auto *PrevMemN = getMemDGNodeBefore(MemN, /*IncludingN=*/false)) {
    PrevMemN->NextMemN = MemN;
    MemN->PrevMemN = PrevMemN;
  }
  if (auto *NextMemN = getMemDGNodeAfter(MemN, /*IncludingN=*/false)) {
    NextMemN->PrevMemN = MemN;
    MemN->NextMemN = NextMemN;
  }
}
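// Chain-splice sketch: creating a new memory instruction M between mem nodes P
// and N turns  P <-> N  into  P <-> M <-> N, using exactly the PrevMemN /
// NextMemN links updated above. Non-memory instructions leave the chain as is.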
void DependencyGraph::notifyMoveInstr(Instruction *I, const BBIterator &To) {
  // NOTE: This function runs before `I` moves to its new destination.
  assert(!(To != BB->end() && &*To == I->getNextNode()) &&
         !(To == BB->end() && std::next(I->getIterator()) == BB->end()) &&
         "Should not have been called if destination is same as origin.");

  // TODO: We can only handle fully internal movements within DAGInterval or at
  // the borders, i.e., right before the top or right after the bottom.
  assert(To.getNodeParent() == I->getParent() &&
         "TODO: We don't support movement across BBs!");
  assert((To == std::next(DAGInterval.bottom()->getIterator()) ||
          (To != BB->end() && std::next(To) == DAGInterval.top()->getIterator()) ||
          (To != BB->end() && DAGInterval.contains(&*To))) &&
         "TODO: To should be either within the DAGInterval or right "

  // Make a copy of the DAGInterval before we update it.
  auto OrigDAGInterval = DAGInterval;

  // Maintain the DAGInterval.
  DAGInterval.notifyMoveInstr(I, To);

  // TODO: Perhaps check if this is legal by checking the dependencies?

  // Update the MemDGNode chain to reflect the instr movement if necessary.
  // First safely detach it from the existing chain.
  MemN->detachFromChain();

  // Now insert it back into the chain at the new location.
  // We won't always have a DGNode to insert before it. If `To` is BB->end() or
  // if it points to an instr after DAGInterval.bottom() then we will have to
  // find a node to insert *after*.
  //
  //   I2   I2   | DAGInterval [I1 to I3]
  //   I4   I4   <- `To` == right after DAGInterval
  //             <- `To` == BB->end()
  if (To == BB->end() ||
      To == std::next(OrigDAGInterval.bottom()->getIterator())) {
    // If we don't have a node to insert before, find a node to insert after and
        getMemDGNodeBefore(InsertAfterN, /*IncludingN=*/true, /*SkipN=*/MemN));
  } else {
    // We have a node to insert before, so update the chain.
        getMemDGNodeBefore(BeforeToN, /*IncludingN=*/false, /*SkipN=*/MemN));
        getMemDGNodeAfter(BeforeToN, /*IncludingN=*/true, /*SkipN=*/MemN));
  }
}
void DependencyGraph::notifyEraseInstr(Instruction *I) {
  // Update the MemDGNode chain if this is a memory node.
  auto *PrevMemN = getMemDGNodeBefore(MemN, /*IncludingN=*/false);
  auto *NextMemN = getMemDGNodeAfter(MemN, /*IncludingN=*/false);
  if (PrevMemN != nullptr)
    PrevMemN->NextMemN = NextMemN;
  if (NextMemN != nullptr)
    NextMemN->PrevMemN = PrevMemN;

  InstrToNodeMap.erase(I);
  // TODO: Update the dependencies.
}

Interval<Instruction> DependencyGraph::extend(ArrayRef<Instruction *> Instrs) {
  auto NewInterval = Union.getSingleDiff(DAGInterval);
  if (NewInterval.empty())

  createNewNodes(NewInterval);

  // Create the dependencies.
  //
  // 1. This is a new DAG, DAGInterval is empty. Fully scan the whole interval.
  // |New|  v  |  | DstRange
  // We are scanning for deps with destination in NewInterval and sources in
  // NewInterval until DstN, for each DstN.
  auto FullScan = [this](const Interval<Instruction> &Intvl) {
    if (!DstRange.empty()) {
      scanAndAddDeps(DstN, SrcRange);

  if (DAGInterval.empty()) {
    assert(NewInterval == InstrsInterval && "Expected empty DAGInterval!");
    FullScan(NewInterval);
  }
  // 2. The new section is below the old section.
  // |New|  v  |  | DstRange
  // We are scanning for deps with destination in NewInterval because the deps
  // in DAGInterval have already been computed. We consider sources in the whole
  // range including both NewInterval and DAGInterval until DstN, for each DstN.
  else if (DAGInterval.bottom()->comesBefore(NewInterval.top())) {
        DAGInterval.getUnionInterval(NewInterval), *this);
      scanAndAddDeps(DstN, SrcRange);
  }
  // 3. The new section is above the old section.
  else if (NewInterval.bottom()->comesBefore(DAGInterval.top())) {
    // |New|  |  | SrcRange | DstRange
    // When scanning for deps with destination in NewInterval we need to fully
    // scan the interval. This is the same as the scanning for a new DAG.
    FullScan(NewInterval);

    // |New| SrcN | SrcRange
    // When scanning for deps with destination in DAGInterval we need to
    // consider sources from the NewInterval only, because all intra-DAGInterval
    // dependencies have already been created.
      scanAndAddDeps(DstN, SrcRange);
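// Assumed usage sketch (driver-side code, not part of this file): a scheduler
// grows the DAG incrementally by passing the instructions it is about to
// schedule to extend(); the three cases above cover a brand-new DAG and
// extensions below or above the existing DAGInterval. Instruction names are
// hypothetical:
//   auto Span = DAG.extend({I1, I2});  // new DAG covering [I1, I2]
//   auto Span2 = DAG.extend({I0});     // I0 above the DAG: case 3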
  // InstrToNodeMap is unordered so we need to create an ordered vector.
  Nodes.reserve(InstrToNodeMap.size());
  for (const auto &Pair : InstrToNodeMap)

  // Sort them based on which one comes first in the BB.

    N->print(OS, /*PrintDeps=*/true);
}

} // namespace llvm::sandboxir