Lines Matching refs:MemoryAccess (only in /freebsd-13-stable/contrib/llvm-project/llvm/lib/Analysis/)

109 if (MemoryAccess *MA = MSSA->getMemoryAccess(BB))
115 if (MemoryAccess *MA = MSSA->getMemoryAccess(I))
344 // The MemoryAccess we actually got called with, used to test local domination
345 const MemoryAccess *OriginalAccess = nullptr;
351 UpwardsMemoryQuery(const Instruction *Inst, const MemoryAccess *Access)
390 /// \param Start The MemoryAccess that we want to walk from.
400 checkClobberSanity(const MemoryAccess *Start, MemoryAccess *ClobberAt,
469 upward_defs_begin({const_cast<MemoryAccess *>(MA), MAP.second},
503 MemoryAccess *First;
504 MemoryAccess *Last;
507 DefPath(const MemoryLocation &Loc, MemoryAccess *First, MemoryAccess *Last,
511 DefPath(const MemoryLocation &Loc, MemoryAccess *Init,
527 const MemoryAccess *getWalkTarget(const MemoryPhi *From) const {
531 MemoryAccess *Result = MSSA.getLiveOnEntryDef();
545 MemoryAccess *Result;
556 walkToPhiOrClobber(DefPath &Desc, const MemoryAccess *StopAt = nullptr,
557 const MemoryAccess *SkipStopAt = nullptr) const {
570 for (MemoryAccess *Current : def_chain(Desc.Last)) {
611 MemoryAccess *Clobber;
624 getBlockingAccess(const MemoryAccess *StopWhere,
657 const MemoryAccess *SkipStopWhere = nullptr;
764 /// A path is a series of {MemoryAccess, MemoryLocation} pairs. A path
765 /// terminates when a MemoryAccess that clobbers said MemoryLocation is found.
766 OptznResult tryOptimizePhi(MemoryPhi *Phi, MemoryAccess *Start,
861 MemoryAccess *DefChainEnd = nullptr;
876 for (auto *MA : def_chain(const_cast<MemoryAccess *>(Target)))
933 MemoryAccess *findClobber(MemoryAccess *Start, UpwardsMemoryQuery &Q,
941 MemoryAccess *Current = Start;
951 MemoryAccess *Result;
974 MemoryAccess *IncomingVal;
977 MemoryAccess *M)
999 MemoryAccess *getClobberingMemoryAccessBase(MemoryAccess *,
1008 MemoryAccess *getClobberingMemoryAccessBase(MemoryAccess *, unsigned &, bool);
1025 MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA, unsigned &UWL) {
1028 MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA,
1034 MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA) override {
1038 MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA,
1044 void invalidateInfo(MemoryAccess *MA) override {
1061 MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA, unsigned &UWL) {
1064 MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA,
1070 MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA) override {
1074 MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA,
1080 void invalidateInfo(MemoryAccess *MA) override {
1088 void MemorySSA::renameSuccessorPhis(BasicBlock *BB, MemoryAccess *IncomingVal,
1115 MemoryAccess *MemorySSA::renameBlock(BasicBlock *BB, MemoryAccess *IncomingVal,
1121 for (MemoryAccess &L : *Accesses) {
1139 void MemorySSA::renamePass(DomTreeNode *Root, MemoryAccess *IncomingVal,
1187 /// unreachable blocks, and marking all other unreachable MemoryAccess's as
1247 for (MemoryAccess &MA : *Pair.second)
1304 SmallVectorImpl<MemoryAccess *> &,
1329 SmallVectorImpl<MemoryAccess *> &VersionStack,
1351 for (MemoryAccess &MA : *Accesses) {
1430 MemoryAccess *Result =
1482 SmallVector<MemoryAccess *, 16> VersionStack;
1596 void MemorySSA::insertIntoListsForBlock(MemoryAccess *NewAccess,
1609 *Accesses, [](const MemoryAccess &MA) { return isa<MemoryPhi>(MA); });
1614 *Defs, [](const MemoryAccess &MA) { return isa<MemoryPhi>(MA); });
1628 void MemorySSA::insertIntoListsBefore(MemoryAccess *What, const BasicBlock *BB,
1656 void MemorySSA::prepareForMoveTo(MemoryAccess *What, BasicBlock *BB) {
1678 void MemorySSA::moveTo(MemoryAccess *What, BasicBlock *BB,
1704 MemoryAccess *Definition,
1793 bool MemorySSA::dominatesUse(const MemoryAccess *Replacer,
1794 const MemoryAccess *Replacee) const {
1810 void MemorySSA::removeFromLookups(MemoryAccess *MA) {
1837 void MemorySSA::removeFromLists(MemoryAccess *MA, bool ShouldDelete) {
1944 for (const MemoryAccess &MA : *Accesses) {
1947 "MemoryAccess has no domination number in a valid block!");
1972 SmallVector<MemoryAccess *, 32> ActualAccesses;
1973 SmallVector<MemoryAccess *, 32> ActualDefs;
2008 if (MemoryAccess *MD = dyn_cast<MemoryDef>(MA)) {
2059 void MemorySSA::verifyUseInDefs(MemoryAccess *Def, MemoryAccess *Use) const {
2090 bool MemorySSA::locallyDominates(const MemoryAccess *Dominator,
2091 const MemoryAccess *Dominatee) const {
2121 bool MemorySSA::dominates(const MemoryAccess *Dominator,
2122 const MemoryAccess *Dominatee) const {
2134 bool MemorySSA::dominates(const MemoryAccess *Dominator,
2142 return locallyDominates(Dominator, cast<MemoryAccess>(Dominatee));
2145 return dominates(Dominator, cast<MemoryAccess>(Dominatee.getUser()));
2150 void MemoryAccess::print(raw_ostream &OS) const {
2160 MemoryAccess *UO = getDefiningAccess();
2162 auto printID = [&OS](MemoryAccess *A) {
2187 MemoryAccess *MA = cast<MemoryAccess>(Op);
2209 MemoryAccess *UO = getDefiningAccess();
2221 void MemoryAccess::dump() const {
2314 /// the MemoryAccess that actually clobbers Loc.
2318 MemoryAccess *
2320 MemoryAccess *StartingAccess, const MemoryLocation &Loc,
2345 MemoryAccess *DefiningAccess = isa<MemoryUse>(StartingUseOrDef)
2349 MemoryAccess *Clobber =
2359 MemoryAccess *
2361 MemoryAccess *MA, unsigned &UpwardWalkLimit, bool SkipSelf) {
2388 MemoryAccess *LiveOnEntry = MSSA->getLiveOnEntryDef();
2394 MemoryAccess *OptimizedAccess;
2397 MemoryAccess *DefiningAccess = StartingAccess->getDefiningAccess();
2421 MemoryAccess *Result;
2436 MemoryAccess *
2437 DoNothingMemorySSAWalker::getClobberingMemoryAccess(MemoryAccess *MA) {
2443 MemoryAccess *DoNothingMemorySSAWalker::getClobberingMemoryAccess(
2444 MemoryAccess *StartingAccess, const MemoryLocation &) {
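
The matches above center on MemorySSA's clobber-walking interface (getMemoryAccess, getClobberingMemoryAccess, getLiveOnEntryDef). As a minimal illustrative sketch, not taken from the indexed file itself, this is how a client typically queries that interface; the helper name findClobberFor is hypothetical.

    #include "llvm/Analysis/MemorySSA.h"
    #include "llvm/IR/Instruction.h"

    using namespace llvm;

    // Hypothetical helper: return the access that clobbers the memory touched
    // by I, or nullptr if I has no MemoryUse/MemoryDef or nothing in the
    // function clobbers its location.
    static MemoryAccess *findClobberFor(MemorySSA &MSSA, Instruction *I) {
      MemoryUseOrDef *MA = MSSA.getMemoryAccess(I);
      if (!MA)
        return nullptr; // MemorySSA tracks no memory effect for I.

      // The walker drives the upward walk implemented in this file
      // (findClobber / getClobberingMemoryAccessBase in the matches above).
      MemoryAccess *Clobber = MSSA.getWalker()->getClobberingMemoryAccess(MA);

      // liveOnEntry means no access inside the function clobbers the location.
      if (MSSA.isLiveOnEntryDef(Clobber))
        return nullptr;
      return Clobber;
    }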