//===-- WebAssemblyCFGSort.cpp - CFG Sorting ------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements a CFG sorting pass.
///
/// This pass reorders the blocks in a function to put them into topological
/// order, ignoring loop backedges, and without any loop or exception being
/// interrupted by a block not dominated by its header, with special care
/// to keep the order as similar as possible to the original order.
///
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/WebAssemblyMCTargetDesc.h"
#include "WebAssembly.h"
#include "WebAssemblyExceptionInfo.h"
#include "WebAssemblySubtarget.h"
#include "WebAssemblyUtilities.h"
#include "llvm/ADT/PriorityQueue.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "wasm-cfg-sort"

// Option to disable EH pad first sorting. Only for testing unwind destination
// mismatches in CFGStackify.
static cl::opt<bool> WasmDisableEHPadSort(
    "wasm-disable-ehpad-sort", cl::ReallyHidden,
    cl::desc(
        "WebAssembly: Disable EH pad-first sort order. Testing purpose only."),
    cl::init(false));

namespace {

// Wrapper for loops and exceptions
class Region {
public:
  virtual ~Region() = default;
  virtual MachineBasicBlock *getHeader() const = 0;
  virtual bool contains(const MachineBasicBlock *MBB) const = 0;
  virtual unsigned getNumBlocks() const = 0;
  using block_iterator = typename ArrayRef<MachineBasicBlock *>::const_iterator;
  virtual iterator_range<block_iterator> blocks() const = 0;
  virtual bool isLoop() const = 0;
};

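// Adapter that exposes either a MachineLoop or a WebAssemblyException through
// the Region interface above. The primary template reports isLoop() as false;
// the MachineLoop specialization below overrides it to return true.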
template <typename T> class ConcreteRegion : public Region {
  const T *Region;

public:
  ConcreteRegion(const T *Region) : Region(Region) {}
  MachineBasicBlock *getHeader() const override { return Region->getHeader(); }
  bool contains(const MachineBasicBlock *MBB) const override {
    return Region->contains(MBB);
  }
  unsigned getNumBlocks() const override { return Region->getNumBlocks(); }
  iterator_range<block_iterator> blocks() const override {
    return Region->blocks();
  }
  bool isLoop() const override { return false; }
};

template <> bool ConcreteRegion<MachineLoop>::isLoop() const { return true; }

// This class holds information about nested Regions; it is analogous to what
// LoopInfo is for loops.
class RegionInfo {
  const MachineLoopInfo &MLI;
  const WebAssemblyExceptionInfo &WEI;
  std::vector<const Region *> Regions;
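  // Cache the Region wrappers created in getRegionFor() so that repeated
  // queries for the same loop or exception return the same object.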
  DenseMap<const MachineLoop *, std::unique_ptr<Region>> LoopMap;
  DenseMap<const WebAssemblyException *, std::unique_ptr<Region>> ExceptionMap;

public:
  RegionInfo(const MachineLoopInfo &MLI, const WebAssemblyExceptionInfo &WEI)
      : MLI(MLI), WEI(WEI) {}

  // Returns the smallest loop or exception that contains MBB
  const Region *getRegionFor(const MachineBasicBlock *MBB) {
    const auto *ML = MLI.getLoopFor(MBB);
    const auto *WE = WEI.getExceptionFor(MBB);
    if (!ML && !WE)
      return nullptr;
    if ((ML && !WE) || (ML && WE && ML->getNumBlocks() < WE->getNumBlocks())) {
      // If the smallest region containing MBB is a loop
      if (LoopMap.count(ML))
        return LoopMap[ML].get();
      LoopMap[ML] = std::make_unique<ConcreteRegion<MachineLoop>>(ML);
      return LoopMap[ML].get();
    } else {
      // If the smallest region containing MBB is an exception
      if (ExceptionMap.count(WE))
        return ExceptionMap[WE].get();
      ExceptionMap[WE] =
          std::make_unique<ConcreteRegion<WebAssemblyException>>(WE);
      return ExceptionMap[WE].get();
    }
  }
};

class WebAssemblyCFGSort final : public MachineFunctionPass {
  StringRef getPassName() const override { return "WebAssembly CFG Sort"; }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<MachineDominatorTree>();
    AU.addPreserved<MachineDominatorTree>();
    AU.addRequired<MachineLoopInfo>();
    AU.addPreserved<MachineLoopInfo>();
    AU.addRequired<WebAssemblyExceptionInfo>();
    AU.addPreserved<WebAssemblyExceptionInfo>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

public:
  static char ID; // Pass identification, replacement for typeid
  WebAssemblyCFGSort() : MachineFunctionPass(ID) {}
};
} // end anonymous namespace

char WebAssemblyCFGSort::ID = 0;
INITIALIZE_PASS(WebAssemblyCFGSort, DEBUG_TYPE,
                "Reorders blocks in topological order", false, false)

FunctionPass *llvm::createWebAssemblyCFGSort() {
  return new WebAssemblyCFGSort();
}

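// Calls MBB->updateTerminator() only when all of MBB's terminators are
// analyzable (non-indirect) branches. Otherwise the block is left alone; the
// assert below checks that such a block contains a barrier and therefore has
// no fallthrough to update.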
static void maybeUpdateTerminator(MachineBasicBlock *MBB) {
#ifndef NDEBUG
  bool AnyBarrier = false;
#endif
  bool AllAnalyzable = true;
  for (const MachineInstr &Term : MBB->terminators()) {
#ifndef NDEBUG
    AnyBarrier |= Term.isBarrier();
#endif
    AllAnalyzable &= Term.isBranch() && !Term.isIndirectBranch();
  }
  assert((AnyBarrier || AllAnalyzable) &&
         "AnalyzeBranch needs to analyze any block with a fallthrough");
  if (AllAnalyzable)
    MBB->updateTerminator();
}

namespace {
// EH pads are selected first regardless of the block comparison order.
// When only one of the BBs is an EH pad, we give a higher priority to it, to
// prevent common mismatches between possibly throwing calls and the EH pads
// they unwind to, as in the example below:
//
// bb0:
//   call @foo      // If this throws, unwind to bb2
// bb1:
//   call @bar      // If this throws, unwind to bb3
// bb2 (ehpad):
//   handler_bb2
// bb3 (ehpad):
//   handler_bb3
// continuing code
//
// Because this pass tries to preserve the original BB order, this order will
// not change. But then CFGStackify will generate this try-catch structure,
// which has a mismatch:
// try
//   try
//     call @foo
//     call @bar    // This should unwind to bb3, not bb2!
//   catch
//     handler_bb2
//   end
// catch
//   handler_bb3
// end
// continuing code
//
// If we instead give a higher priority to an EH pad whenever it is ready, then
// in this example, when both bb1 and bb2 are ready, we pick bb2 first.

/// Sort blocks by their number.
struct CompareBlockNumbers {
  bool operator()(const MachineBasicBlock *A,
                  const MachineBasicBlock *B) const {
    if (!WasmDisableEHPadSort) {
      if (A->isEHPad() && !B->isEHPad())
        return false;
      if (!A->isEHPad() && B->isEHPad())
        return true;
    }

    return A->getNumber() > B->getNumber();
  }
};
/// Sort blocks by their number in the opposite order.
struct CompareBlockNumbersBackwards {
  bool operator()(const MachineBasicBlock *A,
                  const MachineBasicBlock *B) const {
    if (!WasmDisableEHPadSort) {
      if (A->isEHPad() && !B->isEHPad())
        return false;
      if (!A->isEHPad() && B->isEHPad())
        return true;
    }

    return A->getNumber() < B->getNumber();
  }
};
/// Bookkeeping for a region to help ensure that we don't mix blocks not
/// dominated by its header among its blocks.
struct Entry {
  const Region *TheRegion;
  unsigned NumBlocksLeft;

  /// List of blocks not dominated by the region's header that are deferred
  /// until after all of the region's blocks have been seen.
  std::vector<MachineBasicBlock *> Deferred;

  explicit Entry(const class Region *R)
      : TheRegion(R), NumBlocksLeft(R->getNumBlocks()) {}
};
} // end anonymous namespace

/// Sort the blocks, taking special care to make sure that regions are not
/// interrupted by blocks not dominated by their header.
/// TODO: There are many opportunities for improving the heuristics here.
/// Explore them.
static void sortBlocks(MachineFunction &MF, const MachineLoopInfo &MLI,
                       const WebAssemblyExceptionInfo &WEI,
                       const MachineDominatorTree &MDT) {
  // Prepare for a topological sort: Record the number of predecessors each
  // block has, ignoring loop backedges.
  MF.RenumberBlocks();
  SmallVector<unsigned, 16> NumPredsLeft(MF.getNumBlockIDs(), 0);
  for (MachineBasicBlock &MBB : MF) {
    unsigned N = MBB.pred_size();
    if (MachineLoop *L = MLI.getLoopFor(&MBB))
      if (L->getHeader() == &MBB)
        for (const MachineBasicBlock *Pred : MBB.predecessors())
          if (L->contains(Pred))
            --N;
    NumPredsLeft[MBB.getNumber()] = N;
  }

  // Topologically sort the CFG, with additional constraints:
  //  - Between a region header and the last block in the region, there can be
  //    no blocks not dominated by its header.
  //  - It's desirable to preserve the original block order when possible.
  // We use two ready lists: Preferred and Ready. Preferred has recently
  // processed successors, to help preserve block sequences from the original
  // order. Ready has the remaining ready blocks. EH pads are picked first
  // from both queues.
  PriorityQueue<MachineBasicBlock *, std::vector<MachineBasicBlock *>,
                CompareBlockNumbers>
      Preferred;
  PriorityQueue<MachineBasicBlock *, std::vector<MachineBasicBlock *>,
                CompareBlockNumbersBackwards>
      Ready;
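  // With the EH-pad preference applied first, Preferred pops the
  // lowest-numbered ready block, while Ready pops the highest-numbered one
  // (see the comparators above).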

  RegionInfo RI(MLI, WEI);
  SmallVector<Entry, 4> Entries;
  for (MachineBasicBlock *MBB = &MF.front();;) {
    const Region *R = RI.getRegionFor(MBB);
    if (R) {
      // If MBB is a region header, add it to the active region list. We can't
      // place any blocks that it doesn't dominate until we see the end of the
      // region.
      if (R->getHeader() == MBB)
        Entries.push_back(Entry(R));
      // For each active region the block is in, decrement the count. If MBB is
      // the last block in an active region, take it off the list and pick up
      // any blocks deferred because the header didn't dominate them.
      for (Entry &E : Entries)
        if (E.TheRegion->contains(MBB) && --E.NumBlocksLeft == 0)
          for (auto DeferredBlock : E.Deferred)
            Ready.push(DeferredBlock);
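      // Regions whose blocks have all been visited are finished; pop them off
      // the active list.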
      while (!Entries.empty() && Entries.back().NumBlocksLeft == 0)
        Entries.pop_back();
    }
    // The main topological sort logic.
    for (MachineBasicBlock *Succ : MBB->successors()) {
      // Ignore backedges.
      if (MachineLoop *SuccL = MLI.getLoopFor(Succ))
        if (SuccL->getHeader() == Succ && SuccL->contains(MBB))
          continue;
      // Decrement the predecessor count. If it's now zero, it's ready.
      if (--NumPredsLeft[Succ->getNumber()] == 0)
        Preferred.push(Succ);
    }
    // Determine the block to follow MBB. First try to find a preferred block,
    // to preserve the original block order when possible.
    MachineBasicBlock *Next = nullptr;
    while (!Preferred.empty()) {
      Next = Preferred.top();
      Preferred.pop();
      // If Next isn't dominated by the top active region header, defer it
      // until that region is done.
      if (!Entries.empty() &&
          !MDT.dominates(Entries.back().TheRegion->getHeader(), Next)) {
        Entries.back().Deferred.push_back(Next);
        Next = nullptr;
        continue;
      }
      // If Next was originally ordered before MBB, and it isn't because it was
      // loop-rotated above the header, it's not preferred.
      if (Next->getNumber() < MBB->getNumber() &&
          (WasmDisableEHPadSort || !Next->isEHPad()) &&
          (!R || !R->contains(Next) ||
           R->getHeader()->getNumber() < Next->getNumber())) {
        Ready.push(Next);
        Next = nullptr;
        continue;
      }
      break;
    }
    // If we didn't find a suitable block in the Preferred list, check the
    // general Ready list.
    if (!Next) {
      // If there are no more blocks to process, we're done.
      if (Ready.empty()) {
        maybeUpdateTerminator(MBB);
        break;
      }
      for (;;) {
        Next = Ready.top();
        Ready.pop();
        // If Next isn't dominated by the top active region header, defer it
        // until that region is done.
        if (!Entries.empty() &&
            !MDT.dominates(Entries.back().TheRegion->getHeader(), Next)) {
          Entries.back().Deferred.push_back(Next);
          continue;
        }
        break;
      }
    }
    // Move the next block into place and iterate.
    Next->moveAfter(MBB);
    maybeUpdateTerminator(MBB);
    MBB = Next;
  }
  assert(Entries.empty() && "Active sort region list not finished");
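  // Renumber the blocks so their numbers match the new layout order; the
  // verification code below depends on this.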
  MF.RenumberBlocks();

#ifndef NDEBUG
  SmallSetVector<const Region *, 8> OnStack;

  // Insert a sentinel representing the degenerate loop that starts at the
  // function entry block and includes the entire function as a "loop" that
  // executes once.
  OnStack.insert(nullptr);

  for (auto &MBB : MF) {
    assert(MBB.getNumber() >= 0 && "Renumbered blocks should be non-negative.");
    const Region *Region = RI.getRegionFor(&MBB);

    if (Region && &MBB == Region->getHeader()) {
      if (Region->isLoop()) {
        // Loop header. The loop predecessor should be sorted above, and the
        // other predecessors should be backedges below.
        for (auto Pred : MBB.predecessors())
          assert(
              (Pred->getNumber() < MBB.getNumber() || Region->contains(Pred)) &&
              "Loop header predecessors must be loop predecessors or "
              "backedges");
      } else {
        // Exception header. All predecessors should be sorted above.
        for (auto Pred : MBB.predecessors())
          assert(Pred->getNumber() < MBB.getNumber() &&
                 "Non-loop-header predecessors should be topologically sorted");
      }
      assert(OnStack.insert(Region) &&
             "Regions should be declared at most once.");
    } else {
      // Not a region header. All predecessors should be sorted above.
      for (auto Pred : MBB.predecessors())
        assert(Pred->getNumber() < MBB.getNumber() &&
               "Non-loop-header predecessors should be topologically sorted");
      assert(OnStack.count(RI.getRegionFor(&MBB)) &&
             "Blocks must be nested in their regions");
    }
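    // Pop every region whose bottom (last) block is MBB; those regions end
    // here.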
    while (OnStack.size() > 1 && &MBB == WebAssembly::getBottom(OnStack.back()))
      OnStack.pop_back();
  }
  assert(OnStack.pop_back_val() == nullptr &&
         "The function entry block shouldn't actually be a region header");
  assert(OnStack.empty() &&
         "Control flow stack pushes and pops should be balanced.");
#endif
}

bool WebAssemblyCFGSort::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** CFG Sorting **********\n"
                       "********** Function: "
                    << MF.getName() << '\n');

  const auto &MLI = getAnalysis<MachineLoopInfo>();
  const auto &WEI = getAnalysis<WebAssemblyExceptionInfo>();
  auto &MDT = getAnalysis<MachineDominatorTree>();
  // Liveness is not tracked for VALUE_STACK physreg.
  MF.getRegInfo().invalidateLiveness();

  // Sort the blocks, with contiguous sort regions.
  sortBlocks(MF, MLI, WEI, MDT);

  return true;
}