//===- ConcatOutputSection.cpp --------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "ConcatOutputSection.h"
#include "Config.h"
#include "OutputSegment.h"
#include "SymbolTable.h"
#include "Symbols.h"
#include "SyntheticSections.h"
#include "Target.h"
#include "lld/Common/CommonLinkerContext.h"
#include "llvm/BinaryFormat/MachO.h"
#include "llvm/Support/ScopedPrinter.h"
#include "llvm/Support/TimeProfiler.h"

using namespace llvm;
using namespace llvm::MachO;
using namespace lld;
using namespace lld::macho;

MapVector<NamePair, ConcatOutputSection *> macho::concatOutputSections;

void ConcatOutputSection::addInput(ConcatInputSection *input) {
  assert(input->parent == this);
  if (inputs.empty()) {
    align = input->align;
    flags = input->getFlags();
  } else {
    align = std::max(align, input->align);
    finalizeFlags(input);
  }
  inputs.push_back(input);
}

// Branch-range extension can be implemented in two ways, either through ...
//
// (1) Branch islands: Single branch instructions (also of limited range)
//     that might be chained in multiple hops to reach the desired
//     destination. On ARM64, as many as 16 branch islands are needed to hop
//     between opposite ends of a 2 GiB program. ld64 uses branch islands
//     exclusively, even when excessive hops are required.
//
// (2) Thunks: Instruction(s) to load the destination address into a scratch
//     register, followed by a register-indirect branch. Thunks are
//     constructed to reach any arbitrary address, so they need not be
//     chained. However, a large program might still need multiple thunks to
//     the same destination, distributed throughout the program so that every
//     call site has one within range.
//
// The optimal approach is to mix islands for destinations within two hops,
// and use thunks for destinations at greater distance. For now, we only
// implement thunks. TODO: Add support for branch islands!
//
// Internally -- as expressed in LLD's data structures -- a
// branch-range-extension thunk consists of:
//
// (1) new Defined symbol for the thunk named
//     <FUNCTION>.thunk.<SEQUENCE>, which references ...
// (2) new InputSection, which contains ...
// (3.1) new data for the instructions to load & branch to the far address +
// (3.2) new Relocs on instructions to load the far address, which reference ...
// (4.1) existing Defined symbol for the real function in __text, or
// (4.2) existing DylibSymbol for the real function in a dylib
//
// Nearly-optimal thunk-placement algorithm features:
//
// * Single pass: O(n) on the number of call sites.
//
// * Accounts for the exact space overhead of thunks - no heuristics
//
// * Exploits the full range of call instructions - forward & backward
//
// Data:
//
// * DenseMap<Symbol *, ThunkInfo> thunkMap: Maps the function symbol
//   to its thunk bookkeeper.
//
// * struct ThunkInfo (bookkeeper): Call instructions have limited range, and
//   distant call sites might be unable to reach the same thunk, so multiple
//   thunks are necessary to serve all call sites in a very large program. A
//   thunkInfo stores state for all thunks associated with a particular
//   function:
//     (a) thunk symbol
//     (b) input section containing stub code, and
//     (c) sequence number for the active thunk incarnation.
//   When an old thunk goes out of range, we increment the sequence number and
//   create a new thunk named <FUNCTION>.thunk.<SEQUENCE>.
//
// * A thunk consists of
//     (a) a Defined symbol pointing to
//     (b) an InputSection holding machine code (similar to a MachO stub), and
//     (c) relocs referencing the real function for fixing up the stub code
//   (see the ARM64 sketch at the end of this comment).
//
// * std::vector<ConcatInputSection *> TextOutputSection::thunks: A vector
//   parallel to the inputs vector. We store new thunks via cheap vector
//   append, rather than costly insertion into the inputs vector.
//
// Control Flow:
//
// * During address assignment, TextOutputSection::finalize() examines call
//   sites by ascending address and creates thunks. When a function is beyond
//   the range of a call site, we need a thunk. Place it at the largest
//   available forward address from the call site. Call sites increase
//   monotonically and thunks are always placed as far forward as possible;
//   thus, we place thunks at monotonically increasing addresses. Once a thunk
//   is placed, it and all previous input-section addresses are final.
//
// * TextOutputSection::finalize() and TextOutputSection::writeTo() merge
//   the inputs and thunks vectors (both ordered by ascending address), which
//   is simple and cheap.
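//
// For concreteness, here is a rough sketch (an illustration, not the exact
// encoding -- that lives in the arch-specific populateThunk()) of what a
// single ARM64 thunk looks like:
//
//   <FUNCTION>.thunk.<SEQUENCE>:
//     adrp x16, <FUNCTION>@page          ; load page address of the target
//     add  x16, x16, <FUNCTION>@pageoff  ; add the page offset
//     br   x16                           ; indirect branch: unlimited range
//
// Three 4-byte instructions match the 12-byte ARM64 thunk size mentioned in
// finalize() below.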

DenseMap<Symbol *, ThunkInfo> lld::macho::thunkMap;

// Determine whether we need thunks, which depends on the target arch -- RISC
// (e.g., ARM) generally does because it has limited-range branch/call
// instructions, whereas CISC (e.g., x86) generally doesn't. Even on RISC,
// thunks are only needed for programs so large that branch source &
// destination addresses might differ by more than the range of the branch
// instruction(s).
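// For a sense of scale, assuming an ARM64 target: b/bl encode a 26-bit
// signed word offset, i.e. roughly +/-128 MiB of reach, so thunk insertion
// only kicks in once __text plus __stubs outgrows the smaller of the forward
// and backward branch ranges -- which is exactly the early-exit check below.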
bool TextOutputSection::needsThunks() const {
  if (!target->usesThunks())
    return false;
  uint64_t isecAddr = addr;
  for (ConcatInputSection *isec : inputs)
    isecAddr = alignTo(isecAddr, isec->align) + isec->getSize();
  if (isecAddr - addr + in.stubs->getSize() <=
      std::min(target->backwardBranchRange, target->forwardBranchRange))
    return false;
  // Yes, this program is large enough to need thunks.
  for (ConcatInputSection *isec : inputs) {
    for (Reloc &r : isec->relocs) {
      if (!target->hasAttr(r.type, RelocAttrBits::BRANCH))
        continue;
      auto *sym = r.referent.get<Symbol *>();
      // Pre-populate the thunkMap and memoize call site counts for every
      // InputSection and ThunkInfo. We do this for the benefit of
      // estimateStubsInRangeVA().
      ThunkInfo &thunkInfo = thunkMap[sym];
      // Knowing each ThunkInfo's call site count lets estimateStubsInRangeVA()
      // judge whether more thunks might still be needed for this referent
      // when it estimates the distance to __stubs.
      ++thunkInfo.callSiteCount;
      // We can avoid work on InputSections that have no BRANCH relocs.
      isec->hasCallSites = true;
    }
  }
  return true;
}

// Since __stubs is placed after __text, we must estimate the address
// beyond which stubs are within range of a simple forward branch.
// This is called exactly once, when the last input section has been finalized.
uint64_t TextOutputSection::estimateStubsInRangeVA(size_t callIdx) const {
  // Tally the functions which still have call sites remaining to process,
  // which yields the maximum number of thunks we might yet place.
  size_t maxPotentialThunks = 0;
  for (auto &tp : thunkMap) {
    ThunkInfo &ti = tp.second;
    // This overcounts: Only sections that are in forward jump range from the
    // currently-active section get finalized, and all input sections are
    // finalized when estimateStubsInRangeVA() is called. So only backward
    // jumps will need thunks, but we count all jumps.
    if (ti.callSitesUsed < ti.callSiteCount)
      maxPotentialThunks += 1;
  }
  // Tally the total size of input sections remaining to process.
  uint64_t isecVA = inputs[callIdx]->getVA();
  uint64_t isecEnd = isecVA;
  for (size_t i = callIdx; i < inputs.size(); i++) {
    InputSection *isec = inputs[i];
    isecEnd = alignTo(isecEnd, isec->align) + isec->getSize();
  }
  // Estimate the address after which call sites can safely call stubs
  // directly rather than through intermediary thunks.
  uint64_t forwardBranchRange = target->forwardBranchRange;
  assert(isecEnd > forwardBranchRange &&
         "should not run thunk insertion if all code fits in jump range");
  assert(isecEnd - isecVA <= forwardBranchRange &&
         "should only finalize sections in jump range");
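  // Derivation of the threshold (a restatement of the computation below): in
  // the worst case, __stubs ends at
  //   isecEnd + maxPotentialThunks * thunkSize + in.stubs->getSize().
  // A call site at callVA reaches that end with a plain forward branch iff
  // callVA + forwardBranchRange >= that end, so every call site at or beyond
  // (worst-case end - forwardBranchRange) can reach all stubs directly.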
  uint64_t stubsInRangeVA = isecEnd + maxPotentialThunks * target->thunkSize +
                            in.stubs->getSize() - forwardBranchRange;
  log("thunks = " + std::to_string(thunkMap.size()) +
      ", potential = " + std::to_string(maxPotentialThunks) +
      ", stubs = " + std::to_string(in.stubs->getSize()) + ", isecVA = " +
      utohexstr(isecVA) + ", threshold = " + utohexstr(stubsInRangeVA) +
      ", isecEnd = " + utohexstr(isecEnd) +
      ", tail = " + utohexstr(isecEnd - isecVA) +
      ", slop = " + utohexstr(forwardBranchRange - (isecEnd - isecVA)));
  return stubsInRangeVA;
}

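// Account for a single input section: pad the running size and file size up
// to the section's alignment, record its offset within this output section,
// and mark it final.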
void ConcatOutputSection::finalizeOne(ConcatInputSection *isec) {
  size = alignTo(size, isec->align);
  fileSize = alignTo(fileSize, isec->align);
  isec->outSecOff = size;
  isec->isFinal = true;
  size += isec->getSize();
  fileSize += isec->getFileSize();
}

void ConcatOutputSection::finalizeContents() {
  for (ConcatInputSection *isec : inputs)
    finalizeOne(isec);
}

void TextOutputSection::finalize() {
  if (!needsThunks()) {
    for (ConcatInputSection *isec : inputs)
      finalizeOne(isec);
    return;
  }

  uint64_t forwardBranchRange = target->forwardBranchRange;
  uint64_t backwardBranchRange = target->backwardBranchRange;
  uint64_t stubsInRangeVA = TargetInfo::outOfRangeVA;
  size_t thunkSize = target->thunkSize;
  size_t relocCount = 0;
  size_t callSiteCount = 0;
  size_t thunkCallCount = 0;
  size_t thunkCount = 0;

  // Walk all sections in order. Finalize every section that lies less than
  // forwardBranchRange ahead of the current one.
  // isecVA is the address of the current section.
  // addr + size is the start address of the first non-finalized section.

  // inputs[finalIdx] is for finalization (address-assignment)
  size_t finalIdx = 0;
  // Kick off by ensuring that the first input section has an address
  for (size_t callIdx = 0, endIdx = inputs.size(); callIdx < endIdx;
       ++callIdx) {
    if (finalIdx == callIdx)
      finalizeOne(inputs[finalIdx++]);
    ConcatInputSection *isec = inputs[callIdx];
    assert(isec->isFinal);
    uint64_t isecVA = isec->getVA();

    // Assign addresses up to the forward branch-range limit.
    // Every call instruction needs a small number of bytes (on Arm64: 4),
    // and each inserted thunk needs a slightly larger number of bytes
    // (on Arm64: 12). If a section starts with a branch instruction and
    // contains several branch instructions in succession, then the distance
    // from the current position to the position where the thunks are inserted
    // grows. So leave room for a bunch of thunks.
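    // As a ballpark, using the Arm64 numbers above: 1024 * 12 bytes = 12 KiB
    // of slack, i.e. room for up to 1024 thunks between the call site and the
    // farthest section we are willing to finalize.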
    unsigned slop = 1024 * thunkSize;
    while (finalIdx < endIdx && addr + size + inputs[finalIdx]->getSize() <
                                    isecVA + forwardBranchRange - slop)
      finalizeOne(inputs[finalIdx++]);

    if (!isec->hasCallSites)
      continue;

    if (finalIdx == endIdx && stubsInRangeVA == TargetInfo::outOfRangeVA) {
      // When we have finalized all input sections, __stubs (destined
      // to follow __text) comes within range of forward branches and
      // we can estimate the threshold address after which we can
      // reach any stub with a forward branch. Note that although it
      // sits in the middle of a loop, this code executes only once.
      // It is in the loop because we need to call it at the proper
      // time: the earliest call site from which the end of __text
      // (and start of __stubs) comes within range of a forward branch.
      stubsInRangeVA = estimateStubsInRangeVA(callIdx);
    }
    // Process relocs by ascending address, i.e., ascending offset within isec
    std::vector<Reloc> &relocs = isec->relocs;
    // FIXME: This property does not hold for object files produced by ld64's
    // `-r` mode.
    assert(is_sorted(relocs,
                     [](Reloc &a, Reloc &b) { return a.offset > b.offset; }));
    for (Reloc &r : reverse(relocs)) {
      ++relocCount;
      if (!target->hasAttr(r.type, RelocAttrBits::BRANCH))
        continue;
      ++callSiteCount;
      // Calculate branch reachability boundaries
      uint64_t callVA = isecVA + r.offset;
      uint64_t lowVA =
          backwardBranchRange < callVA ? callVA - backwardBranchRange : 0;
      uint64_t highVA = callVA + forwardBranchRange;
      // Calculate our call referent address
      auto *funcSym = r.referent.get<Symbol *>();
      ThunkInfo &thunkInfo = thunkMap[funcSym];
      // The referent is not reachable, so we need to use a thunk ...
      if (funcSym->isInStubs() && callVA >= stubsInRangeVA) {
        assert(callVA != TargetInfo::outOfRangeVA);
        // ... Oh, wait! We are close enough to the end that __stubs
        // are now within range of a simple forward branch.
        continue;
      }
      uint64_t funcVA = funcSym->resolveBranchVA();
      ++thunkInfo.callSitesUsed;
      if (lowVA <= funcVA && funcVA <= highVA) {
        // The referent is reachable with a simple call instruction.
        continue;
      }
      ++thunkInfo.thunkCallCount;
      ++thunkCallCount;
      // If an existing thunk is reachable, use it ...
      if (thunkInfo.sym) {
        uint64_t thunkVA = thunkInfo.isec->getVA();
        if (lowVA <= thunkVA && thunkVA <= highVA) {
          r.referent = thunkInfo.sym;
          continue;
        }
      }
      // ... otherwise, create a new thunk.
      if (addr + size > highVA) {
        // There were too many consecutive branch instructions for `slop`
        // above. If you hit this: For the current algorithm, just bumping up
        // slop above and trying again is probably simplest. (See also PR51578
        // comment 5).
        fatal(Twine(__FUNCTION__) + ": FIXME: thunk range overrun");
      }
      thunkInfo.isec =
          makeSyntheticInputSection(isec->getSegName(), isec->getName());
      thunkInfo.isec->parent = this;

      // This code runs after dead code removal, so set the `live` bit on the
      // thunk isec to satisfy the asserts that check that only live sections
      // get written.
      thunkInfo.isec->live = true;

      StringRef thunkName = saver().save(funcSym->getName() + ".thunk." +
                                         std::to_string(thunkInfo.sequence++));
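      // If the branch target is not a Defined (e.g. it lives in a dylib) or
      // is an external Defined, go through the symbol table so the thunk
      // symbol is registered there; otherwise a standalone private-extern
      // Defined is constructed directly.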
      if (!isa<Defined>(funcSym) || cast<Defined>(funcSym)->isExternal()) {
        r.referent = thunkInfo.sym = symtab->addDefined(
            thunkName, /*file=*/nullptr, thunkInfo.isec, /*value=*/0, thunkSize,
            /*isWeakDef=*/false, /*isPrivateExtern=*/true,
            /*isThumb=*/false, /*isReferencedDynamically=*/false,
            /*noDeadStrip=*/false, /*isWeakDefCanBeHidden=*/false);
      } else {
        r.referent = thunkInfo.sym = make<Defined>(
            thunkName, /*file=*/nullptr, thunkInfo.isec, /*value=*/0, thunkSize,
            /*isWeakDef=*/false, /*isExternal=*/false, /*isPrivateExtern=*/true,
            /*includeInSymtab=*/true, /*isThumb=*/false,
            /*isReferencedDynamically=*/false, /*noDeadStrip=*/false,
            /*isWeakDefCanBeHidden=*/false);
      }
      thunkInfo.sym->used = true;
      target->populateThunk(thunkInfo.isec, funcSym);
      finalizeOne(thunkInfo.isec);
      thunks.push_back(thunkInfo.isec);
      ++thunkCount;
    }
  }

  log("thunks for " + parent->name + "," + name +
      ": funcs = " + std::to_string(thunkMap.size()) +
      ", relocs = " + std::to_string(relocCount) +
      ", all calls = " + std::to_string(callSiteCount) +
      ", thunk calls = " + std::to_string(thunkCallCount) +
      ", thunks = " + std::to_string(thunkCount));
}

void ConcatOutputSection::writeTo(uint8_t *buf) const {
  for (ConcatInputSection *isec : inputs)
    isec->writeTo(buf + isec->outSecOff);
}

void TextOutputSection::writeTo(uint8_t *buf) const {
  // Merge input sections from thunk & ordinary vectors
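  // Both vectors are already sorted by outSecOff (finalize() hands out
  // monotonically increasing offsets), so a two-pointer merge visits every
  // section in layout order; empty input sections are flushed eagerly since
  // writing them is a no-op.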
  size_t i = 0, ie = inputs.size();
  size_t t = 0, te = thunks.size();
  while (i < ie || t < te) {
    while (i < ie && (t == te || inputs[i]->empty() ||
                      inputs[i]->outSecOff < thunks[t]->outSecOff)) {
      inputs[i]->writeTo(buf + inputs[i]->outSecOff);
      ++i;
    }
    while (t < te && (i == ie || thunks[t]->outSecOff < inputs[i]->outSecOff)) {
      thunks[t]->writeTo(buf + thunks[t]->outSecOff);
      ++t;
    }
  }
}

void ConcatOutputSection::finalizeFlags(InputSection *input) {
  switch (sectionType(input->getFlags())) {
  default /*type-unspec'ed*/:
    // FIXME: Add additional logic here when supporting emitting obj files.
    break;
  case S_4BYTE_LITERALS:
  case S_8BYTE_LITERALS:
  case S_16BYTE_LITERALS:
  case S_CSTRING_LITERALS:
  case S_ZEROFILL:
  case S_LAZY_SYMBOL_POINTERS:
  case S_MOD_TERM_FUNC_POINTERS:
  case S_THREAD_LOCAL_REGULAR:
  case S_THREAD_LOCAL_ZEROFILL:
  case S_THREAD_LOCAL_VARIABLES:
  case S_THREAD_LOCAL_INIT_FUNCTION_POINTERS:
  case S_THREAD_LOCAL_VARIABLE_POINTERS:
  case S_NON_LAZY_SYMBOL_POINTERS:
  case S_SYMBOL_STUBS:
    flags |= input->getFlags();
    break;
  }
}

ConcatOutputSection *
ConcatOutputSection::getOrCreateForInput(const InputSection *isec) {
  NamePair names = maybeRenameSection({isec->getSegName(), isec->getName()});
  ConcatOutputSection *&osec = concatOutputSections[names];
  if (!osec) {
    if (isec->getSegName() == segment_names::text &&
        isec->getName() != section_names::gccExceptTab &&
        isec->getName() != section_names::ehFrame)
      osec = make<TextOutputSection>(names.second);
    else
      osec = make<ConcatOutputSection>(names.second);
  }
  return osec;
}

NamePair macho::maybeRenameSection(NamePair key) {
  auto newNames = config->sectionRenameMap.find(key);
  if (newNames != config->sectionRenameMap.end())
    return newNames->second;
  return key;
}