//===-- RuntimeDyldMachOAArch64.h -- MachO/AArch64 specific code. -*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

10280031Sdim#ifndef LLVM_LIB_EXECUTIONENGINE_RUNTIMEDYLD_TARGETS_RUNTIMEDYLDMACHOAARCH64_H
11280031Sdim#define LLVM_LIB_EXECUTIONENGINE_RUNTIMEDYLD_TARGETS_RUNTIMEDYLDMACHOAARCH64_H
12274955Ssvnmir
13274955Ssvnmir#include "../RuntimeDyldMachO.h"
14280031Sdim#include "llvm/Support/Endian.h"
15274955Ssvnmir
16274955Ssvnmir#define DEBUG_TYPE "dyld"
17274955Ssvnmir
18274955Ssvnmirnamespace llvm {
19274955Ssvnmir
20274955Ssvnmirclass RuntimeDyldMachOAArch64
21274955Ssvnmir    : public RuntimeDyldMachOCRTPBase<RuntimeDyldMachOAArch64> {
22274955Ssvnmirpublic:
23280031Sdim
24280031Sdim  typedef uint64_t TargetPtrT;
25280031Sdim
26288943Sdim  RuntimeDyldMachOAArch64(RuntimeDyld::MemoryManager &MM,
27288943Sdim                          RuntimeDyld::SymbolResolver &Resolver)
28288943Sdim      : RuntimeDyldMachOCRTPBase(MM, Resolver) {}
29274955Ssvnmir
30274955Ssvnmir  unsigned getMaxStubSize() override { return 8; }
31274955Ssvnmir
32274955Ssvnmir  unsigned getStubAlignment() override { return 8; }
33274955Ssvnmir
34280031Sdim  /// Extract the addend encoded in the instruction / memory location.
35280031Sdim  int64_t decodeAddend(const RelocationEntry &RE) const {
36280031Sdim    const SectionEntry &Section = Sections[RE.SectionID];
37296417Sdim    uint8_t *LocalAddress = Section.getAddressWithOffset(RE.Offset);
38280031Sdim    unsigned NumBytes = 1 << RE.Size;
39280031Sdim    int64_t Addend = 0;
40280031Sdim    // Verify that the relocation has the correct size and alignment.
41280031Sdim    switch (RE.RelType) {
42280031Sdim    default:
43280031Sdim      llvm_unreachable("Unsupported relocation type!");
44280031Sdim    case MachO::ARM64_RELOC_UNSIGNED:
45280031Sdim      assert((NumBytes == 4 || NumBytes == 8) && "Invalid relocation size.");
46280031Sdim      break;
47280031Sdim    case MachO::ARM64_RELOC_BRANCH26:
48280031Sdim    case MachO::ARM64_RELOC_PAGE21:
49280031Sdim    case MachO::ARM64_RELOC_PAGEOFF12:
50280031Sdim    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
51280031Sdim    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
52280031Sdim      assert(NumBytes == 4 && "Invalid relocation size.");
53280031Sdim      assert((((uintptr_t)LocalAddress & 0x3) == 0) &&
54280031Sdim             "Instruction address is not aligned to 4 bytes.");
55280031Sdim      break;
56280031Sdim    }
57280031Sdim
58280031Sdim    switch (RE.RelType) {
59280031Sdim    default:
60280031Sdim      llvm_unreachable("Unsupported relocation type!");
61280031Sdim    case MachO::ARM64_RELOC_UNSIGNED:
62280031Sdim      // This could be an unaligned memory location.
63280031Sdim      if (NumBytes == 4)
64280031Sdim        Addend = *reinterpret_cast<support::ulittle32_t *>(LocalAddress);
65280031Sdim      else
66280031Sdim        Addend = *reinterpret_cast<support::ulittle64_t *>(LocalAddress);
67280031Sdim      break;
68280031Sdim    case MachO::ARM64_RELOC_BRANCH26: {
69280031Sdim      // Verify that the relocation points to the expected branch instruction.
70280031Sdim      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
71280031Sdim      assert((*p & 0xFC000000) == 0x14000000 && "Expected branch instruction.");
72280031Sdim
73280031Sdim      // Get the 26 bit addend encoded in the branch instruction and sign-extend
74280031Sdim      // to 64 bit. The lower 2 bits are always zeros and are therefore implicit
75280031Sdim      // (<< 2).
76280031Sdim      Addend = (*p & 0x03FFFFFF) << 2;
77280031Sdim      Addend = SignExtend64(Addend, 28);
78280031Sdim      break;
79280031Sdim    }
80280031Sdim    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
81280031Sdim    case MachO::ARM64_RELOC_PAGE21: {
82280031Sdim      // Verify that the relocation points to the expected adrp instruction.
83280031Sdim      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
84280031Sdim      assert((*p & 0x9F000000) == 0x90000000 && "Expected adrp instruction.");
85280031Sdim
86280031Sdim      // Get the 21 bit addend encoded in the adrp instruction and sign-extend
87280031Sdim      // to 64 bit. The lower 12 bits (4096 byte page) are always zeros and are
88280031Sdim      // therefore implicit (<< 12).
89280031Sdim      Addend = ((*p & 0x60000000) >> 29) | ((*p & 0x01FFFFE0) >> 3) << 12;
90280031Sdim      Addend = SignExtend64(Addend, 33);
91280031Sdim      break;
92280031Sdim    }
93280031Sdim    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: {
94280031Sdim      // Verify that the relocation points to one of the expected load / store
95280031Sdim      // instructions.
96280031Sdim      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
97280031Sdim      (void)p;
98280031Sdim      assert((*p & 0x3B000000) == 0x39000000 &&
99280031Sdim             "Only expected load / store instructions.");
100280031Sdim    } // fall-through
101280031Sdim    case MachO::ARM64_RELOC_PAGEOFF12: {
102280031Sdim      // Verify that the relocation points to one of the expected load / store
103280031Sdim      // or add / sub instructions.
104280031Sdim      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
105280031Sdim      assert((((*p & 0x3B000000) == 0x39000000) ||
106280031Sdim              ((*p & 0x11C00000) == 0x11000000)   ) &&
107280031Sdim             "Expected load / store  or add/sub instruction.");
108280031Sdim
109280031Sdim      // Get the 12 bit addend encoded in the instruction.
110280031Sdim      Addend = (*p & 0x003FFC00) >> 10;
111280031Sdim
112280031Sdim      // Check which instruction we are decoding to obtain the implicit shift
113280031Sdim      // factor of the instruction.
114280031Sdim      int ImplicitShift = 0;
115280031Sdim      if ((*p & 0x3B000000) == 0x39000000) { // << load / store
116280031Sdim        // For load / store instructions the size is encoded in bits 31:30.
117280031Sdim        ImplicitShift = ((*p >> 30) & 0x3);
118280031Sdim        if (ImplicitShift == 0) {
119280031Sdim          // Check if this a vector op to get the correct shift value.
120280031Sdim          if ((*p & 0x04800000) == 0x04800000)
121280031Sdim            ImplicitShift = 4;
122280031Sdim        }
123280031Sdim      }
124280031Sdim      // Compensate for implicit shift.
125280031Sdim      Addend <<= ImplicitShift;
126280031Sdim      break;
127280031Sdim    }
128280031Sdim    }
129280031Sdim    return Addend;
130280031Sdim  }
131280031Sdim
132280031Sdim  /// Extract the addend encoded in the instruction.
133280031Sdim  void encodeAddend(uint8_t *LocalAddress, unsigned NumBytes,
134280031Sdim                    MachO::RelocationInfoType RelType, int64_t Addend) const {
135280031Sdim    // Verify that the relocation has the correct alignment.
136280031Sdim    switch (RelType) {
137280031Sdim    default:
138280031Sdim      llvm_unreachable("Unsupported relocation type!");
139280031Sdim    case MachO::ARM64_RELOC_UNSIGNED:
140280031Sdim      assert((NumBytes == 4 || NumBytes == 8) && "Invalid relocation size.");
141280031Sdim      break;
142280031Sdim    case MachO::ARM64_RELOC_BRANCH26:
143280031Sdim    case MachO::ARM64_RELOC_PAGE21:
144280031Sdim    case MachO::ARM64_RELOC_PAGEOFF12:
145280031Sdim    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
146280031Sdim    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
147280031Sdim      assert(NumBytes == 4 && "Invalid relocation size.");
148280031Sdim      assert((((uintptr_t)LocalAddress & 0x3) == 0) &&
149280031Sdim             "Instruction address is not aligned to 4 bytes.");
150280031Sdim      break;
151280031Sdim    }
152280031Sdim
153280031Sdim    switch (RelType) {
154280031Sdim    default:
155280031Sdim      llvm_unreachable("Unsupported relocation type!");
156280031Sdim    case MachO::ARM64_RELOC_UNSIGNED:
157280031Sdim      // This could be an unaligned memory location.
158280031Sdim      if (NumBytes == 4)
159280031Sdim        *reinterpret_cast<support::ulittle32_t *>(LocalAddress) = Addend;
160280031Sdim      else
161280031Sdim        *reinterpret_cast<support::ulittle64_t *>(LocalAddress) = Addend;
162280031Sdim      break;
163280031Sdim    case MachO::ARM64_RELOC_BRANCH26: {
164280031Sdim      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
165280031Sdim      // Verify that the relocation points to the expected branch instruction.
166280031Sdim      assert((*p & 0xFC000000) == 0x14000000 && "Expected branch instruction.");
167280031Sdim
168280031Sdim      // Verify addend value.
169280031Sdim      assert((Addend & 0x3) == 0 && "Branch target is not aligned");
170280031Sdim      assert(isInt<28>(Addend) && "Branch target is out of range.");
171280031Sdim
172280031Sdim      // Encode the addend as 26 bit immediate in the branch instruction.
173280031Sdim      *p = (*p & 0xFC000000) | ((uint32_t)(Addend >> 2) & 0x03FFFFFF);
174280031Sdim      break;
175280031Sdim    }
176280031Sdim    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
177280031Sdim    case MachO::ARM64_RELOC_PAGE21: {
178280031Sdim      // Verify that the relocation points to the expected adrp instruction.
179280031Sdim      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
180280031Sdim      assert((*p & 0x9F000000) == 0x90000000 && "Expected adrp instruction.");
181280031Sdim
182280031Sdim      // Check that the addend fits into 21 bits (+ 12 lower bits).
183280031Sdim      assert((Addend & 0xFFF) == 0 && "ADRP target is not page aligned.");
184280031Sdim      assert(isInt<33>(Addend) && "Invalid page reloc value.");
185280031Sdim
186280031Sdim      // Encode the addend into the instruction.
187280031Sdim      uint32_t ImmLoValue = ((uint64_t)Addend << 17) & 0x60000000;
188280031Sdim      uint32_t ImmHiValue = ((uint64_t)Addend >> 9) & 0x00FFFFE0;
189280031Sdim      *p = (*p & 0x9F00001F) | ImmHiValue | ImmLoValue;
190280031Sdim      break;
191280031Sdim    }
192280031Sdim    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: {
193280031Sdim      // Verify that the relocation points to one of the expected load / store
194280031Sdim      // instructions.
195280031Sdim      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
196280031Sdim      assert((*p & 0x3B000000) == 0x39000000 &&
197280031Sdim             "Only expected load / store instructions.");
198280031Sdim      (void)p;
199280031Sdim    } // fall-through
200280031Sdim    case MachO::ARM64_RELOC_PAGEOFF12: {
201280031Sdim      // Verify that the relocation points to one of the expected load / store
202280031Sdim      // or add / sub instructions.
203280031Sdim      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
204280031Sdim      assert((((*p & 0x3B000000) == 0x39000000) ||
205280031Sdim              ((*p & 0x11C00000) == 0x11000000)   ) &&
206280031Sdim             "Expected load / store  or add/sub instruction.");
207280031Sdim
208280031Sdim      // Check which instruction we are decoding to obtain the implicit shift
209280031Sdim      // factor of the instruction and verify alignment.
210280031Sdim      int ImplicitShift = 0;
211280031Sdim      if ((*p & 0x3B000000) == 0x39000000) { // << load / store
212280031Sdim        // For load / store instructions the size is encoded in bits 31:30.
213280031Sdim        ImplicitShift = ((*p >> 30) & 0x3);
214280031Sdim        switch (ImplicitShift) {
215280031Sdim        case 0:
216280031Sdim          // Check if this a vector op to get the correct shift value.
217280031Sdim          if ((*p & 0x04800000) == 0x04800000) {
218280031Sdim            ImplicitShift = 4;
219280031Sdim            assert(((Addend & 0xF) == 0) &&
220280031Sdim                   "128-bit LDR/STR not 16-byte aligned.");
221280031Sdim          }
222280031Sdim          break;
223280031Sdim        case 1:
224280031Sdim          assert(((Addend & 0x1) == 0) && "16-bit LDR/STR not 2-byte aligned.");
225280031Sdim          break;
226280031Sdim        case 2:
227280031Sdim          assert(((Addend & 0x3) == 0) && "32-bit LDR/STR not 4-byte aligned.");
228280031Sdim          break;
229280031Sdim        case 3:
230280031Sdim          assert(((Addend & 0x7) == 0) && "64-bit LDR/STR not 8-byte aligned.");
231280031Sdim          break;
232280031Sdim        }
233280031Sdim      }
234280031Sdim      // Compensate for implicit shift.
235280031Sdim      Addend >>= ImplicitShift;
236280031Sdim      assert(isUInt<12>(Addend) && "Addend cannot be encoded.");
237280031Sdim
238280031Sdim      // Encode the addend into the instruction.
239280031Sdim      *p = (*p & 0xFFC003FF) | ((uint32_t)(Addend << 10) & 0x003FFC00);
240280031Sdim      break;
241280031Sdim    }
242280031Sdim    }
243280031Sdim  }
244280031Sdim
245274955Ssvnmir  relocation_iterator
246274955Ssvnmir  processRelocationRef(unsigned SectionID, relocation_iterator RelI,
247280031Sdim                       const ObjectFile &BaseObjT,
248280031Sdim                       ObjSectionToIDMap &ObjSectionToID,
249280031Sdim                       StubMap &Stubs) override {
250274955Ssvnmir    const MachOObjectFile &Obj =
251280031Sdim      static_cast<const MachOObjectFile &>(BaseObjT);
252274955Ssvnmir    MachO::any_relocation_info RelInfo =
253274955Ssvnmir        Obj.getRelocation(RelI->getRawDataRefImpl());
254274955Ssvnmir
255274955Ssvnmir    assert(!Obj.isRelocationScattered(RelInfo) && "");
256274955Ssvnmir
257274955Ssvnmir    // ARM64 has an ARM64_RELOC_ADDEND relocation type that carries an explicit
258274955Ssvnmir    // addend for the following relocation. If found: (1) store the associated
259274955Ssvnmir    // addend, (2) consume the next relocation, and (3) use the stored addend to
260274955Ssvnmir    // override the addend.
261274955Ssvnmir    int64_t ExplicitAddend = 0;
262274955Ssvnmir    if (Obj.getAnyRelocationType(RelInfo) == MachO::ARM64_RELOC_ADDEND) {
263274955Ssvnmir      assert(!Obj.getPlainRelocationExternal(RelInfo));
264274955Ssvnmir      assert(!Obj.getAnyRelocationPCRel(RelInfo));
265274955Ssvnmir      assert(Obj.getAnyRelocationLength(RelInfo) == 2);
266274955Ssvnmir      int64_t RawAddend = Obj.getPlainRelocationSymbolNum(RelInfo);
267274955Ssvnmir      // Sign-extend the 24-bit to 64-bit.
268280031Sdim      ExplicitAddend = SignExtend64(RawAddend, 24);
269274955Ssvnmir      ++RelI;
270274955Ssvnmir      RelInfo = Obj.getRelocation(RelI->getRawDataRefImpl());
271274955Ssvnmir    }
272274955Ssvnmir
273280031Sdim    RelocationEntry RE(getRelocationEntry(SectionID, Obj, RelI));
274280031Sdim    RE.Addend = decodeAddend(RE);
275274955Ssvnmir
276280031Sdim    assert((ExplicitAddend == 0 || RE.Addend == 0) && "Relocation has "\
277280031Sdim      "ARM64_RELOC_ADDEND and embedded addend in the instruction.");
278296417Sdim    if (ExplicitAddend)
279274955Ssvnmir      RE.Addend = ExplicitAddend;
280274955Ssvnmir
281296417Sdim    RelocationValueRef Value(
282296417Sdim        getRelocationValueRef(Obj, RelI, RE, ObjSectionToID));
283296417Sdim
284274955Ssvnmir    bool IsExtern = Obj.getPlainRelocationExternal(RelInfo);
285274955Ssvnmir    if (!IsExtern && RE.IsPCRel)
286288943Sdim      makeValueAddendPCRel(Value, RelI, 1 << RE.Size);
287274955Ssvnmir
288280031Sdim    RE.Addend = Value.Offset;
289274955Ssvnmir
290274955Ssvnmir    if (RE.RelType == MachO::ARM64_RELOC_GOT_LOAD_PAGE21 ||
291274955Ssvnmir        RE.RelType == MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12)
292274955Ssvnmir      processGOTRelocation(RE, Value, Stubs);
293274955Ssvnmir    else {
294274955Ssvnmir      if (Value.SymbolName)
295274955Ssvnmir        addRelocationForSymbol(RE, Value.SymbolName);
296274955Ssvnmir      else
297274955Ssvnmir        addRelocationForSection(RE, Value.SectionID);
298274955Ssvnmir    }
299274955Ssvnmir
300274955Ssvnmir    return ++RelI;
301274955Ssvnmir  }
302274955Ssvnmir
303280031Sdim  void resolveRelocation(const RelocationEntry &RE, uint64_t Value) override {
304274955Ssvnmir    DEBUG(dumpRelocationToResolve(RE, Value));
305274955Ssvnmir
306274955Ssvnmir    const SectionEntry &Section = Sections[RE.SectionID];
307296417Sdim    uint8_t *LocalAddress = Section.getAddressWithOffset(RE.Offset);
308280031Sdim    MachO::RelocationInfoType RelType =
309280031Sdim      static_cast<MachO::RelocationInfoType>(RE.RelType);
310274955Ssvnmir
311280031Sdim    switch (RelType) {
312274955Ssvnmir    default:
313274955Ssvnmir      llvm_unreachable("Invalid relocation type!");
314274955Ssvnmir    case MachO::ARM64_RELOC_UNSIGNED: {
315274955Ssvnmir      assert(!RE.IsPCRel && "PCRel and ARM64_RELOC_UNSIGNED not supported");
316274955Ssvnmir      // Mask in the target value a byte at a time (we don't have an alignment
317274955Ssvnmir      // guarantee for the target address, so this is safest).
318274955Ssvnmir      if (RE.Size < 2)
319274955Ssvnmir        llvm_unreachable("Invalid size for ARM64_RELOC_UNSIGNED");
320274955Ssvnmir
321280031Sdim      encodeAddend(LocalAddress, 1 << RE.Size, RelType, Value + RE.Addend);
322274955Ssvnmir      break;
323274955Ssvnmir    }
324274955Ssvnmir    case MachO::ARM64_RELOC_BRANCH26: {
325274955Ssvnmir      assert(RE.IsPCRel && "not PCRel and ARM64_RELOC_BRANCH26 not supported");
326274955Ssvnmir      // Check if branch is in range.
327296417Sdim      uint64_t FinalAddress = Section.getLoadAddressWithOffset(RE.Offset);
328280031Sdim      int64_t PCRelVal = Value - FinalAddress + RE.Addend;
329280031Sdim      encodeAddend(LocalAddress, /*Size=*/4, RelType, PCRelVal);
330274955Ssvnmir      break;
331274955Ssvnmir    }
332274955Ssvnmir    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
333274955Ssvnmir    case MachO::ARM64_RELOC_PAGE21: {
334274955Ssvnmir      assert(RE.IsPCRel && "not PCRel and ARM64_RELOC_PAGE21 not supported");
335274955Ssvnmir      // Adjust for PC-relative relocation and offset.
336296417Sdim      uint64_t FinalAddress = Section.getLoadAddressWithOffset(RE.Offset);
337280031Sdim      int64_t PCRelVal =
338280031Sdim        ((Value + RE.Addend) & (-4096)) - (FinalAddress & (-4096));
339280031Sdim      encodeAddend(LocalAddress, /*Size=*/4, RelType, PCRelVal);
340274955Ssvnmir      break;
341274955Ssvnmir    }
342274955Ssvnmir    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
343274955Ssvnmir    case MachO::ARM64_RELOC_PAGEOFF12: {
344274955Ssvnmir      assert(!RE.IsPCRel && "PCRel and ARM64_RELOC_PAGEOFF21 not supported");
345274955Ssvnmir      // Add the offset from the symbol.
346274955Ssvnmir      Value += RE.Addend;
347274955Ssvnmir      // Mask out the page address and only use the lower 12 bits.
348274955Ssvnmir      Value &= 0xFFF;
349280031Sdim      encodeAddend(LocalAddress, /*Size=*/4, RelType, Value);
350274955Ssvnmir      break;
351274955Ssvnmir    }
352274955Ssvnmir    case MachO::ARM64_RELOC_SUBTRACTOR:
353274955Ssvnmir    case MachO::ARM64_RELOC_POINTER_TO_GOT:
354274955Ssvnmir    case MachO::ARM64_RELOC_TLVP_LOAD_PAGE21:
355274955Ssvnmir    case MachO::ARM64_RELOC_TLVP_LOAD_PAGEOFF12:
356280031Sdim      llvm_unreachable("Relocation type not yet implemented!");
357274955Ssvnmir    case MachO::ARM64_RELOC_ADDEND:
358274955Ssvnmir      llvm_unreachable("ARM64_RELOC_ADDEND should have been handeled by "
359274955Ssvnmir                       "processRelocationRef!");
360274955Ssvnmir    }
361274955Ssvnmir  }
362274955Ssvnmir
363280031Sdim  void finalizeSection(const ObjectFile &Obj, unsigned SectionID,
364274955Ssvnmir                       const SectionRef &Section) {}
365274955Ssvnmir
366274955Ssvnmirprivate:
367274955Ssvnmir  void processGOTRelocation(const RelocationEntry &RE,
368274955Ssvnmir                            RelocationValueRef &Value, StubMap &Stubs) {
369274955Ssvnmir    assert(RE.Size == 2);
370274955Ssvnmir    SectionEntry &Section = Sections[RE.SectionID];
371274955Ssvnmir    StubMap::const_iterator i = Stubs.find(Value);
372280031Sdim    int64_t Offset;
373274955Ssvnmir    if (i != Stubs.end())
374280031Sdim      Offset = static_cast<int64_t>(i->second);
375274955Ssvnmir    else {
376274955Ssvnmir      // FIXME: There must be a better way to do this then to check and fix the
377274955Ssvnmir      // alignment every time!!!
378296417Sdim      uintptr_t BaseAddress = uintptr_t(Section.getAddress());
379274955Ssvnmir      uintptr_t StubAlignment = getStubAlignment();
380274955Ssvnmir      uintptr_t StubAddress =
381296417Sdim          (BaseAddress + Section.getStubOffset() + StubAlignment - 1) &
382274955Ssvnmir          -StubAlignment;
383274955Ssvnmir      unsigned StubOffset = StubAddress - BaseAddress;
384274955Ssvnmir      Stubs[Value] = StubOffset;
385274955Ssvnmir      assert(((StubAddress % getStubAlignment()) == 0) &&
386274955Ssvnmir             "GOT entry not aligned");
387274955Ssvnmir      RelocationEntry GOTRE(RE.SectionID, StubOffset,
388280031Sdim                            MachO::ARM64_RELOC_UNSIGNED, Value.Offset,
389274955Ssvnmir                            /*IsPCRel=*/false, /*Size=*/3);
390274955Ssvnmir      if (Value.SymbolName)
391274955Ssvnmir        addRelocationForSymbol(GOTRE, Value.SymbolName);
392274955Ssvnmir      else
393274955Ssvnmir        addRelocationForSection(GOTRE, Value.SectionID);
394296417Sdim      Section.advanceStubOffset(getMaxStubSize());
395280031Sdim      Offset = static_cast<int64_t>(StubOffset);
396274955Ssvnmir    }
397280031Sdim    RelocationEntry TargetRE(RE.SectionID, RE.Offset, RE.RelType, Offset,
398274955Ssvnmir                             RE.IsPCRel, RE.Size);
399280031Sdim    addRelocationForSection(TargetRE, RE.SectionID);
400274955Ssvnmir  }
401274955Ssvnmir};
402274955Ssvnmir}
403274955Ssvnmir
404274955Ssvnmir#undef DEBUG_TYPE
405274955Ssvnmir
406280031Sdim#endif