Subversion Repositories QNX 8.QNX8 LLVM/Clang compiler suite

Rev

Blame | Last modification | View Log | Download | RSS feed

  1. //=== aarch64.h - Generic JITLink aarch64 edge kinds, utilities -*- C++ -*-===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // Generic utilities for graphs representing aarch64 objects.
  10. //
  11. //===----------------------------------------------------------------------===//
  12.  
  13. #ifndef LLVM_EXECUTIONENGINE_JITLINK_AARCH64_H
  14. #define LLVM_EXECUTIONENGINE_JITLINK_AARCH64_H
  15.  
  16. #include "TableManager.h"
  17. #include "llvm/ExecutionEngine/JITLink/JITLink.h"
  18. #include "llvm/ExecutionEngine/Orc/Shared/MemoryFlags.h"
  19.  
  20. namespace llvm {
  21. namespace jitlink {
  22. namespace aarch64 {
  23.  
/// Represents aarch64 fixups and other aarch64-specific edge kinds.
enum EdgeKind_aarch64 : Edge::Kind {

  /// A plain 64-bit pointer value relocation.
  ///
  /// Fixup expression:
  ///   Fixup <- Target + Addend : uint64
  ///
  Pointer64 = Edge::FirstRelocation,

  /// A plain 32-bit pointer value relocation.
  ///
  /// Fixup expression:
  ///   Fixup <- Target + Addend : uint32
  ///
  /// Errors:
  ///   - The target must reside in the low 32-bits of the address space,
  ///     otherwise an out-of-range error will be returned.
  ///
  Pointer32,

  /// A 64-bit delta.
  ///
  /// Delta from the fixup to the target.
  ///
  /// Fixup expression:
  ///   Fixup <- Target - Fixup + Addend : int64
  ///
  Delta64,

  /// A 32-bit delta.
  ///
  /// Delta from the fixup to the target.
  ///
  /// Fixup expression:
  ///   Fixup <- Target - Fixup + Addend : int64
  ///
  /// Errors:
  ///   - The result of the fixup expression must fit into an int32, otherwise
  ///     an out-of-range error will be returned.
  ///
  Delta32,

  /// A 64-bit negative delta.
  ///
  /// Delta from target back to the fixup.
  ///
  /// Fixup expression:
  ///   Fixup <- Fixup - Target + Addend : int64
  ///
  NegDelta64,

  /// A 32-bit negative delta.
  ///
  /// Delta from the target back to the fixup.
  ///
  /// Fixup expression:
  ///   Fixup <- Fixup - Target + Addend : int32
  ///
  /// Errors:
  ///   - The result of the fixup expression must fit into an int32, otherwise
  ///     an out-of-range error will be returned.
  NegDelta32,

  /// A 26-bit PC-relative branch.
  ///
  /// Represents a PC-relative call or branch to a target within +/-128Mb. The
  /// target must be 32-bit aligned.
  ///
  /// Fixup expression:
  ///   Fixup <- (Target - Fixup + Addend) >> 2 : int26
  ///
  /// Notes:
  ///   The '26' in the name refers to the number of operand bits and follows
  /// the naming convention used by the corresponding ELF and MachO
  /// relocations. Since the low two bits must be zero (because of the 32-bit
  /// alignment of the target) the operand is effectively a signed 28-bit
  /// number.
  ///
  /// Errors:
  ///   - The result of the unshifted part of the fixup expression must be
  ///     32-bit aligned otherwise an alignment error will be returned.
  ///   - The result of the fixup expression must fit into an int26 otherwise an
  ///     out-of-range error will be returned.
  Branch26PCRel,

  /// A 16-bit slice of the target address (which slice depends on the
  /// instruction at the fixup location).
  ///
  /// Used to fix up MOVK/MOVN/MOVZ instructions.
  ///
  /// Fixup expression:
  ///
  ///   Fixup <- (Target + Addend) >> Shift : uint16
  ///
  ///   where Shift is encoded in the instruction at the fixup location.
  ///
  MoveWide16,

  /// The signed 21-bit delta from the fixup to the target.
  ///
  /// Typically used to load a pointer at a PC-relative offset of +/- 1Mb. The
  /// target must be 32-bit aligned.
  ///
  /// Fixup expression:
  ///
  ///   Fixup <- (Target - Fixup) >> 2 : int19
  ///
  /// Errors:
  ///   - The result of the unshifted part of the fixup expression must be
  ///     32-bit aligned otherwise an alignment error will be returned.
  ///   - The result of the fixup expression must fit into an int19 or an
  ///     out-of-range error will be returned.
  LDRLiteral19,

  /// The signed 21-bit delta from the fixup page to the page containing the
  /// target.
  ///
  /// Fixup expression:
  ///
  ///   Fixup <- (((Target + Addend) & ~0xfff) - (Fixup & ~0xfff)) >> 12 : int21
  ///
  /// Notes:
  ///   For ADRP fixups.
  ///
  /// Errors:
  ///   - The result of the fixup expression must fit into an int21 otherwise an
  ///     out-of-range error will be returned.
  Page21,

  /// The 12-bit (potentially shifted) offset of the target within its page.
  ///
  /// Typically used to fix up LDR immediates.
  ///
  /// Fixup expression:
  ///
  ///   Fixup <- ((Target + Addend) >> Shift) & 0xfff : uint12
  ///
  ///   where Shift is encoded in the size field of the instruction.
  ///
  /// Errors:
  ///   - The result of the unshifted part of the fixup expression must be
  ///     aligned otherwise an alignment error will be returned.
  ///   - The result of the fixup expression must fit into a uint12 otherwise an
  ///     out-of-range error will be returned.
  PageOffset12,

  /// A GOT entry getter/constructor, transformed to Page21 pointing at the GOT
  /// entry for the original target.
  ///
  /// Indicates that this edge should be transformed into a Page21 targeting
  /// the GOT entry for the edge's current target, maintaining the same addend.
  /// A GOT entry for the target should be created if one does not already
  /// exist.
  ///
  /// Edges of this kind are usually handled by a GOT builder pass inserted by
  /// default.
  ///
  /// Fixup expression:
  ///   NONE
  ///
  /// Errors:
  ///   - *ASSERTION* Failure to handle edges of this kind prior to the fixup
  ///     phase will result in an assert/unreachable during the fixup phase.
  ///
  RequestGOTAndTransformToPage21,

  /// A GOT entry getter/constructor, transformed to PageOffset12 pointing at
  /// the GOT entry for the original target.
  ///
  /// Indicates that this edge should be transformed into a PageOffset12
  /// targeting the GOT entry for the edge's current target, maintaining the
  /// same addend. A GOT entry for the target should be created if one does not
  /// already exist.
  ///
  /// Edges of this kind are usually handled by a GOT builder pass inserted by
  /// default.
  ///
  /// Fixup expression:
  ///   NONE
  ///
  /// Errors:
  ///   - *ASSERTION* Failure to handle edges of this kind prior to the fixup
  ///     phase will result in an assert/unreachable during the fixup phase.
  ///
  RequestGOTAndTransformToPageOffset12,

  /// A GOT entry getter/constructor, transformed to Delta32 pointing at the GOT
  /// entry for the original target.
  ///
  /// Indicates that this edge should be transformed into a Delta32 targeting
  /// the GOT entry for the edge's current target, maintaining the same addend.
  /// A GOT entry for the target should be created if one does not already
  /// exist.
  ///
  /// Edges of this kind are usually handled by a GOT builder pass inserted by
  /// default.
  ///
  /// Fixup expression:
  ///   NONE
  ///
  /// Errors:
  ///   - *ASSERTION* Failure to handle edges of this kind prior to the fixup
  ///     phase will result in an assert/unreachable during the fixup phase.
  ///
  RequestGOTAndTransformToDelta32,

  /// A TLVP entry getter/constructor, transformed to Page21.
  ///
  /// Indicates that this edge should be transformed into a Page21 targeting the
  /// TLVP entry for the edge's current target. A TLVP entry for the target
  /// should be created if one does not already exist.
  ///
  /// Fixup expression:
  ///   NONE
  ///
  /// Errors:
  ///   - *ASSERTION* Failure to handle edges of this kind prior to the fixup
  ///     phase will result in an assert/unreachable during the fixup phase.
  ///
  RequestTLVPAndTransformToPage21,

  /// A TLVP entry getter/constructor, transformed to PageOffset12.
  ///
  /// Indicates that this edge should be transformed into a PageOffset12
  /// targeting the TLVP entry for the edge's current target. A TLVP entry for
  /// the target should be created if one does not already exist.
  ///
  /// Fixup expression:
  ///   NONE
  ///
  /// Errors:
  ///   - *ASSERTION* Failure to handle edges of this kind prior to the fixup
  ///     phase will result in an assert/unreachable during the fixup phase.
  ///
  RequestTLVPAndTransformToPageOffset12,

  /// A TLSDesc entry getter/constructor, transformed to Page21.
  ///
  /// Indicates that this edge should be transformed into a Page21 targeting the
  /// TLSDesc entry for the edge's current target. A TLSDesc entry for the
  /// target should be created if one does not already exist.
  ///
  /// Fixup expression:
  ///   NONE
  ///
  /// Errors:
  ///   - *ASSERTION* Failure to handle edges of this kind prior to the fixup
  ///     phase will result in an assert/unreachable during the fixup phase.
  ///
  RequestTLSDescEntryAndTransformToPage21,

  /// A TLSDesc entry getter/constructor, transformed to PageOffset12.
  ///
  /// Indicates that this edge should be transformed into a PageOffset12
  /// targeting the TLSDesc entry for the edge's current target. A TLSDesc entry
  /// for the target should be created if one does not already exist.
  ///
  /// Fixup expression:
  ///   NONE
  ///
  /// Errors:
  ///   - *ASSERTION* Failure to handle edges of this kind prior to the fixup
  ///     phase will result in an assert/unreachable during the fixup phase.
  ///
  RequestTLSDescEntryAndTransformToPageOffset12,
};
  291.  
/// Returns a string name for the given aarch64 edge. For debugging purposes
/// only.
const char *getEdgeKindName(Edge::Kind K);
  295.  
  296. // Returns whether the Instr is LD/ST (imm12)
  297. inline bool isLoadStoreImm12(uint32_t Instr) {
  298.   constexpr uint32_t LoadStoreImm12Mask = 0x3b000000;
  299.   return (Instr & LoadStoreImm12Mask) == 0x39000000;
  300. }
  301.  
  302. // Returns the amount the address operand of LD/ST (imm12)
  303. // should be shifted right by.
  304. //
  305. // The shift value varies by the data size of LD/ST instruction.
  306. // For instance, LDH instructoin needs the address to be shifted
  307. // right by 1.
  308. inline unsigned getPageOffset12Shift(uint32_t Instr) {
  309.   constexpr uint32_t Vec128Mask = 0x04800000;
  310.  
  311.   if (isLoadStoreImm12(Instr)) {
  312.     uint32_t ImplicitShift = Instr >> 30;
  313.     if (ImplicitShift == 0)
  314.       if ((Instr & Vec128Mask) == Vec128Mask)
  315.         ImplicitShift = 4;
  316.  
  317.     return ImplicitShift;
  318.   }
  319.  
  320.   return 0;
  321. }
  322.  
  323. // Returns whether the Instr is MOVK/MOVZ (imm16) with a zero immediate field
  324. inline bool isMoveWideImm16(uint32_t Instr) {
  325.   constexpr uint32_t MoveWideImm16Mask = 0x5f9fffe0;
  326.   return (Instr & MoveWideImm16Mask) == 0x52800000;
  327. }
  328.  
  329. // Returns the amount the address operand of MOVK/MOVZ (imm16)
  330. // should be shifted right by.
  331. //
  332. // The shift value is specfied in the assembly as LSL #<shift>.
  333. inline unsigned getMoveWide16Shift(uint32_t Instr) {
  334.   if (isMoveWideImm16(Instr)) {
  335.     uint32_t ImplicitShift = (Instr >> 21) & 0b11;
  336.     return ImplicitShift << 4;
  337.   }
  338.  
  339.   return 0;
  340. }
  341.  
/// Apply fixup expression for edge to block content.
///
/// Writes the fixed-up value for edge \p E into \p B's (already mutable)
/// working memory. Returns an out-of-range / alignment error when the
/// computed value does not fit the edge kind's encoding, and an
/// unsupported-edge-kind error for kinds with no fixup expression (the
/// Request*AndTransformTo* kinds must be rewritten by a pass before this
/// phase).
inline Error applyFixup(LinkGraph &G, Block &B, const Edge &E) {
  using namespace support;

  char *BlockWorkingMem = B.getAlreadyMutableContent().data();
  char *FixupPtr = BlockWorkingMem + E.getOffset();
  orc::ExecutorAddr FixupAddress = B.getAddress() + E.getOffset();

  switch (E.getKind()) {
  case Pointer64: {
    // Fixup <- Target + Addend : uint64
    uint64_t Value = E.getTarget().getAddress().getValue() + E.getAddend();
    *(ulittle64_t *)FixupPtr = Value;
    break;
  }
  case Pointer32: {
    // Fixup <- Target + Addend : uint32 (target must be in the low 4Gb).
    uint64_t Value = E.getTarget().getAddress().getValue() + E.getAddend();
    if (Value > std::numeric_limits<uint32_t>::max())
      return makeTargetOutOfRangeError(G, B, E);
    *(ulittle32_t *)FixupPtr = Value;
    break;
  }
  case Delta32:
  case Delta64:
  case NegDelta32:
  case NegDelta64: {
    int64_t Value;
    // Deltas measure fixup-to-target; NegDeltas measure target-to-fixup.
    if (E.getKind() == Delta32 || E.getKind() == Delta64)
      Value = E.getTarget().getAddress() - FixupAddress + E.getAddend();
    else
      Value = FixupAddress - E.getTarget().getAddress() + E.getAddend();

    // 32-bit variants must range-check before truncating to int32.
    if (E.getKind() == Delta32 || E.getKind() == NegDelta32) {
      if (Value < std::numeric_limits<int32_t>::min() ||
          Value > std::numeric_limits<int32_t>::max())
        return makeTargetOutOfRangeError(G, B, E);
      *(little32_t *)FixupPtr = Value;
    } else
      *(little64_t *)FixupPtr = Value;
    break;
  }
  case Branch26PCRel: {
    assert((FixupAddress.getValue() & 0x3) == 0 &&
           "Branch-inst is not 32-bit aligned");

    int64_t Value = E.getTarget().getAddress() - FixupAddress + E.getAddend();

    // The branch target must be 32-bit aligned: the low two bits are not
    // encoded in the instruction.
    if (static_cast<uint64_t>(Value) & 0x3)
      return make_error<JITLinkError>("BranchPCRel26 target is not 32-bit "
                                      "aligned");

    // The effective (unshifted) range is a signed 28-bit value: +/-128Mb.
    if (Value < -(1 << 27) || Value > ((1 << 27) - 1))
      return makeTargetOutOfRangeError(G, B, E);

    uint32_t RawInstr = *(little32_t *)FixupPtr;
    // Mask off bit 31 so both B (0x14000000) and BL (0x94000000) with a
    // zero immediate field are accepted.
    assert((RawInstr & 0x7fffffff) == 0x14000000 &&
           "RawInstr isn't a B or BR immediate instruction");
    // Encode the shifted delta into the low 26 bits of the instruction.
    uint32_t Imm = (static_cast<uint32_t>(Value) & ((1 << 28) - 1)) >> 2;
    uint32_t FixedInstr = RawInstr | Imm;
    *(little32_t *)FixupPtr = FixedInstr;
    break;
  }
  case MoveWide16: {
    uint64_t TargetOffset =
        (E.getTarget().getAddress() + E.getAddend()).getValue();

    uint32_t RawInstr = *(ulittle32_t *)FixupPtr;
    assert(isMoveWideImm16(RawInstr) &&
           "RawInstr isn't a MOVK/MOVZ instruction");

    // Select the 16-bit slice of the address named by the instruction's hw
    // field and place it in the imm16 field (bits [20:5]).
    unsigned ImmShift = getMoveWide16Shift(RawInstr);
    uint32_t Imm = (TargetOffset >> ImmShift) & 0xffff;
    uint32_t FixedInstr = RawInstr | (Imm << 5);
    *(ulittle32_t *)FixupPtr = FixedInstr;
    break;
  }
  case LDRLiteral19: {
    assert((FixupAddress.getValue() & 0x3) == 0 && "LDR is not 32-bit aligned");
    assert(E.getAddend() == 0 && "LDRLiteral19 with non-zero addend");
    uint32_t RawInstr = *(ulittle32_t *)FixupPtr;
    // Only the exact 64-bit LDR-literal-to-x16 encoding used by the jump
    // stubs is expected here.
    assert(RawInstr == 0x58000010 && "RawInstr isn't a 64-bit LDR literal");
    int64_t Delta = E.getTarget().getAddress() - FixupAddress;
    if (Delta & 0x3)
      return make_error<JITLinkError>("LDR literal target is not 32-bit "
                                      "aligned");
    // Unshifted range is a signed 21-bit value: +/-1Mb.
    if (Delta < -(1 << 20) || Delta > ((1 << 20) - 1))
      return makeTargetOutOfRangeError(G, B, E);

    // Encode the word-delta into the imm19 field (bits [23:5]).
    uint32_t EncodedImm = ((static_cast<uint32_t>(Delta) >> 2) & 0x7ffff) << 5;
    uint32_t FixedInstr = RawInstr | EncodedImm;
    *(ulittle32_t *)FixupPtr = FixedInstr;
    break;
  }
  case Page21: {
    // Delta between the 4Kb page of (Target + Addend) and the fixup's page.
    uint64_t TargetPage =
        (E.getTarget().getAddress().getValue() + E.getAddend()) &
        ~static_cast<uint64_t>(4096 - 1);
    uint64_t PCPage =
        FixupAddress.getValue() & ~static_cast<uint64_t>(4096 - 1);

    int64_t PageDelta = TargetPage - PCPage;
    // 21 significand bits + 12-bit page shift => the page delta itself must
    // fit a signed 33-bit value.
    if (!isInt<33>(PageDelta))
      return makeTargetOutOfRangeError(G, B, E);

    uint32_t RawInstr = *(ulittle32_t *)FixupPtr;
    assert((RawInstr & 0xffffffe0) == 0x90000000 &&
           "RawInstr isn't an ADRP instruction");
    // ADRP splits the immediate: immlo in bits [30:29], immhi in [23:5].
    uint32_t ImmLo = (static_cast<uint64_t>(PageDelta) >> 12) & 0x3;
    uint32_t ImmHi = (static_cast<uint64_t>(PageDelta) >> 14) & 0x7ffff;
    uint32_t FixedInstr = RawInstr | (ImmLo << 29) | (ImmHi << 5);
    *(ulittle32_t *)FixupPtr = FixedInstr;
    break;
  }
  case PageOffset12: {
    // Offset of (Target + Addend) within its 4Kb page.
    uint64_t TargetOffset =
        (E.getTarget().getAddress() + E.getAddend()).getValue() & 0xfff;

    uint32_t RawInstr = *(ulittle32_t *)FixupPtr;
    // LD/ST immediates are scaled by the access size.
    unsigned ImmShift = getPageOffset12Shift(RawInstr);

    if (TargetOffset & ((1 << ImmShift) - 1))
      return make_error<JITLinkError>("PAGEOFF12 target is not aligned");

    // Encode the scaled offset into the imm12 field (bits [21:10]).
    uint32_t EncodedImm = (TargetOffset >> ImmShift) << 10;
    uint32_t FixedInstr = RawInstr | EncodedImm;
    *(ulittle32_t *)FixupPtr = FixedInstr;
    break;
  }
  default:
    // Remaining kinds (Request*AndTransformTo*) have no fixup expression and
    // should have been rewritten by a pass before the fixup phase.
    return make_error<JITLinkError>(
        "In graph " + G.getName() + ", section " + B.getSection().getName() +
        " unsupported edge kind " + getEdgeKindName(E.getKind()));
  }

  return Error::success();
}
  477.  
/// aarch64 pointer size.
constexpr uint64_t PointerSize = 8;

/// AArch64 null pointer content (8 zero bytes; defined out-of-line).
extern const char NullPointerContent[PointerSize];

/// AArch64 pointer jump stub content.
///
/// Contains the instruction sequence for an indirect jump via an in-memory
/// pointer (three 4-byte instructions, 12 bytes total):
///   ADRP x16, ptr@page21
///   LDR  x16, [x16, ptr@pageoff12]
///   BR   x16
extern const char PointerJumpStubContent[12];
  492.  
  493. /// Creates a new pointer block in the given section and returns an
  494. /// Anonymous symobl pointing to it.
  495. ///
  496. /// If InitialTarget is given then an Pointer64 relocation will be added to the
  497. /// block pointing at InitialTarget.
  498. ///
  499. /// The pointer block will have the following default values:
  500. ///   alignment: 64-bit
  501. ///   alignment-offset: 0
  502. ///   address: highest allowable (~7U)
  503. inline Symbol &createAnonymousPointer(LinkGraph &G, Section &PointerSection,
  504.                                       Symbol *InitialTarget = nullptr,
  505.                                       uint64_t InitialAddend = 0) {
  506.   auto &B = G.createContentBlock(PointerSection, NullPointerContent,
  507.                                  orc::ExecutorAddr(~uint64_t(7)), 8, 0);
  508.   if (InitialTarget)
  509.     B.addEdge(Pointer64, 0, *InitialTarget, InitialAddend);
  510.   return G.addAnonymousSymbol(B, 0, 8, false, false);
  511. }
  512.  
  513. /// Create a jump stub block that jumps via the pointer at the given symbol.
  514. ///
  515. /// The stub block will have the following default values:
  516. ///   alignment: 32-bit
  517. ///   alignment-offset: 0
  518. ///   address: highest allowable: (~11U)
  519. inline Block &createPointerJumpStubBlock(LinkGraph &G, Section &StubSection,
  520.                                          Symbol &PointerSymbol) {
  521.   auto &B = G.createContentBlock(StubSection, PointerJumpStubContent,
  522.                                  orc::ExecutorAddr(~uint64_t(11)), 1, 0);
  523.   B.addEdge(Page21, 0, PointerSymbol, 0);
  524.   B.addEdge(PageOffset12, 4, PointerSymbol, 0);
  525.   return B;
  526. }
  527.  
  528. /// Create a jump stub that jumps via the pointer at the given symbol and
  529. /// an anonymous symbol pointing to it. Return the anonymous symbol.
  530. ///
  531. /// The stub block will be created by createPointerJumpStubBlock.
  532. inline Symbol &createAnonymousPointerJumpStub(LinkGraph &G,
  533.                                               Section &StubSection,
  534.                                               Symbol &PointerSymbol) {
  535.   return G.addAnonymousSymbol(
  536.       createPointerJumpStubBlock(G, StubSection, PointerSymbol), 0,
  537.       sizeof(PointerJumpStubContent), true, false);
  538. }
  539.  
  540. /// Global Offset Table Builder.
  541. class GOTTableManager : public TableManager<GOTTableManager> {
  542. public:
  543.   static StringRef getSectionName() { return "$__GOT"; }
  544.  
  545.   bool visitEdge(LinkGraph &G, Block *B, Edge &E) {
  546.     Edge::Kind KindToSet = Edge::Invalid;
  547.     const char *BlockWorkingMem = B->getContent().data();
  548.     const char *FixupPtr = BlockWorkingMem + E.getOffset();
  549.  
  550.     switch (E.getKind()) {
  551.     case aarch64::RequestGOTAndTransformToPage21:
  552.     case aarch64::RequestTLVPAndTransformToPage21: {
  553.       KindToSet = aarch64::Page21;
  554.       break;
  555.     }
  556.     case aarch64::RequestGOTAndTransformToPageOffset12:
  557.     case aarch64::RequestTLVPAndTransformToPageOffset12: {
  558.       KindToSet = aarch64::PageOffset12;
  559.       uint32_t RawInstr = *(const support::ulittle32_t *)FixupPtr;
  560.       (void)RawInstr;
  561.       assert(E.getAddend() == 0 &&
  562.              "GOTPageOffset12/TLVPageOffset12 with non-zero addend");
  563.       assert((RawInstr & 0xfffffc00) == 0xf9400000 &&
  564.              "RawInstr isn't a 64-bit LDR immediate");
  565.       break;
  566.     }
  567.     case aarch64::RequestGOTAndTransformToDelta32: {
  568.       KindToSet = aarch64::Delta32;
  569.       break;
  570.     }
  571.     default:
  572.       return false;
  573.     }
  574.     assert(KindToSet != Edge::Invalid &&
  575.            "Fell through switch, but no new kind to set");
  576.     DEBUG_WITH_TYPE("jitlink", {
  577.       dbgs() << "  Fixing " << G.getEdgeKindName(E.getKind()) << " edge at "
  578.              << B->getFixupAddress(E) << " (" << B->getAddress() << " + "
  579.              << formatv("{0:x}", E.getOffset()) << ")\n";
  580.     });
  581.     E.setKind(KindToSet);
  582.     E.setTarget(getEntryForTarget(G, E.getTarget()));
  583.     return true;
  584.   }
  585.  
  586.   Symbol &createEntry(LinkGraph &G, Symbol &Target) {
  587.     return createAnonymousPointer(G, getGOTSection(G), &Target);
  588.   }
  589.  
  590. private:
  591.   Section &getGOTSection(LinkGraph &G) {
  592.     if (!GOTSection)
  593.       GOTSection = &G.createSection(getSectionName(),
  594.                                     orc::MemProt::Read | orc::MemProt::Exec);
  595.     return *GOTSection;
  596.   }
  597.  
  598.   Section *GOTSection = nullptr;
  599. };
  600.  
  601. /// Procedure Linkage Table Builder.
  602. class PLTTableManager : public TableManager<PLTTableManager> {
  603. public:
  604.   PLTTableManager(GOTTableManager &GOT) : GOT(GOT) {}
  605.  
  606.   static StringRef getSectionName() { return "$__STUBS"; }
  607.  
  608.   bool visitEdge(LinkGraph &G, Block *B, Edge &E) {
  609.     if (E.getKind() == aarch64::Branch26PCRel && !E.getTarget().isDefined()) {
  610.       DEBUG_WITH_TYPE("jitlink", {
  611.         dbgs() << "  Fixing " << G.getEdgeKindName(E.getKind()) << " edge at "
  612.                << B->getFixupAddress(E) << " (" << B->getAddress() << " + "
  613.                << formatv("{0:x}", E.getOffset()) << ")\n";
  614.       });
  615.       E.setTarget(getEntryForTarget(G, E.getTarget()));
  616.       return true;
  617.     }
  618.     return false;
  619.   }
  620.  
  621.   Symbol &createEntry(LinkGraph &G, Symbol &Target) {
  622.     return createAnonymousPointerJumpStub(G, getStubsSection(G),
  623.                                           GOT.getEntryForTarget(G, Target));
  624.   }
  625.  
  626. public:
  627.   Section &getStubsSection(LinkGraph &G) {
  628.     if (!StubsSection)
  629.       StubsSection = &G.createSection(getSectionName(),
  630.                                       orc::MemProt::Read | orc::MemProt::Exec);
  631.     return *StubsSection;
  632.   }
  633.  
  634.   GOTTableManager &GOT;
  635.   Section *StubsSection = nullptr;
  636. };
  637.  
  638. } // namespace aarch64
  639. } // namespace jitlink
  640. } // namespace llvm
  641.  
  642. #endif // LLVM_EXECUTIONENGINE_JITLINK_AARCH64_H
  643.