
//===- llvm/CodeGen/GlobalISel/CallLowering.h - Call lowering ---*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file describes how to lower LLVM calls to machine code calls.
///
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
#define LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetCallingConv.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/LowLevelTypeImpl.h"
#include "llvm/Support/MachineValueType.h"
#include <climits>
#include <cstdint>
#include <functional>

namespace llvm {

class AttributeList;
class CallBase;
class DataLayout;
class Function;
class FunctionLoweringInfo;
class MachineIRBuilder;
class MachineFunction;
struct MachinePointerInfo;
class MachineRegisterInfo;
class TargetLowering;

class CallLowering {
  const TargetLowering *TLI;

  virtual void anchor();
public:
  struct BaseArgInfo {
    Type *Ty;
    SmallVector<ISD::ArgFlagsTy, 4> Flags;
    bool IsFixed;

    BaseArgInfo(Type *Ty,
                ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
                bool IsFixed = true)
        : Ty(Ty), Flags(Flags.begin(), Flags.end()), IsFixed(IsFixed) {}

    BaseArgInfo() : Ty(nullptr), IsFixed(false) {}
  };

  struct ArgInfo : public BaseArgInfo {
    SmallVector<Register, 4> Regs;
    // If the argument had to be split into multiple parts according to the
    // target calling convention, then this contains the original vregs
    // if the argument was an incoming arg.
    SmallVector<Register, 2> OrigRegs;

    /// Optionally track the original IR value for the argument. This may not
    /// be meaningful in all contexts. It should only be used to forward
    /// aliasing information to the MachinePointerInfo of memory arguments.
    const Value *OrigValue = nullptr;

    /// Index of the original Function's argument.
    unsigned OrigArgIndex;

    /// Sentinel value for implicit machine-level input arguments.
    static const unsigned NoArgIndex = UINT_MAX;

    ArgInfo(ArrayRef<Register> Regs, Type *Ty, unsigned OrigIndex,
            ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
            bool IsFixed = true, const Value *OrigValue = nullptr)
        : BaseArgInfo(Ty, Flags, IsFixed), Regs(Regs.begin(), Regs.end()),
          OrigValue(OrigValue), OrigArgIndex(OrigIndex) {
      if (!Regs.empty() && Flags.empty())
        this->Flags.push_back(ISD::ArgFlagsTy());
      // FIXME: We should have just one way of saying "no register".
      assert(((Ty->isVoidTy() || Ty->isEmptyTy()) ==
              (Regs.empty() || Regs[0] == 0)) &&
             "only void types should have no register");
    }

    ArgInfo(ArrayRef<Register> Regs, const Value &OrigValue, unsigned OrigIndex,
            ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
            bool IsFixed = true)
      : ArgInfo(Regs, OrigValue.getType(), OrigIndex, Flags, IsFixed, &OrigValue) {}

    ArgInfo() = default;
  };
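
  // Illustrative sketch (not part of the upstream header): how a target's
  // call lowering might describe one outgoing argument. The vreg and the
  // argument index used below are hypothetical placeholders.
  //
  //   const CallBase &CB = ...;          // IR call site
  //   Register ValReg = ...;             // vreg holding argument 0
  //   ArgInfo OrigArg({ValReg}, *CB.getArgOperand(0), /*OrigIndex=*/0);
  //   setArgFlags(OrigArg, AttributeList::FirstArgIndex, DL, CB);
  //
  // splitToValueTypes() would then break OrigArg into the per-part ArgInfos
  // that the calling convention actually assigns.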

  struct CallLoweringInfo {
    /// Calling convention to be used for the call.
    CallingConv::ID CallConv = CallingConv::C;

    /// Destination of the call. It should be either a register, globaladdress,
    /// or externalsymbol.
    MachineOperand Callee = MachineOperand::CreateImm(0);

    /// Descriptor for the return type of the function.
    ArgInfo OrigRet;

    /// List of descriptors of the arguments passed to the function.
    SmallVector<ArgInfo, 32> OrigArgs;

    /// Valid if the call has a swifterror inout parameter, and contains the
    /// vreg that the swifterror should be copied into after the call.
    Register SwiftErrorVReg;

    /// Original IR callsite corresponding to this call, if available.
    const CallBase *CB = nullptr;

    MDNode *KnownCallees = nullptr;

    /// True if the call must be tail call optimized.
    bool IsMustTailCall = false;

    /// True if the call passes all target-independent checks for tail call
    /// optimization.
    bool IsTailCall = false;

    /// True if the call was lowered as a tail call. This is consumed by the
    /// legalizer. This allows the legalizer to lower libcalls as tail calls.
    bool LoweredTailCall = false;

    /// True if the call is to a vararg function.
    bool IsVarArg = false;

    /// True if the function's return value can be lowered to registers.
    bool CanLowerReturn = true;

    /// VReg to hold the hidden sret parameter.
    Register DemoteRegister;

    /// The stack index for sret demotion.
    int DemoteStackIndex;

    /// Expected type identifier for indirect calls with a CFI check.
    const ConstantInt *CFIType = nullptr;
  };

  /// Argument handling is mostly uniform between the four places that
  /// make these decisions: function formal arguments, call
  /// instruction args, call instruction returns and function
  /// returns. However, once a decision has been made on where an
  /// argument should go, exactly what happens can vary slightly. This
  /// class abstracts the differences.
  ///
  /// ValueAssigner should not depend on any specific function state, and
  /// only determine the types and locations for arguments.
  struct ValueAssigner {
    ValueAssigner(bool IsIncoming, CCAssignFn *AssignFn_,
                  CCAssignFn *AssignFnVarArg_ = nullptr)
        : AssignFn(AssignFn_), AssignFnVarArg(AssignFnVarArg_),
          IsIncomingArgumentHandler(IsIncoming) {

      // Some targets use a different assignment function for varargs calls.
      // If no separate vararg function was provided, fall back to the regular
      // one.
      if (!AssignFnVarArg)
        AssignFnVarArg = AssignFn;
    }

    virtual ~ValueAssigner() = default;

    /// Returns true if the handler is dealing with incoming arguments,
    /// i.e. those that move values from some physical location to vregs.
    bool isIncomingArgumentHandler() const {
      return IsIncomingArgumentHandler;
    }

    /// Wrap a call to the assignment function (typically a TableGen-generated
    /// CCAssignFn). This may be overridden to track additional state
    /// information as arguments are assigned, or to apply target-specific
    /// hacks around the legacy infrastructure.
    virtual bool assignArg(unsigned ValNo, EVT OrigVT, MVT ValVT, MVT LocVT,
                           CCValAssign::LocInfo LocInfo, const ArgInfo &Info,
                           ISD::ArgFlagsTy Flags, CCState &State) {
      if (getAssignFn(State.isVarArg())(ValNo, ValVT, LocVT, LocInfo, Flags,
                                        State))
        return true;
      StackOffset = State.getNextStackOffset();
      return false;
    }

    /// Assignment function to use for a general call.
    CCAssignFn *AssignFn;

    /// Assignment function to use for a variadic call. This is usually the same
    /// as AssignFn on most targets.
    CCAssignFn *AssignFnVarArg;

    /// Stack offset for next argument. At the end of argument evaluation, this
    /// is typically the total stack size.
    uint64_t StackOffset = 0;

    /// Select the appropriate assignment function depending on whether this is
    /// a variadic call.
    CCAssignFn *getAssignFn(bool IsVarArg) const {
      return IsVarArg ? AssignFnVarArg : AssignFn;
    }

  private:
    const bool IsIncomingArgumentHandler;
    virtual void anchor();
  };

  struct IncomingValueAssigner : public ValueAssigner {
    IncomingValueAssigner(CCAssignFn *AssignFn_,
                          CCAssignFn *AssignFnVarArg_ = nullptr)
        : ValueAssigner(true, AssignFn_, AssignFnVarArg_) {}
  };

  struct OutgoingValueAssigner : public ValueAssigner {
    OutgoingValueAssigner(CCAssignFn *AssignFn_,
                          CCAssignFn *AssignFnVarArg_ = nullptr)
        : ValueAssigner(false, AssignFn_, AssignFnVarArg_) {}
  };
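
  // Illustrative sketch (not part of the upstream header): targets normally
  // pair a TableGen-generated CCAssignFn with one of the assigners above.
  // The CC_MyTarget/RetCC_MyTarget functions named here are hypothetical.
  //
  //   IncomingValueAssigner ArgAssigner(CC_MyTarget);
  //   OutgoingValueAssigner RetAssigner(RetCC_MyTarget);
  //
  // The assigner only decides *where* each value goes (register or stack
  // slot); actually moving the value there is the job of a ValueHandler,
  // declared below.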

  struct ValueHandler {
    MachineIRBuilder &MIRBuilder;
    MachineRegisterInfo &MRI;
    const bool IsIncomingArgumentHandler;

    ValueHandler(bool IsIncoming, MachineIRBuilder &MIRBuilder,
                 MachineRegisterInfo &MRI)
        : MIRBuilder(MIRBuilder), MRI(MRI),
          IsIncomingArgumentHandler(IsIncoming) {}

    virtual ~ValueHandler() = default;

    /// Returns true if the handler is dealing with incoming arguments,
    /// i.e. those that move values from some physical location to vregs.
    bool isIncomingArgumentHandler() const {
      return IsIncomingArgumentHandler;
    }

    /// Materialize a VReg containing the address of the specified
    /// stack-based object. This is either based on a FrameIndex or
    /// direct SP manipulation, depending on the context. \p MPO
    /// should be initialized to an appropriate description of the
    /// address created.
    virtual Register getStackAddress(uint64_t MemSize, int64_t Offset,
                                     MachinePointerInfo &MPO,
                                     ISD::ArgFlagsTy Flags) = 0;

    /// Return the in-memory size to write for the argument at \p VA. This may
    /// be smaller than the allocated stack slot size.
    ///
    /// This is overridable primarily for targets to maintain compatibility with
    /// hacks around the existing DAG call lowering infrastructure.
    virtual LLT getStackValueStoreType(const DataLayout &DL,
                                       const CCValAssign &VA,
                                       ISD::ArgFlagsTy Flags) const;

    /// The specified value has been assigned to a physical register,
    /// handle the appropriate COPY (either to or from) and mark any
    /// relevant uses/defines as needed.
    virtual void assignValueToReg(Register ValVReg, Register PhysReg,
                                  CCValAssign VA) = 0;

    /// The specified value has been assigned to a stack
    /// location. Load or store it there, with appropriate extension
    /// if necessary.
    virtual void assignValueToAddress(Register ValVReg, Register Addr,
                                      LLT MemTy, MachinePointerInfo &MPO,
                                      CCValAssign &VA) = 0;

    /// An overload which takes an ArgInfo if additional information about the
    /// arg is needed. \p ValRegIndex is the index in \p Arg.Regs for the value
    /// to store.
    virtual void assignValueToAddress(const ArgInfo &Arg, unsigned ValRegIndex,
                                      Register Addr, LLT MemTy,
                                      MachinePointerInfo &MPO,
                                      CCValAssign &VA) {
      assignValueToAddress(Arg.Regs[ValRegIndex], Addr, MemTy, MPO, VA);
    }

    /// Handle custom values, which may be passed into one or more of \p VAs.
    /// If the handler wants the assignments to be delayed until after the
    /// memory location assignments, it sets \p Thunk to the thunk that will
    /// perform the assignment.
    /// \return The number of \p VAs that have been assigned after the first
    ///         one, and which should therefore be skipped from further
    ///         processing.
    virtual unsigned assignCustomValue(ArgInfo &Arg, ArrayRef<CCValAssign> VAs,
                                       std::function<void()> *Thunk = nullptr) {
      // This is not a pure virtual method because not all targets need to worry
      // about custom values.
      llvm_unreachable("Custom values not supported");
    }

    /// Do a memory copy of \p MemSize bytes from \p SrcPtr to \p DstPtr. This
    /// is necessary for outgoing stack-passed byval arguments.
    void
    copyArgumentMemory(const ArgInfo &Arg, Register DstPtr, Register SrcPtr,
                       const MachinePointerInfo &DstPtrInfo, Align DstAlign,
                       const MachinePointerInfo &SrcPtrInfo, Align SrcAlign,
                       uint64_t MemSize, CCValAssign &VA) const;

    /// Extend a register to the location type given in \p VA, capped at
    /// extending to at most \p MaxSizeBits bits. If \p MaxSizeBits is 0 then
    /// no maximum is set.
    Register extendRegister(Register ValReg, CCValAssign &VA,
                            unsigned MaxSizeBits = 0);
  };

  /// Base class for ValueHandlers used for arguments coming into the current
  /// function, or for return values received from a call.
  struct IncomingValueHandler : public ValueHandler {
    IncomingValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI)
        : ValueHandler(/*IsIncoming*/ true, MIRBuilder, MRI) {}

    /// Insert G_ASSERT_ZEXT/G_ASSERT_SEXT or other hint instruction based on \p
    /// VA, returning the new register if a hint was inserted.
    Register buildExtensionHint(CCValAssign &VA, Register SrcReg, LLT NarrowTy);

    /// Provides a default implementation for argument handling.
    void assignValueToReg(Register ValVReg, Register PhysReg,
                          CCValAssign VA) override;
  };

  /// Base class for ValueHandlers used for arguments passed to a function call,
  /// or for return values.
  struct OutgoingValueHandler : public ValueHandler {
    OutgoingValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI)
        : ValueHandler(/*IsIncoming*/ false, MIRBuilder, MRI) {}
  };
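
  // Illustrative sketch (not part of the upstream header): a minimal incoming
  // handler for formal arguments. Only an outline is shown; a real target
  // (typically in <Target>CallLowering.cpp) also deals with extensions,
  // split arguments and special registers.
  //
  //   struct MyFormalArgHandler : public IncomingValueHandler {
  //     using IncomingValueHandler::IncomingValueHandler;
  //
  //     Register getStackAddress(uint64_t Size, int64_t Offset,
  //                              MachinePointerInfo &MPO,
  //                              ISD::ArgFlagsTy Flags) override {
  //       // Typically: create a fixed frame index and emit G_FRAME_INDEX.
  //     }
  //
  //     void assignValueToAddress(Register ValVReg, Register Addr, LLT MemTy,
  //                               MachinePointerInfo &MPO,
  //                               CCValAssign &VA) override {
  //       // Typically: emit a G_LOAD from Addr with a machine memory operand.
  //     }
  //   };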

protected:
  /// Getter for generic TargetLowering class.
  const TargetLowering *getTLI() const {
    return TLI;
  }

  /// Getter for target-specific TargetLowering class.
  template <class XXXTargetLowering>
  const XXXTargetLowering *getTLI() const {
    return static_cast<const XXXTargetLowering *>(TLI);
  }

  /// \returns Flags corresponding to the attributes on the \p ArgIdx-th
  /// parameter of \p Call.
  ISD::ArgFlagsTy getAttributesForArgIdx(const CallBase &Call,
                                         unsigned ArgIdx) const;

  /// \returns Flags corresponding to the attributes on the return from \p Call.
  ISD::ArgFlagsTy getAttributesForReturn(const CallBase &Call) const;

  /// Adds flags to \p Flags based on the attributes in \p Attrs.
  /// \p OpIdx is the index in \p Attrs to add flags from.
  void addArgFlagsFromAttributes(ISD::ArgFlagsTy &Flags,
                                 const AttributeList &Attrs,
                                 unsigned OpIdx) const;

  /// Compute the argument flags for the \p OpIdx-th operand of \p FuncInfo
  /// (a Function or CallBase) and add them to \p Arg, using the data layout
  /// \p DL.
  template <typename FuncInfoTy>
  void setArgFlags(ArgInfo &Arg, unsigned OpIdx, const DataLayout &DL,
                   const FuncInfoTy &FuncInfo) const;

  /// Break \p OrigArgInfo into one or more pieces the calling convention can
  /// process, returned in \p SplitArgs. For example, this should break structs
  /// down into individual fields.
  ///
  /// If \p Offsets is non-null, it points to a vector to be filled in
  /// with the in-memory offsets of each of the individual values.
  void splitToValueTypes(const ArgInfo &OrigArgInfo,
                         SmallVectorImpl<ArgInfo> &SplitArgs,
                         const DataLayout &DL, CallingConv::ID CallConv,
                         SmallVectorImpl<uint64_t> *Offsets = nullptr) const;

  /// Analyze the argument list in \p Args, using \p Assigner to populate \p
  /// CCInfo. This will determine the types and locations to use for passed or
  /// returned values. This may resize fields in \p Args if the value is split
  /// across multiple registers or stack slots.
  ///
  /// This is independent of the function state and can be used
  /// to determine how a call would pass arguments without needing to change the
  /// function. This can be used to check if arguments are suitable for tail
  /// call lowering.
  ///
  /// \return True if everything has succeeded, false otherwise.
  bool determineAssignments(ValueAssigner &Assigner,
                            SmallVectorImpl<ArgInfo> &Args,
                            CCState &CCInfo) const;

  /// Invoke ValueAssigner::assignArg on each of the given \p Args and then use
  /// \p Handler to move them to the assigned locations.
  ///
  /// \return True if everything has succeeded, false otherwise.
  bool determineAndHandleAssignments(
      ValueHandler &Handler, ValueAssigner &Assigner,
      SmallVectorImpl<ArgInfo> &Args, MachineIRBuilder &MIRBuilder,
      CallingConv::ID CallConv, bool IsVarArg,
      ArrayRef<Register> ThisReturnRegs = std::nullopt) const;

  /// Use \p Handler to insert code to handle the argument/return values
  /// represented by \p Args. It is expected that determineAssignments has
  /// already processed these arguments to populate \p CCState and \p ArgLocs.
  bool
  handleAssignments(ValueHandler &Handler, SmallVectorImpl<ArgInfo> &Args,
                    CCState &CCState, SmallVectorImpl<CCValAssign> &ArgLocs,
                    MachineIRBuilder &MIRBuilder,
                    ArrayRef<Register> ThisReturnRegs = std::nullopt) const;
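
  // Illustrative sketch (not part of the upstream header): the usual way a
  // target's lowerFormalArguments() wires the helpers above together. The
  // CC_MyTarget assign function and MyFormalArgHandler handler named here are
  // hypothetical placeholders.
  //
  //   SmallVector<ArgInfo, 8> SplitArgs;
  //   for (const auto &Arg : F.args()) {
  //     ArgInfo OrigArg{VRegs[Arg.getArgNo()], Arg.getType(), Arg.getArgNo()};
  //     setArgFlags(OrigArg, Arg.getArgNo() + AttributeList::FirstArgIndex,
  //                 DL, F);
  //     splitToValueTypes(OrigArg, SplitArgs, DL, F.getCallingConv());
  //   }
  //   IncomingValueAssigner Assigner(CC_MyTarget);
  //   MyFormalArgHandler Handler(MIRBuilder, MRI);
  //   return determineAndHandleAssignments(Handler, Assigner, SplitArgs,
  //                                        MIRBuilder, F.getCallingConv(),
  //                                        F.isVarArg());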

  /// Check whether parameters to a call that are passed in callee-saved
  /// registers are the same as from the calling function. This needs to be
  /// checked for tail call eligibility.
  bool parametersInCSRMatch(const MachineRegisterInfo &MRI,
                            const uint32_t *CallerPreservedMask,
                            const SmallVectorImpl<CCValAssign> &ArgLocs,
                            const SmallVectorImpl<ArgInfo> &OutVals) const;

  /// \returns True if the calling conventions of the callee and the caller
  /// pass results in the same way. Typically used for tail call eligibility
  /// checks.
  ///
  /// \p Info is the CallLoweringInfo for the call.
  /// \p MF is the MachineFunction for the caller.
  /// \p InArgs contains the results of the call.
  /// \p CalleeAssigner specifies the target's handling of the argument types
  /// for the callee.
  /// \p CallerAssigner specifies the target's handling of the
  /// argument types for the caller.
  bool resultsCompatible(CallLoweringInfo &Info, MachineFunction &MF,
                         SmallVectorImpl<ArgInfo> &InArgs,
                         ValueAssigner &CalleeAssigner,
                         ValueAssigner &CallerAssigner) const;

public:
  CallLowering(const TargetLowering *TLI) : TLI(TLI) {}
  virtual ~CallLowering() = default;

  /// \return true if the target is capable of handling swifterror values that
  /// have been promoted to a specified register. The extended versions of
  /// lowerReturn and lowerCall should be implemented.
  virtual bool supportSwiftError() const {
    return false;
  }

  /// Load the returned value from the stack into virtual registers in \p VRegs.
  /// It uses the frame index \p FI and the start offset from \p DemoteReg.
  /// The loaded data size will be determined from \p RetTy.
  void insertSRetLoads(MachineIRBuilder &MIRBuilder, Type *RetTy,
                       ArrayRef<Register> VRegs, Register DemoteReg,
                       int FI) const;

  /// Store the return value given by \p VRegs into the stack starting at the
  /// offset specified in \p DemoteReg.
  void insertSRetStores(MachineIRBuilder &MIRBuilder, Type *RetTy,
                        ArrayRef<Register> VRegs, Register DemoteReg) const;

  /// Insert the hidden sret ArgInfo to the beginning of \p SplitArgs.
  /// This function should be called from the target-specific
  /// lowerFormalArguments when \p F requires the sret demotion.
  void insertSRetIncomingArgument(const Function &F,
                                  SmallVectorImpl<ArgInfo> &SplitArgs,
                                  Register &DemoteReg, MachineRegisterInfo &MRI,
                                  const DataLayout &DL) const;

  /// For the call-base described by \p CB, insert the hidden sret ArgInfo to
  /// the OrigArgs field of \p Info.
  void insertSRetOutgoingArgument(MachineIRBuilder &MIRBuilder,
                                  const CallBase &CB,
                                  CallLoweringInfo &Info) const;

  /// \return True if the return type described by \p Outs can be returned
  /// without performing sret demotion.
  bool checkReturn(CCState &CCInfo, SmallVectorImpl<BaseArgInfo> &Outs,
                   CCAssignFn *Fn) const;

  /// Get the type and the ArgFlags for the split components of \p RetTy as
  /// returned by \c ComputeValueVTs.
  void getReturnInfo(CallingConv::ID CallConv, Type *RetTy, AttributeList Attrs,
                     SmallVectorImpl<BaseArgInfo> &Outs,
                     const DataLayout &DL) const;

  /// Top-level function to check the return type based on the target calling
  /// convention. \return True if the return value of \p MF can be returned
  /// without performing sret demotion.
  bool checkReturnTypeForCallConv(MachineFunction &MF) const;

  /// This hook must be implemented to check whether the return values
  /// described by \p Outs can fit into the return registers. If false
  /// is returned, an sret-demotion is performed.
  virtual bool canLowerReturn(MachineFunction &MF, CallingConv::ID CallConv,
                              SmallVectorImpl<BaseArgInfo> &Outs,
                              bool IsVarArg) const {
    return true;
  }

  /// This hook must be implemented to lower outgoing return values, described
  /// by \p Val, into the specified virtual registers \p VRegs.
  /// This hook is used by GlobalISel.
  ///
  /// \p FLI is required for sret demotion.
  ///
  /// \p SwiftErrorVReg is non-zero if the function has a swifterror parameter
  /// that needs to be implicitly returned.
  ///
  /// \return True if the lowering succeeds, false otherwise.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs, FunctionLoweringInfo &FLI,
                           Register SwiftErrorVReg) const {
    if (!supportSwiftError()) {
      assert(SwiftErrorVReg == 0 && "attempt to use unsupported swifterror");
      return lowerReturn(MIRBuilder, Val, VRegs, FLI);
    }
    return false;
  }

  /// This hook behaves as the extended lowerReturn function, but for targets
  /// that do not support swifterror value promotion.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs,
                           FunctionLoweringInfo &FLI) const {
    return false;
  }

  /// \return True if the target should fall back to SelectionDAG for \p MF
  /// instead of continuing with GlobalISel.
  virtual bool fallBackToDAGISel(const MachineFunction &MF) const {
    return false;
  }

  /// This hook must be implemented to lower the incoming (formal)
  /// arguments, described by \p VRegs, for GlobalISel. Each argument
  /// must end up in the related virtual registers described by \p VRegs.
  /// In other words, the first argument should end up in \c VRegs[0],
  /// the second in \c VRegs[1], and so on. For each argument, there will be one
  /// register for each non-aggregate type, as returned by \c computeValueLLTs.
  /// \p MIRBuilder is set to the proper insertion for the argument
  /// lowering. \p FLI is required for sret demotion.
  ///
  /// \return True if the lowering succeeded, false otherwise.
  virtual bool lowerFormalArguments(MachineIRBuilder &MIRBuilder,
                                    const Function &F,
                                    ArrayRef<ArrayRef<Register>> VRegs,
                                    FunctionLoweringInfo &FLI) const {
    return false;
  }

  /// This hook must be implemented to lower the given call instruction,
  /// including argument and return value marshalling.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  virtual bool lowerCall(MachineIRBuilder &MIRBuilder,
                         CallLoweringInfo &Info) const {
    return false;
  }

  /// Lower the given call instruction, including argument and return value
  /// marshalling.
  ///
  /// \p Call is the call/invoke instruction.
  ///
  /// \p ResRegs are the registers where the call's return value should be
  /// stored (or 0 if there is no return value). There will be one register for
  /// each non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p ArgRegs is a list of lists of virtual registers containing each
  /// argument that needs to be passed (argument \c i should be placed in \c
  /// ArgRegs[i]). For each argument, there will be one register for each
  /// non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
  /// parameter, and contains the vreg that the swifterror should be copied into
  /// after the call.
  ///
  /// \p GetCalleeReg is a callback to materialize a register for the callee if
  /// the target determines it cannot jump to the destination based purely on \p
  /// Call. This might be because \p Call is indirect, or because of the limited
  /// range of an immediate jump.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  bool lowerCall(MachineIRBuilder &MIRBuilder, const CallBase &Call,
                 ArrayRef<Register> ResRegs,
                 ArrayRef<ArrayRef<Register>> ArgRegs, Register SwiftErrorVReg,
                 std::function<unsigned()> GetCalleeReg) const;

  /// Targets which want big-endian lowering can enable it by overriding the
  /// enableBigEndian() hook.
  virtual bool enableBigEndian() const { return false; }

  /// For targets which support the "returned" parameter attribute, returns
  /// true if the given type is a valid one to use with "returned".
  virtual bool isTypeIsValidForThisReturn(EVT Ty) const { return false; }
};

} // end namespace llvm

#endif // LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H