//===- llvm/CodeGen/GlobalISel/CallLowering.h - Call lowering ---*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file describes how to lower LLVM calls to machine code calls.
///
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
#define LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/TargetCallingConv.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MachineValueType.h"
#include <cstdint>
#include <functional>

namespace llvm {

class DataLayout;
class Function;
class MachineIRBuilder;
class MachineOperand;
struct MachinePointerInfo;
class MachineRegisterInfo;
class TargetLowering;
class Type;
class Value;

class CallLowering {
  const TargetLowering *TLI;

  virtual void anchor();
public:
  struct ArgInfo {
    SmallVector<Register, 4> Regs;
    Type *Ty;
    ISD::ArgFlagsTy Flags;
    bool IsFixed;

    ArgInfo(ArrayRef<Register> Regs, Type *Ty,
            ISD::ArgFlagsTy Flags = ISD::ArgFlagsTy{}, bool IsFixed = true)
        : Regs(Regs.begin(), Regs.end()), Ty(Ty), Flags(Flags),
          IsFixed(IsFixed) {
      // FIXME: We should have just one way of saying "no register".
      assert((Ty->isVoidTy() == (Regs.empty() || Regs[0] == 0)) &&
             "only void types should have no register");
    }
  };
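
  // Illustrative sketch (not from the original header): lowering code usually
  // builds one ArgInfo per IR value that has already been given virtual
  // registers. The register, context and function names below are assumed.
  //
  //   Register VReg = MRI.createGenericVirtualRegister(LLT::scalar(32));
  //   CallLowering::ArgInfo OrigRet({VReg}, Type::getInt32Ty(Ctx));
  //   setArgFlags(OrigRet, AttributeList::ReturnIndex, DL, F);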

  /// Argument handling is mostly uniform between the four places that
  /// make these decisions: function formal arguments, call
  /// instruction args, call instruction returns and function
  /// returns. However, once a decision has been made on where an
  /// argument should go, exactly what happens can vary slightly. This
  /// class abstracts the differences.
  struct ValueHandler {
    ValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                 CCAssignFn *AssignFn)
      : MIRBuilder(MIRBuilder), MRI(MRI), AssignFn(AssignFn) {}

    virtual ~ValueHandler() = default;

    /// Returns true if the handler is dealing with formal arguments,
    /// not with return values etc.
    virtual bool isArgumentHandler() const { return false; }

    /// Materialize a VReg containing the address of the specified
    /// stack-based object. This is either based on a FrameIndex or
    /// direct SP manipulation, depending on the context. \p MPO
    /// should be initialized to an appropriate description of the
    /// address created.
    virtual Register getStackAddress(uint64_t Size, int64_t Offset,
                                     MachinePointerInfo &MPO) = 0;

    /// The specified value has been assigned to a physical register,
    /// handle the appropriate COPY (either to or from) and mark any
    /// relevant uses/defines as needed.
    virtual void assignValueToReg(Register ValVReg, Register PhysReg,
                                  CCValAssign &VA) = 0;

    /// The specified value has been assigned to a stack
    /// location. Load or store it there, with appropriate extension
    /// if necessary.
    virtual void assignValueToAddress(Register ValVReg, Register Addr,
                                      uint64_t Size, MachinePointerInfo &MPO,
                                      CCValAssign &VA) = 0;

    /// Handle custom values, which may be passed into one or more of \p VAs.
    /// \return The number of \p VAs that have been assigned after the first
    ///         one, and which should therefore be skipped from further
    ///         processing.
    virtual unsigned assignCustomValue(const ArgInfo &Arg,
                                       ArrayRef<CCValAssign> VAs) {
      // This is not a pure virtual method because not all targets need to worry
      // about custom values.
      llvm_unreachable("Custom values not supported");
    }

    Register extendRegister(Register ValReg, CCValAssign &VA);

    virtual bool assignArg(unsigned ValNo, MVT ValVT, MVT LocVT,
                           CCValAssign::LocInfo LocInfo, const ArgInfo &Info,
                           CCState &State) {
      return AssignFn(ValNo, ValVT, LocVT, LocInfo, Info.Flags, State);
    }

    MachineIRBuilder &MIRBuilder;
    MachineRegisterInfo &MRI;
    CCAssignFn *AssignFn;

  private:
    virtual void anchor();
  };
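
  // A minimal sketch of a target-side handler for incoming arguments, under
  // assumed target conventions (the class name and instruction choices are
  // illustrative, not the required implementation):
  //
  //   struct SketchIncomingHandler : CallLowering::ValueHandler {
  //     using ValueHandler::ValueHandler;
  //
  //     bool isArgumentHandler() const override { return true; }
  //
  //     Register getStackAddress(uint64_t Size, int64_t Offset,
  //                              MachinePointerInfo &MPO) override {
  //       // Address the incoming stack slot through a fixed frame index.
  //       MachineFunction &MF = MIRBuilder.getMF();
  //       int FI = MF.getFrameInfo().CreateFixedObject(Size, Offset,
  //                                                    /*IsImmutable=*/true);
  //       MPO = MachinePointerInfo::getFixedStack(MF, FI);
  //       Register Addr =
  //           MRI.createGenericVirtualRegister(LLT::pointer(0, 64));
  //       MIRBuilder.buildFrameIndex(Addr, FI);
  //       return Addr;
  //     }
  //
  //     void assignValueToReg(Register ValVReg, Register PhysReg,
  //                           CCValAssign &VA) override {
  //       // Incoming values arrive in physical registers: record the live-in
  //       // and copy into the virtual register.
  //       MIRBuilder.getMBB().addLiveIn(PhysReg);
  //       MIRBuilder.buildCopy(ValVReg, PhysReg);
  //     }
  //
  //     void assignValueToAddress(Register ValVReg, Register Addr,
  //                               uint64_t Size, MachinePointerInfo &MPO,
  //                               CCValAssign &VA) override {
  //       auto *MMO = MIRBuilder.getMF().getMachineMemOperand(
  //           MPO, MachineMemOperand::MOLoad, Size, /*Alignment=*/1);
  //       MIRBuilder.buildLoad(ValVReg, Addr, *MMO);
  //     }
  //   };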

protected:
  /// Getter for generic TargetLowering class.
  const TargetLowering *getTLI() const {
    return TLI;
  }

  /// Getter for target specific TargetLowering class.
  template <class XXXTargetLowering>
  const XXXTargetLowering *getTLI() const {
    return static_cast<const XXXTargetLowering *>(TLI);
  }
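
  // For example, a target's CallLowering subclass can retrieve its own
  // lowering object through the templated getter (the AArch64 name is only
  // an illustration):
  //
  //   const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();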

  template <typename FuncInfoTy>
  void setArgFlags(ArgInfo &Arg, unsigned OpIdx, const DataLayout &DL,
                   const FuncInfoTy &FuncInfo) const;

  /// Generate instructions for packing \p SrcRegs into one big register
  /// corresponding to the aggregate type \p PackedTy.
  ///
  /// \param SrcRegs should contain one virtual register for each base type in
  ///        \p PackedTy, as returned by computeValueLLTs.
  ///
  /// \return The packed register.
  Register packRegs(ArrayRef<Register> SrcRegs, Type *PackedTy,
                    MachineIRBuilder &MIRBuilder) const;

  /// Generate instructions for unpacking \p SrcReg into the \p DstRegs
  /// corresponding to the aggregate type \p PackedTy.
  ///
  /// \param DstRegs should contain one virtual register for each base type in
  ///        \p PackedTy, as returned by computeValueLLTs.
  void unpackRegs(ArrayRef<Register> DstRegs, Register SrcReg, Type *PackedTy,
                  MachineIRBuilder &MIRBuilder) const;
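
  // Sketch of how the two are typically paired (assumed context and variable
  // names): registers produced for an aggregate IR value can be merged into
  // one wide register before assignment, and a wide register produced by a
  // call can be split back into the per-element registers afterwards.
  //
  //   Register Packed = packRegs(ArgRegs, AggTy, MIRBuilder);
  //   // ... later, for a returned aggregate ...
  //   unpackRegs(ResRegs, PackedRet, AggTy, MIRBuilder);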

  /// Invoke Handler::assignArg on each of the given \p Args and then use
  /// \p Handler to move them to the assigned locations.
  ///
  /// \return True if everything has succeeded, false otherwise.
  bool handleAssignments(MachineIRBuilder &MIRBuilder, ArrayRef<ArgInfo> Args,
                         ValueHandler &Handler) const;
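
  // Illustrative use from a target (helper and variable names are assumed):
  // split the IR-level arguments into ArgInfos, pick the calling-convention
  // assignment function, and let handleAssignments drive the ValueHandler.
  //
  //   SmallVector<ArgInfo, 8> SplitArgs;
  //   // ... populate SplitArgs and call setArgFlags on each entry ...
  //   SketchIncomingHandler Handler(MIRBuilder, MRI, AssignFnForThisCC);
  //   if (!handleAssignments(MIRBuilder, SplitArgs, Handler))
  //     return false;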

public:
  CallLowering(const TargetLowering *TLI) : TLI(TLI) {}
  virtual ~CallLowering() = default;

  /// \return true if the target is capable of handling swifterror values that
  /// have been promoted to a specified register. The extended versions of
  /// lowerReturn and lowerCall should be implemented.
  virtual bool supportSwiftError() const {
    return false;
  }

  /// This hook must be implemented to lower outgoing return values, described
  /// by \p Val, into the specified virtual registers \p VRegs.
  /// This hook is used by GlobalISel.
  ///
  /// \p SwiftErrorVReg is non-zero if the function has a swifterror parameter
  /// that needs to be implicitly returned.
  ///
  /// \return True if the lowering succeeds, false otherwise.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs,
                           Register SwiftErrorVReg) const {
    if (!supportSwiftError()) {
      assert(SwiftErrorVReg == 0 && "attempt to use unsupported swifterror");
      return lowerReturn(MIRBuilder, Val, VRegs);
    }
    return false;
  }

  /// This hook behaves as the extended lowerReturn function, but for targets
  /// that do not support swifterror value promotion.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs) const {
    return false;
  }

  /// This hook must be implemented to lower the incoming (formal)
  /// arguments, described by \p VRegs, for GlobalISel. Each argument
  /// must end up in the related virtual registers described by \p VRegs.
  /// In other words, the first argument should end up in \c VRegs[0],
  /// the second in \c VRegs[1], and so on. For each argument, there will be one
  /// register for each non-aggregate type, as returned by \c computeValueLLTs.
  /// \p MIRBuilder is set to the proper insertion point for the argument
  /// lowering.
  ///
  /// \return True if the lowering succeeded, false otherwise.
  virtual bool lowerFormalArguments(MachineIRBuilder &MIRBuilder,
                                    const Function &F,
                                    ArrayRef<ArrayRef<Register>> VRegs) const {
    return false;
  }
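
  // For instance (illustrative only), given an IR signature such as
  //   define void @f(i32 %a, { i64, i64 } %b)
  // VRegs[0] holds the single register for %a, while VRegs[1] holds two
  // registers, one per i64 member of %b, as produced by computeValueLLTs.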

  /// This hook must be implemented to lower the given call instruction,
  /// including argument and return value marshalling.
  ///
  /// \p CallConv is the calling convention to be used for the call.
  ///
  /// \p Callee is the destination of the call. It should be either a register,
  /// globaladdress, or externalsymbol.
  ///
  /// \p OrigRet is a descriptor for the return type of the function.
  ///
  /// \p OrigArgs is a list of descriptors of the arguments passed to the
  /// function.
  ///
  /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
  /// parameter, and contains the vreg that the swifterror should be copied into
  /// after the call.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  virtual bool lowerCall(MachineIRBuilder &MIRBuilder, CallingConv::ID CallConv,
                         const MachineOperand &Callee, const ArgInfo &OrigRet,
                         ArrayRef<ArgInfo> OrigArgs,
                         Register SwiftErrorVReg) const {
    if (!supportSwiftError()) {
      assert(SwiftErrorVReg == 0 && "trying to use unsupported swifterror");
      return lowerCall(MIRBuilder, CallConv, Callee, OrigRet, OrigArgs);
    }
    return false;
  }

  /// This hook behaves as the extended lowerCall function, but for targets that
  /// do not support swifterror value promotion.
  virtual bool lowerCall(MachineIRBuilder &MIRBuilder, CallingConv::ID CallConv,
                         const MachineOperand &Callee, const ArgInfo &OrigRet,
                         ArrayRef<ArgInfo> OrigArgs) const {
    return false;
  }

  /// Lower the given call instruction, including argument and return value
  /// marshalling.
  ///
  /// \p CS is the call/invoke instruction.
  ///
  /// \p ResRegs are the registers where the call's return value should be
  /// stored (or empty if there is no return value). There will be one register
  /// for each non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p ArgRegs is a list of lists of virtual registers containing each
  /// argument that needs to be passed (argument \c i should be placed in \c
  /// ArgRegs[i]). For each argument, there will be one register for each
  /// non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
  /// parameter, and contains the vreg that the swifterror should be copied into
  /// after the call.
  ///
  /// \p GetCalleeReg is a callback to materialize a register for the callee if
  /// the target determines it cannot jump to the destination based purely on
  /// \p CS. This might be because \p CS is indirect, or because of the limited
  /// range of an immediate jump.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  bool lowerCall(MachineIRBuilder &MIRBuilder, ImmutableCallSite CS,
                 ArrayRef<Register> ResRegs,
                 ArrayRef<ArrayRef<Register>> ArgRegs, Register SwiftErrorVReg,
                 std::function<unsigned()> GetCalleeReg) const;
};

} // end namespace llvm

#endif // LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H