Andrew Scull | 5e1ddfa | 2018-08-14 10:06:54 +0100 | [diff] [blame] | 1 | //===- llvm/CodeGen/GlobalISel/CallLowering.h - Call lowering ---*- C++ -*-===// |
| 2 | // |
Andrew Walbran | 16937d0 | 2019-10-22 13:54:20 +0100 | [diff] [blame] | 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
| 4 | // See https://llvm.org/LICENSE.txt for license information. |
| 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
Andrew Scull | 5e1ddfa | 2018-08-14 10:06:54 +0100 | [diff] [blame] | 6 | // |
| 7 | //===----------------------------------------------------------------------===// |
| 8 | /// |
| 9 | /// \file |
| 10 | /// This file describes how to lower LLVM calls to machine code calls. |
| 11 | /// |
| 12 | //===----------------------------------------------------------------------===// |
| 13 | |
| 14 | #ifndef LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H |
| 15 | #define LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H |
| 16 | |
| 17 | #include "llvm/ADT/ArrayRef.h" |
Andrew Walbran | 3d2c197 | 2020-04-07 12:24:26 +0100 | [diff] [blame] | 18 | #include "llvm/ADT/SmallVector.h" |
Andrew Scull | 5e1ddfa | 2018-08-14 10:06:54 +0100 | [diff] [blame] | 19 | #include "llvm/CodeGen/CallingConvLower.h" |
Olivier Deprez | f4ef2d0 | 2021-04-20 13:36:24 +0200 | [diff] [blame] | 20 | #include "llvm/CodeGen/MachineOperand.h" |
Andrew Scull | 5e1ddfa | 2018-08-14 10:06:54 +0100 | [diff] [blame] | 21 | #include "llvm/CodeGen/TargetCallingConv.h" |
Olivier Deprez | f4ef2d0 | 2021-04-20 13:36:24 +0200 | [diff] [blame] | 22 | #include "llvm/IR/Attributes.h" |
Andrew Scull | 5e1ddfa | 2018-08-14 10:06:54 +0100 | [diff] [blame] | 23 | #include "llvm/IR/CallingConv.h" |
Olivier Deprez | f4ef2d0 | 2021-04-20 13:36:24 +0200 | [diff] [blame] | 24 | #include "llvm/IR/Type.h" |
Andrew Scull | 5e1ddfa | 2018-08-14 10:06:54 +0100 | [diff] [blame] | 25 | #include "llvm/Support/ErrorHandling.h" |
| 26 | #include "llvm/Support/MachineValueType.h" |
| 27 | #include <cstdint> |
| 28 | #include <functional> |
| 29 | |
| 30 | namespace llvm { |
| 31 | |
Olivier Deprez | f4ef2d0 | 2021-04-20 13:36:24 +0200 | [diff] [blame] | 32 | class CallBase; |
Andrew Scull | 5e1ddfa | 2018-08-14 10:06:54 +0100 | [diff] [blame] | 33 | class DataLayout; |
| 34 | class Function; |
Olivier Deprez | f4ef2d0 | 2021-04-20 13:36:24 +0200 | [diff] [blame] | 35 | class FunctionLoweringInfo; |
Andrew Scull | 5e1ddfa | 2018-08-14 10:06:54 +0100 | [diff] [blame] | 36 | class MachineIRBuilder; |
Andrew Scull | 5e1ddfa | 2018-08-14 10:06:54 +0100 | [diff] [blame] | 37 | struct MachinePointerInfo; |
| 38 | class MachineRegisterInfo; |
| 39 | class TargetLowering; |
Andrew Scull | 5e1ddfa | 2018-08-14 10:06:54 +0100 | [diff] [blame] | 40 | class Value; |
| 41 | |
/// Base class for translating LLVM IR calling conventions into machine code.
/// Targets subclass this to describe how formal arguments, call arguments,
/// call returns and function returns are lowered for GlobalISel.
class CallLowering {
  /// Target lowering information used by the getTLI() accessors below.
  const TargetLowering *TLI;

  /// Out-of-line virtual method to anchor this class's vtable to one
  /// translation unit.
  virtual void anchor();
public:
  /// Type and flag information for a single (possibly split) value being
  /// passed or returned, without any register assignment.
  struct BaseArgInfo {
    /// IR type of the value.
    Type *Ty;
    /// One set of argument flags (sext/zext/byval/...) per split part.
    SmallVector<ISD::ArgFlagsTy, 4> Flags;
    /// False if this describes a variadic argument.
    bool IsFixed;

    BaseArgInfo(Type *Ty,
                ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
                bool IsFixed = true)
        : Ty(Ty), Flags(Flags.begin(), Flags.end()), IsFixed(IsFixed) {}

    BaseArgInfo() : Ty(nullptr), IsFixed(false) {}
  };

  /// A BaseArgInfo plus the virtual registers holding the value.
  struct ArgInfo : public BaseArgInfo {
    /// Virtual registers containing the value, one per non-aggregate part.
    SmallVector<Register, 4> Regs;
    // If the argument had to be split into multiple parts according to the
    // target calling convention, then this contains the original vregs
    // if the argument was an incoming arg.
    SmallVector<Register, 2> OrigRegs;

    ArgInfo(ArrayRef<Register> Regs, Type *Ty,
            ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
            bool IsFixed = true)
        : BaseArgInfo(Ty, Flags, IsFixed), Regs(Regs.begin(), Regs.end()) {
      // Guarantee at least one flags entry whenever registers are present.
      if (!Regs.empty() && Flags.empty())
        this->Flags.push_back(ISD::ArgFlagsTy());
      // FIXME: We should have just one way of saying "no register".
      assert(((Ty->isVoidTy() || Ty->isEmptyTy()) ==
              (Regs.empty() || Regs[0] == 0)) &&
             "only void types should have no register");
    }

    ArgInfo() : BaseArgInfo() {}
  };

  /// Everything a target needs to lower one call: convention, callee,
  /// return/argument descriptors and tail-call / sret-demotion state.
  struct CallLoweringInfo {
    /// Calling convention to be used for the call.
    CallingConv::ID CallConv = CallingConv::C;

    /// Destination of the call. It should be either a register, globaladdress,
    /// or externalsymbol.
    MachineOperand Callee = MachineOperand::CreateImm(0);

    /// Descriptor for the return type of the function.
    ArgInfo OrigRet;

    /// List of descriptors of the arguments passed to the function.
    SmallVector<ArgInfo, 8> OrigArgs;

    /// Valid if the call has a swifterror inout parameter, and contains the
    /// vreg that the swifterror should be copied into after the call.
    Register SwiftErrorVReg;

    /// Metadata describing the possible callees, if any; presumably the
    /// call site's !callees metadata — TODO confirm with the caller that
    /// populates this.
    MDNode *KnownCallees = nullptr;

    /// True if the call must be tail call optimized.
    bool IsMustTailCall = false;

    /// True if the call passes all target-independent checks for tail call
    /// optimization.
    bool IsTailCall = false;

    /// True if the call was lowered as a tail call. This is consumed by the
    /// legalizer. This allows the legalizer to lower libcalls as tail calls.
    bool LoweredTailCall = false;

    /// True if the call is to a vararg function.
    bool IsVarArg = false;

    /// True if the function's return value can be lowered to registers.
    bool CanLowerReturn = true;

    /// VReg to hold the hidden sret parameter.
    Register DemoteRegister;

    /// The stack index for sret demotion.
    int DemoteStackIndex;
  };

  /// Argument handling is mostly uniform between the four places that
  /// make these decisions: function formal arguments, call
  /// instruction args, call instruction returns and function
  /// returns. However, once a decision has been made on where an
  /// argument should go, exactly what happens can vary slightly. This
  /// class abstracts the differences.
  struct ValueHandler {
    ValueHandler(bool IsIncoming, MachineIRBuilder &MIRBuilder,
                 MachineRegisterInfo &MRI, CCAssignFn *AssignFn)
        : MIRBuilder(MIRBuilder), MRI(MRI), AssignFn(AssignFn),
          IsIncomingArgumentHandler(IsIncoming) {}

    virtual ~ValueHandler() = default;

    /// Returns true if the handler is dealing with incoming arguments,
    /// i.e. those that move values from some physical location to vregs.
    bool isIncomingArgumentHandler() const {
      return IsIncomingArgumentHandler;
    }

    /// Materialize a VReg containing the address of the specified
    /// stack-based object. This is either based on a FrameIndex or
    /// direct SP manipulation, depending on the context. \p MPO
    /// should be initialized to an appropriate description of the
    /// address created.
    virtual Register getStackAddress(uint64_t Size, int64_t Offset,
                                     MachinePointerInfo &MPO) = 0;

    /// The specified value has been assigned to a physical register,
    /// handle the appropriate COPY (either to or from) and mark any
    /// relevant uses/defines as needed.
    virtual void assignValueToReg(Register ValVReg, Register PhysReg,
                                  CCValAssign &VA) = 0;

    /// The specified value has been assigned to a stack
    /// location. Load or store it there, with appropriate extension
    /// if necessary.
    virtual void assignValueToAddress(Register ValVReg, Register Addr,
                                      uint64_t Size, MachinePointerInfo &MPO,
                                      CCValAssign &VA) = 0;

    /// An overload which takes an ArgInfo if additional information about
    /// the arg is needed. The default simply forwards the single register
    /// of \p Arg to the Register-based overload above.
    virtual void assignValueToAddress(const ArgInfo &Arg, Register Addr,
                                      uint64_t Size, MachinePointerInfo &MPO,
                                      CCValAssign &VA) {
      assert(Arg.Regs.size() == 1);
      assignValueToAddress(Arg.Regs[0], Addr, Size, MPO, VA);
    }

    /// Handle custom values, which may be passed into one or more of \p VAs.
    /// \return The number of \p VAs that have been assigned after the first
    /// one, and which should therefore be skipped from further
    /// processing.
    virtual unsigned assignCustomValue(const ArgInfo &Arg,
                                       ArrayRef<CCValAssign> VAs) {
      // This is not a pure virtual method because not all targets need to worry
      // about custom values.
      llvm_unreachable("Custom values not supported");
    }

    /// Extend a register to the location type given in VA, capped at extending
    /// to at most MaxSize bits. If MaxSizeBits is 0 then no maximum is set.
    Register extendRegister(Register ValReg, CCValAssign &VA,
                            unsigned MaxSizeBits = 0);

    /// Run the calling-convention assignment function for one value.
    /// \return whatever AssignFn returns; per the CCAssignFn convention
    /// used here, a true result indicates the argument was NOT assigned.
    virtual bool assignArg(unsigned ValNo, MVT ValVT, MVT LocVT,
                           CCValAssign::LocInfo LocInfo, const ArgInfo &Info,
                           ISD::ArgFlagsTy Flags, CCState &State) {
      return AssignFn(ValNo, ValVT, LocVT, LocInfo, Flags, State);
    }

    /// Builder used to emit the COPY / load / store instructions.
    MachineIRBuilder &MIRBuilder;
    /// Register info for creating and querying virtual registers.
    MachineRegisterInfo &MRI;
    /// Calling-convention assignment function driving the lowering.
    CCAssignFn *AssignFn;

  private:
    /// Cached direction flag set by the constructor; see
    /// isIncomingArgumentHandler().
    bool IsIncomingArgumentHandler;
    /// Out-of-line virtual method to anchor the vtable.
    virtual void anchor();
  };

  /// Convenience base for handlers of incoming values (physical location ->
  /// vregs); fixes the direction flag to true.
  struct IncomingValueHandler : public ValueHandler {
    IncomingValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                         CCAssignFn *AssignFn)
        : ValueHandler(true, MIRBuilder, MRI, AssignFn) {}
  };

  /// Convenience base for handlers of outgoing values (vregs -> physical
  /// location); fixes the direction flag to false.
  struct OutgoingValueHandler : public ValueHandler {
    OutgoingValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                         CCAssignFn *AssignFn)
        : ValueHandler(false, MIRBuilder, MRI, AssignFn) {}
  };

protected:
  /// Getter for generic TargetLowering class.
  const TargetLowering *getTLI() const {
    return TLI;
  }

  /// Getter for target specific TargetLowering class.
  template <class XXXTargetLowering>
  const XXXTargetLowering *getTLI() const {
    return static_cast<const XXXTargetLowering *>(TLI);
  }

  /// \returns Flags corresponding to the attributes on the \p ArgIdx-th
  /// parameter of \p Call.
  ISD::ArgFlagsTy getAttributesForArgIdx(const CallBase &Call,
                                         unsigned ArgIdx) const;

  /// Adds flags to \p Flags based off of the attributes in \p Attrs.
  /// \p OpIdx is the index in \p Attrs to add flags from.
  void addArgFlagsFromAttributes(ISD::ArgFlagsTy &Flags,
                                 const AttributeList &Attrs,
                                 unsigned OpIdx) const;

  /// Populate \p Arg.Flags from operand \p OpIdx of \p FuncInfo (defined
  /// out-of-line; instantiated for the function-argument and call-site cases).
  template <typename FuncInfoTy>
  void setArgFlags(ArgInfo &Arg, unsigned OpIdx, const DataLayout &DL,
                   const FuncInfoTy &FuncInfo) const;

  /// Generate instructions for packing \p SrcRegs into one big register
  /// corresponding to the aggregate type \p PackedTy.
  ///
  /// \param SrcRegs should contain one virtual register for each base type in
  /// \p PackedTy, as returned by computeValueLLTs.
  ///
  /// \return The packed register.
  Register packRegs(ArrayRef<Register> SrcRegs, Type *PackedTy,
                    MachineIRBuilder &MIRBuilder) const;

  /// Generate instructions for unpacking \p SrcReg into the \p DstRegs
  /// corresponding to the aggregate type \p PackedTy.
  ///
  /// \param DstRegs should contain one virtual register for each base type in
  /// \p PackedTy, as returned by computeValueLLTs.
  void unpackRegs(ArrayRef<Register> DstRegs, Register SrcReg, Type *PackedTy,
                  MachineIRBuilder &MIRBuilder) const;

  /// Invoke Handler::assignArg on each of the given \p Args and then use
  /// \p Handler to move them to the assigned locations.
  ///
  /// \return True if everything has succeeded, false otherwise.
  bool handleAssignments(MachineIRBuilder &MIRBuilder,
                         SmallVectorImpl<ArgInfo> &Args,
                         ValueHandler &Handler) const;
  /// Overload taking an explicit CCState and location list, for callers that
  /// have already set up (or need to inspect) the assignment state.
  bool handleAssignments(CCState &CCState,
                         SmallVectorImpl<CCValAssign> &ArgLocs,
                         MachineIRBuilder &MIRBuilder,
                         SmallVectorImpl<ArgInfo> &Args,
                         ValueHandler &Handler) const;

  /// Analyze passed or returned values from a call, supplied in \p ArgInfo,
  /// incorporating info about the passed values into \p CCState.
  ///
  /// Used to check if arguments are suitable for tail call lowering.
  bool analyzeArgInfo(CCState &CCState, SmallVectorImpl<ArgInfo> &Args,
                      CCAssignFn &AssignFnFixed,
                      CCAssignFn &AssignFnVarArg) const;

  /// \returns True if the calling convention for a callee and its caller pass
  /// results in the same way. Typically used for tail call eligibility checks.
  ///
  /// \p Info is the CallLoweringInfo for the call.
  /// \p MF is the MachineFunction for the caller.
  /// \p InArgs contains the results of the call.
  /// \p CalleeAssignFnFixed is the CCAssignFn to be used for the callee for
  /// fixed arguments.
  /// \p CalleeAssignFnVarArg is similar, but for varargs.
  /// \p CallerAssignFnFixed is the CCAssignFn to be used for the caller for
  /// fixed arguments.
  /// \p CallerAssignFnVarArg is similar, but for varargs.
  bool resultsCompatible(CallLoweringInfo &Info, MachineFunction &MF,
                         SmallVectorImpl<ArgInfo> &InArgs,
                         CCAssignFn &CalleeAssignFnFixed,
                         CCAssignFn &CalleeAssignFnVarArg,
                         CCAssignFn &CallerAssignFnFixed,
                         CCAssignFn &CallerAssignFnVarArg) const;

public:
  CallLowering(const TargetLowering *TLI) : TLI(TLI) {}
  virtual ~CallLowering() = default;

  /// \return true if the target is capable of handling swifterror values that
  /// have been promoted to a specified register. The extended versions of
  /// lowerReturn and lowerCall should be implemented.
  virtual bool supportSwiftError() const {
    return false;
  }

  /// Load the returned value from the stack into virtual registers in \p VRegs.
  /// It uses the frame index \p FI and the start offset from \p DemoteReg.
  /// The loaded data size will be determined from \p RetTy.
  void insertSRetLoads(MachineIRBuilder &MIRBuilder, Type *RetTy,
                       ArrayRef<Register> VRegs, Register DemoteReg,
                       int FI) const;

  /// Store the return value given by \p VRegs into stack starting at the offset
  /// specified in \p DemoteReg.
  void insertSRetStores(MachineIRBuilder &MIRBuilder, Type *RetTy,
                        ArrayRef<Register> VRegs, Register DemoteReg) const;

  /// Insert the hidden sret ArgInfo to the beginning of \p SplitArgs.
  /// This function should be called from the target specific
  /// lowerFormalArguments when \p F requires the sret demotion.
  void insertSRetIncomingArgument(const Function &F,
                                  SmallVectorImpl<ArgInfo> &SplitArgs,
                                  Register &DemoteReg, MachineRegisterInfo &MRI,
                                  const DataLayout &DL) const;

  /// For the call-base described by \p CB, insert the hidden sret ArgInfo to
  /// the OrigArgs field of \p Info.
  void insertSRetOutgoingArgument(MachineIRBuilder &MIRBuilder,
                                  const CallBase &CB,
                                  CallLoweringInfo &Info) const;

  /// \return True if the return type described by \p Outs can be returned
  /// without performing sret demotion.
  bool checkReturn(CCState &CCInfo, SmallVectorImpl<BaseArgInfo> &Outs,
                   CCAssignFn *Fn) const;

  /// Get the type and the ArgFlags for the split components of \p RetTy as
  /// returned by \c ComputeValueVTs.
  void getReturnInfo(CallingConv::ID CallConv, Type *RetTy, AttributeList Attrs,
                     SmallVectorImpl<BaseArgInfo> &Outs,
                     const DataLayout &DL) const;

  /// Toplevel function to check the return type based on the target calling
  /// convention. \return True if the return value of \p MF can be returned
  /// without performing sret demotion.
  bool checkReturnTypeForCallConv(MachineFunction &MF) const;

  /// This hook must be implemented to check whether the return values
  /// described by \p Outs can fit into the return registers. If false
  /// is returned, an sret-demotion is performed.
  virtual bool canLowerReturn(MachineFunction &MF, CallingConv::ID CallConv,
                              SmallVectorImpl<BaseArgInfo> &Outs,
                              bool IsVarArg) const {
    return true;
  }

  /// This hook must be implemented to lower outgoing return values, described
  /// by \p Val, into the specified virtual registers \p VRegs.
  /// This hook is used by GlobalISel.
  ///
  /// \p FLI is required for sret demotion.
  ///
  /// \p SwiftErrorVReg is non-zero if the function has a swifterror parameter
  /// that needs to be implicitly returned.
  ///
  /// \return True if the lowering succeeds, false otherwise.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs, FunctionLoweringInfo &FLI,
                           Register SwiftErrorVReg) const {
    // Targets without swifterror support get the simpler overload; a non-zero
    // SwiftErrorVReg here would be a caller bug.
    if (!supportSwiftError()) {
      assert(SwiftErrorVReg == 0 && "attempt to use unsupported swifterror");
      return lowerReturn(MIRBuilder, Val, VRegs, FLI);
    }
    return false;
  }

  /// This hook behaves as the extended lowerReturn function, but for targets
  /// that do not support swifterror value promotion.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs,
                           FunctionLoweringInfo &FLI) const {
    return false;
  }

  /// Hook allowing a target to request SelectionDAG fallback for \p F;
  /// the default never falls back.
  virtual bool fallBackToDAGISel(const Function &F) const { return false; }

  /// This hook must be implemented to lower the incoming (formal)
  /// arguments, described by \p VRegs, for GlobalISel. Each argument
  /// must end up in the related virtual registers described by \p VRegs.
  /// In other words, the first argument should end up in \c VRegs[0],
  /// the second in \c VRegs[1], and so on. For each argument, there will be one
  /// register for each non-aggregate type, as returned by \c computeValueLLTs.
  /// \p MIRBuilder is set to the proper insertion for the argument
  /// lowering. \p FLI is required for sret demotion.
  ///
  /// \return True if the lowering succeeded, false otherwise.
  virtual bool lowerFormalArguments(MachineIRBuilder &MIRBuilder,
                                    const Function &F,
                                    ArrayRef<ArrayRef<Register>> VRegs,
                                    FunctionLoweringInfo &FLI) const {
    return false;
  }

  /// This hook must be implemented to lower the given call instruction,
  /// including argument and return value marshalling.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  virtual bool lowerCall(MachineIRBuilder &MIRBuilder,
                         CallLoweringInfo &Info) const {
    return false;
  }

  /// Lower the given call instruction, including argument and return value
  /// marshalling.
  ///
  /// \p CI is the call/invoke instruction.
  ///
  /// \p ResRegs are the registers where the call's return value should be
  /// stored (or 0 if there is no return value). There will be one register for
  /// each non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p ArgRegs is a list of lists of virtual registers containing each
  /// argument that needs to be passed (argument \c i should be placed in \c
  /// ArgRegs[i]). For each argument, there will be one register for each
  /// non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
  /// parameter, and contains the vreg that the swifterror should be copied into
  /// after the call.
  ///
  /// \p GetCalleeReg is a callback to materialize a register for the callee if
  /// the target determines it cannot jump to the destination based purely on \p
  /// CI. This might be because \p CI is indirect, or because of the limited
  /// range of an immediate jump.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  bool lowerCall(MachineIRBuilder &MIRBuilder, const CallBase &Call,
                 ArrayRef<Register> ResRegs,
                 ArrayRef<ArrayRef<Register>> ArgRegs, Register SwiftErrorVReg,
                 std::function<unsigned()> GetCalleeReg) const;
};
| 452 | |
| 453 | } // end namespace llvm |
| 454 | |
| 455 | #endif // LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H |