//===- llvm/CodeGen/TargetSubtargetInfo.h - Target Information --*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file describes the subtarget options of a Target machine.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_TARGETSUBTARGETINFO_H
#define LLVM_CODEGEN_TARGETSUBTARGETINFO_H

#include "llvm/ADT/APInt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/CodeGen/PBQPRAConstraint.h"
#include "llvm/CodeGen/ScheduleDAGMutation.h"
#include "llvm/CodeGen/SchedulerRegistry.h"
#include "llvm/MC/MCSubtargetInfo.h"
#include "llvm/Support/CodeGen.h"
#include <memory>
#include <vector>

namespace llvm {

class CallLowering;
class InlineAsmLowering;
class InstrItineraryData;
struct InstrStage;
class InstructionSelector;
class LegalizerInfo;
class MachineFunction;
class MachineInstr;
struct MachineSchedPolicy;
struct MCReadAdvanceEntry;
struct MCWriteLatencyEntry;
struct MCWriteProcResEntry;
class RegisterBankInfo;
class SDep;
class SelectionDAGTargetInfo;
class SUnit;
class TargetFrameLowering;
class TargetInstrInfo;
class TargetLowering;
class TargetRegisterClass;
class TargetRegisterInfo;
class TargetSchedModel;
class Triple;

//===----------------------------------------------------------------------===//
///
/// TargetSubtargetInfo - Generic base class for all target subtargets. All
/// Target-specific options that control code generation and printing should
/// be exposed through a TargetSubtargetInfo-derived class.
///
class TargetSubtargetInfo : public MCSubtargetInfo {
protected: // Can only create subclasses...
  TargetSubtargetInfo(const Triple &TT, StringRef CPU, StringRef TuneCPU,
                      StringRef FS, ArrayRef<SubtargetFeatureKV> PF,
                      ArrayRef<SubtargetSubTypeKV> PD,
                      const MCWriteProcResEntry *WPR,
                      const MCWriteLatencyEntry *WL,
                      const MCReadAdvanceEntry *RA, const InstrStage *IS,
                      const unsigned *OC, const unsigned *FP);

public:
  // AntiDepBreakMode - Type of anti-dependence breaking that should
  // be performed before post-RA scheduling.
  using AntiDepBreakMode = enum { ANTIDEP_NONE, ANTIDEP_CRITICAL, ANTIDEP_ALL };
  using RegClassVector = SmallVectorImpl<const TargetRegisterClass *>;

  TargetSubtargetInfo() = delete;
  TargetSubtargetInfo(const TargetSubtargetInfo &) = delete;
  TargetSubtargetInfo &operator=(const TargetSubtargetInfo &) = delete;
  ~TargetSubtargetInfo() override;

  virtual bool isXRaySupported() const { return false; }

  // Interfaces to the major aspects of target machine information:
  //
  // -- Instruction opcode and operand information
  // -- Pipelines and scheduling information
  // -- Stack frame information
  // -- Selection DAG lowering information
  // -- Call lowering information
  //
  // N.B. These objects may change during compilation. It's not safe to cache
  // them between functions.
  virtual const TargetInstrInfo *getInstrInfo() const { return nullptr; }
  virtual const TargetFrameLowering *getFrameLowering() const {
    return nullptr;
  }
  virtual const TargetLowering *getTargetLowering() const { return nullptr; }
  virtual const SelectionDAGTargetInfo *getSelectionDAGInfo() const {
    return nullptr;
  }
  virtual const CallLowering *getCallLowering() const { return nullptr; }

  virtual const InlineAsmLowering *getInlineAsmLowering() const {
    return nullptr;
  }

  // FIXME: This lets targets specialize the selector by subtarget (which lets
  // us do things like a dedicated avx512 selector).  However, we might want
  // to also specialize selectors by MachineFunction, which would let us be
  // aware of optsize/optnone and such.
  virtual InstructionSelector *getInstructionSelector() const {
    return nullptr;
  }

  /// Targets can subclass this hook to select a different DAG scheduler.
  virtual RegisterScheduler::FunctionPassCtor
      getDAGScheduler(CodeGenOpt::Level) const {
    return nullptr;
  }

  virtual const LegalizerInfo *getLegalizerInfo() const { return nullptr; }

  /// getRegisterInfo - If register information is available, return it. If
  /// not, return null.
  virtual const TargetRegisterInfo *getRegisterInfo() const { return nullptr; }

  /// If the information for the register banks is available, return it.
  /// Otherwise return nullptr.
  virtual const RegisterBankInfo *getRegBankInfo() const { return nullptr; }

  /// getInstrItineraryData - Returns instruction itinerary data for the target
  /// or specific subtarget.
  virtual const InstrItineraryData *getInstrItineraryData() const {
    return nullptr;
  }

  /// Resolve a SchedClass at runtime, where SchedClass identifies an
  /// MCSchedClassDesc with the isVariant property. This may return the ID of
  /// another variant SchedClass, but repeated invocation must quickly terminate
  /// in a nonvariant SchedClass.
  virtual unsigned resolveSchedClass(unsigned SchedClass,
                                     const MachineInstr *MI,
                                     const TargetSchedModel *SchedModel) const {
    return 0;
  }
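  //
  // How this hook is typically consumed (illustrative sketch only, with locals
  // simplified; not part of the interface): the scheduler starts from the
  // instruction's static class and re-resolves until it reaches a nonvariant
  // class.
  //
  //   unsigned Idx = MI->getDesc().getSchedClass();
  //   const MCSchedClassDesc *SC =
  //       SchedModel.getMCSchedModel()->getSchedClassDesc(Idx);
  //   while (SC->isVariant()) {
  //     Idx = STI->resolveSchedClass(Idx, MI, &SchedModel);
  //     SC = SchedModel.getMCSchedModel()->getSchedClassDesc(Idx);
  //   }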

  /// Returns true if MI is a dependency-breaking zero-idiom instruction for
  /// the subtarget.
  ///
  /// This function also sets bits in Mask related to input operands that
  /// are not in a data dependency relationship. There is one bit for each
  /// machine operand; implicit operands follow explicit operands in the bit
  /// representation used for Mask. An empty mask (i.e. a mask with all bits
  /// cleared) means that data dependencies are "broken" for all the explicit
  /// input machine operands of MI.
  virtual bool isZeroIdiom(const MachineInstr *MI, APInt &Mask) const {
    return false;
  }
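  //
  // A minimal sketch of how a hypothetical out-of-tree subtarget might override
  // this hook (the MySubtarget/MyTarget names are illustrative only; in-tree
  // targets normally have this override auto-generated by tablegen rather than
  // written by hand):
  //
  //   bool MySubtarget::isZeroIdiom(const MachineInstr *MI, APInt &Mask) const {
  //     if (MI->getOpcode() == MyTarget::XOR32rr &&
  //         MI->getOperand(1).getReg() == MI->getOperand(2).getReg()) {
  //       // XOR of a register with itself always produces zero, so its register
  //       // reads carry no real dependency. Leaving the mask all-clear reports
  //       // the dependencies of every explicit input operand as broken.
  //       Mask = APInt(MI->getNumOperands(), 0);
  //       return true;
  //     }
  //     return false;
  //   }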

  /// Returns true if MI is a dependency-breaking instruction for the
  /// subtarget.
  ///
  /// Similar in behavior to `isZeroIdiom`. However, it knows how to identify
  /// all dependency-breaking instructions (i.e. not just zero-idioms).
  ///
  /// As for `isZeroIdiom`, this method returns a mask of "broken" dependencies.
  /// (See method `isZeroIdiom` for a detailed description of Mask).
  virtual bool isDependencyBreaking(const MachineInstr *MI, APInt &Mask) const {
    return isZeroIdiom(MI, Mask);
  }

  /// Returns true if MI is a candidate for move elimination.
  ///
  /// A candidate for move elimination may be optimized out at the register
  /// renaming stage. Subtargets can specify the set of optimizable moves by
  /// instantiating the tablegen class `IsOptimizableRegisterMove` (see
  /// llvm/Target/TargetInstrPredicate.td).
  ///
  /// SubtargetEmitter is responsible for processing all the definitions of
  /// class IsOptimizableRegisterMove and for auto-generating an override of
  /// this method.
  virtual bool isOptimizableRegisterMove(const MachineInstr *MI) const {
    return false;
  }

  /// True if the subtarget should run MachineScheduler after aggressive
  /// coalescing.
  ///
  /// This currently replaces the SelectionDAG scheduler with the "source" order
  /// scheduler (though see below for an option to turn this off and use the
  /// TargetLowering preference). It does not yet disable the postRA scheduler.
  virtual bool enableMachineScheduler() const;

  /// True if the machine scheduler should disable the TLI preference
  /// for preRA scheduling with the source level scheduler.
  virtual bool enableMachineSchedDefaultSched() const { return true; }

  /// True if the subtarget should run MachinePipeliner.
  virtual bool enableMachinePipeliner() const { return true; }

  /// True if the subtarget should enable joining global copies.
  ///
  /// By default this is enabled if the machine scheduler is enabled, but
  /// can be overridden.
  virtual bool enableJoinGlobalCopies() const;

  /// True if the subtarget should run a scheduler after register allocation.
  ///
  /// By default this queries the PostRAScheduling bit in the scheduling model,
  /// which is the preferred way to influence this.
  virtual bool enablePostRAScheduler() const;

  /// True if the subtarget should run a machine scheduler after register
  /// allocation.
  virtual bool enablePostRAMachineScheduler() const;

  /// True if the subtarget should run the atomic expansion pass.
  virtual bool enableAtomicExpand() const;

  /// True if the subtarget should run the indirectbr expansion pass.
  virtual bool enableIndirectBrExpand() const;

  /// Override generic scheduling policy within a region.
  ///
  /// This is a convenient way for targets that don't provide any custom
  /// scheduling heuristics (no custom MachineSchedStrategy) to make
  /// changes to the generic scheduling policy.
  virtual void overrideSchedPolicy(MachineSchedPolicy &Policy,
                                   unsigned NumRegionInstrs) const {}
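  //
  // A minimal sketch, assuming a hypothetical MySubtarget: small regions are
  // often served better by plain latency hiding than by register-pressure
  // tracking, and that preference can be expressed here without writing a
  // custom MachineSchedStrategy.
  //
  //   void MySubtarget::overrideSchedPolicy(MachineSchedPolicy &Policy,
  //                                         unsigned NumRegionInstrs) const {
  //     if (NumRegionInstrs <= 8)
  //       Policy.ShouldTrackPressure = false; // favor ILP in tiny regions
  //     Policy.OnlyTopDown = true; // assume this core prefers top-down scheduling
  //   }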

  // Perform target-specific adjustments to the latency of a schedule
  // dependency.
  // If a pair of operands is associated with the schedule dependency, DefOpIdx
  // and UseOpIdx are the indices of the operands in Def and Use, respectively.
  // Otherwise, either may be -1.
  virtual void adjustSchedDependency(SUnit *Def, int DefOpIdx, SUnit *Use,
                                     int UseOpIdx, SDep &Dep) const {}
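  //
  // Illustrative override, assuming a hypothetical MySubtarget whose core
  // forwards results into dependent stores one cycle early:
  //
  //   void MySubtarget::adjustSchedDependency(SUnit *Def, int DefOpIdx,
  //                                           SUnit *Use, int UseOpIdx,
  //                                           SDep &Dep) const {
  //     if (Dep.getKind() == SDep::Data && Use->getInstr() &&
  //         Use->getInstr()->mayStore() && Dep.getLatency() > 1)
  //       Dep.setLatency(Dep.getLatency() - 1); // store data is forwarded early
  //   }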

  // For use with PostRAScheduling: get the anti-dependence breaking that should
  // be performed before post-RA scheduling.
  virtual AntiDepBreakMode getAntiDepBreakMode() const { return ANTIDEP_NONE; }

  // For use with PostRAScheduling: in CriticalPathRCs, return any register
  // classes that should only be considered for anti-dependence breaking if they
  // are on the critical path.
  virtual void getCriticalPathRCs(RegClassVector &CriticalPathRCs) const {
    return CriticalPathRCs.clear();
  }
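  //
  // A hedged sketch, assuming a hypothetical MyTarget register class: a target
  // might restrict anti-dependence breaking to its general-purpose registers
  // when those dominate the critical path.
  //
  //   void MySubtarget::getCriticalPathRCs(RegClassVector &CriticalPathRCs) const {
  //     CriticalPathRCs.clear();
  //     CriticalPathRCs.push_back(&MyTarget::GPR64RegClass);
  //   }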

  // Provide an ordered list of schedule DAG mutations for the post-RA
  // scheduler.
  virtual void getPostRAMutations(
      std::vector<std::unique_ptr<ScheduleDAGMutation>> &Mutations) const {
  }

  // Provide an ordered list of schedule DAG mutations for the machine
  // pipeliner.
  virtual void getSMSMutations(
      std::vector<std::unique_ptr<ScheduleDAGMutation>> &Mutations) const {
  }

  /// Default to using the DFA for resource management; return false when the
  /// target will use ProcResource in InstrSchedModel instead.
  virtual bool useDFAforSMS() const { return true; }

  // For use with PostRAScheduling: get the minimum optimization level needed
  // to enable post-RA scheduling.
  virtual CodeGenOpt::Level getOptLevelToEnablePostRAScheduler() const {
    return CodeGenOpt::Default;
  }

  /// True if the subtarget should run the local reassignment
  /// heuristic of the register allocator.
  /// This heuristic may be compile-time intensive; \p OptLevel provides
  /// a finer grain to tune the register allocator.
  virtual bool enableRALocalReassignment(CodeGenOpt::Level OptLevel) const;

  /// True if the subtarget should consider the cost of local intervals
  /// created by a split candidate when choosing the best split candidate. This
  /// heuristic may be compile-time intensive.
  virtual bool enableAdvancedRASplitCost() const;

  /// Enable use of alias analysis during code generation (during MI
  /// scheduling, DAGCombine, etc.).
  virtual bool useAA() const;

  /// \brief Sink addresses into blocks using GEP instructions rather than
  /// pointer casts and arithmetic.
  virtual bool addrSinkUsingGEPs() const {
    return useAA();
  }

  /// Enable the use of the early if-conversion pass.
  virtual bool enableEarlyIfConversion() const { return false; }

  /// Return PBQPConstraint(s) for the target.
  ///
  /// Override to provide custom PBQP constraints.
  virtual std::unique_ptr<PBQPRAConstraint> getCustomPBQPConstraints() const {
    return nullptr;
  }

  /// Enable tracking of subregister liveness in the register allocator.
  /// Please use MachineRegisterInfo::subRegLivenessEnabled() instead where
  /// possible.
  virtual bool enableSubRegLiveness() const { return false; }

  /// This is called after a .mir file has been loaded.
  virtual void mirFileLoaded(MachineFunction &MF) const;

  /// True if the register allocator should use the allocation orders exactly as
  /// written in the tablegen descriptions, false if it should allocate
  /// the specified physical register later if it is callee-saved.
  virtual bool ignoreCSRForAllocationOrder(const MachineFunction &MF,
                                           unsigned PhysReg) const {
    return false;
  }
};

} // end namespace llvm

#endif // LLVM_CODEGEN_TARGETSUBTARGETINFO_H