//===- MemorySSAUpdater.h - Memory SSA Updater-------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// \file
// An automatic updater for MemorySSA that handles arbitrary insertion,
// deletion, and moves. It performs phi insertion where necessary, and
// automatically updates the MemorySSA IR to be correct.
// While updating loads or removing instructions is often easy enough not to
// need this, updating stores should generally not be attempted outside this
// API.
//
// Basic API usage:
// Create the memory access you want for the instruction (this is mainly so
// we know where it is, without having to duplicate the entire set of create
// functions MemorySSA supports).
// Call insertDef or insertUse depending on whether it's a MemoryUse or a
// MemoryDef.
// That's it.
//
// For moving, first move the instruction itself using the normal SSA
// instruction moving API, then just call moveBefore, moveAfter, or
// moveToPlace with the right arguments.
//
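// A minimal usage sketch (the names `MSSA`, `SI`, and `Updater` are
// illustrative, not part of this API), assuming a new store `SI` has just
// been inserted as the last memory instruction of its block:
//
//   MemorySSAUpdater Updater(MSSA);
//   MemoryAccess *NewAcc = Updater.createMemoryAccessInBB(
//       SI, /*Definition=*/nullptr, SI->getParent(), MemorySSA::End);
//   Updater.insertDef(cast<MemoryDef>(NewAcc), /*RenameUses=*/true);
//
// A read-only instruction would instead be registered with
// insertUse(cast<MemoryUse>(NewAcc)).
//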
//===----------------------------------------------------------------------===//

#ifndef LLVM_ANALYSIS_MEMORYSSAUPDATER_H
#define LLVM_ANALYSIS_MEMORYSSAUPDATER_H

#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/IR/ValueMap.h"
#include "llvm/Support/CFGDiff.h"
#include <utility>

namespace llvm {

class BasicBlock;
class BranchInst;
class DominatorTree;
class Instruction;
class LoopBlocksRPO;

using ValueToValueMapTy = ValueMap<const Value *, WeakTrackingVH>;
using PhiToDefMap = SmallDenseMap<MemoryPhi *, MemoryAccess *>;
using CFGUpdate = cfg::Update<BasicBlock *>;

class MemorySSAUpdater {
private:
  MemorySSA *MSSA;

  /// We use WeakVH rather than a costly deletion to deal with dangling
  /// pointers. MemoryPhis are created eagerly and sometimes get zapped shortly
  /// afterwards.
  SmallVector<WeakVH, 16> InsertedPHIs;

  SmallPtrSet<BasicBlock *, 8> VisitedBlocks;
  SmallSet<AssertingVH<MemoryPhi>, 8> NonOptPhis;

public:
  MemorySSAUpdater(MemorySSA *MSSA) : MSSA(MSSA) {}

  /// Insert a definition into the MemorySSA IR. RenameUses will rename any use
  /// below the new def block (and any inserted phis). RenameUses should be set
  /// to true if the definition may cause new aliases for loads below it. This
  /// is not the case for hoisting or sinking or other forms of code *movement*.
  /// It *is* the case for straight code insertion.
  /// For example:
  /// store a
  /// if (foo) { }
  /// load a
  ///
  /// Moving the store into the if block, and calling insertDef, does not
  /// require RenameUses.
  /// However, changing it to:
  /// store a
  /// if (foo) { store b }
  /// load a
  /// where a may alias b, *does* require RenameUses be set to true.
  void insertDef(MemoryDef *Def, bool RenameUses = false);
  void insertUse(MemoryUse *Use, bool RenameUses = false);
  /// Update the MemoryPhi in `To` following an edge deletion between `From`
  /// and `To`. If `To` becomes unreachable, a call to removeBlocks should be
  /// made.
  void removeEdge(BasicBlock *From, BasicBlock *To);
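  // A hedged usage sketch (`From`, `To`, and `Updater` are illustrative
  // names): once `From`'s terminator no longer branches to `To` and `To`'s IR
  // phis have been updated (e.g. via To->removePredecessor(From)), the
  // MemoryPhi in `To` is updated with:
  //
  //   Updater.removeEdge(From, To);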
  /// Update the MemoryPhi in `To` to have a single incoming edge from `From`,
  /// following a CFG change that replaced multiple edges (switch) with a
  /// direct branch.
  void removeDuplicatePhiEdgesBetween(const BasicBlock *From,
                                      const BasicBlock *To);
  /// Update MemorySSA when inserting a unique backedge block for a loop.
  void updatePhisWhenInsertingUniqueBackedgeBlock(BasicBlock *LoopHeader,
                                                  BasicBlock *LoopPreheader,
                                                  BasicBlock *BackedgeBlock);
  /// Update MemorySSA after a loop was cloned, given the blocks in RPO order,
  /// the exit blocks and a 1:1 mapping of all blocks and instructions cloned.
  /// This involves duplicating all defs and uses in the cloned blocks.
  /// Updating phi nodes in exit block successors is done separately.
  void updateForClonedLoop(const LoopBlocksRPO &LoopBlocks,
                           ArrayRef<BasicBlock *> ExitBlocks,
                           const ValueToValueMapTy &VM,
                           bool IgnoreIncomingWithNoClones = false);
  // Block BB was fully or partially cloned into its predecessor P1. VM
  // contains the 1:1 mapping of instructions cloned, and VM[BB] = P1.
  void updateForClonedBlockIntoPred(BasicBlock *BB, BasicBlock *P1,
                                    const ValueToValueMapTy &VM);
  /// Update phi nodes in exit block successors following cloning. Exit blocks
  /// that were not cloned don't have additional predecessors added.
  void updateExitBlocksForClonedLoop(ArrayRef<BasicBlock *> ExitBlocks,
                                     const ValueToValueMapTy &VMap,
                                     DominatorTree &DT);
  void updateExitBlocksForClonedLoop(
      ArrayRef<BasicBlock *> ExitBlocks,
      ArrayRef<std::unique_ptr<ValueToValueMapTy>> VMaps, DominatorTree &DT);

  /// Apply CFG updates, analogous to the DT edge updates. By default, the DT
  /// is assumed to be already up to date. If UpdateDTFirst is true, first
  /// update the DT with the same updates.
  void applyUpdates(ArrayRef<CFGUpdate> Updates, DominatorTree &DT,
                    bool UpdateDTFirst = false);
  /// Apply CFG insert updates, analogous to the DT edge updates.
  void applyInsertUpdates(ArrayRef<CFGUpdate> Updates, DominatorTree &DT);
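  // A hedged sketch of reporting a newly inserted CFG edge (`Pred`, `NewSucc`,
  // `Updater`, and `DT` are illustrative names; DominatorTree::Insert is the
  // cfg::UpdateKind value for edge insertion):
  //
  //   SmallVector<CFGUpdate, 4> Updates;
  //   Updates.push_back({DominatorTree::Insert, Pred, NewSucc});
  //   Updater.applyUpdates(Updates, DT); // DT already reflects the new edge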

  void moveBefore(MemoryUseOrDef *What, MemoryUseOrDef *Where);
  void moveAfter(MemoryUseOrDef *What, MemoryUseOrDef *Where);
  void moveToPlace(MemoryUseOrDef *What, BasicBlock *BB,
                   MemorySSA::InsertionPlace Where);
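  // A hedged sketch of keeping MemorySSA in sync when hoisting an instruction
  // `I` to the end of block `Dest` (names are illustrative):
  //
  //   I->moveBefore(Dest->getTerminator()); // move the IR instruction first
  //   Updater.moveToPlace(MSSA->getMemoryAccess(I), Dest, MemorySSA::End);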
  /// `From` block was spliced into `From` and `To`. There is a CFG edge from
  /// `From` to `To`. Move all accesses from `From` to `To` starting at
  /// instruction `Start`. `To` is a newly created BB, so it is empty of
  /// MemorySSA::MemoryAccesses. Edges are already updated, so successors of
  /// `To` with MPhi nodes need to update their incoming block.
  /// |------|        |------|
  /// | From |        | From |
  /// |      |        |------|
  /// |      |           ||
  /// |      |   =>      \/
  /// |      |        |------|  <- Start
  /// |      |        |  To  |
  /// |------|        |------|
  void moveAllAfterSpliceBlocks(BasicBlock *From, BasicBlock *To,
                                Instruction *Start);
  /// `From` block was merged into `To`. There is a CFG edge from `To` to
  /// `From`. `To` still branches to `From`, but all instructions were moved
  /// and `From` is now an empty block; `From` is about to be deleted. Move all
  /// accesses from `From` to `To` starting at instruction `Start`. `To` may
  /// have multiple successors, `From` has a single predecessor. `From` may
  /// have successors with MPhi nodes; replace their incoming block with `To`.
  /// |------|        |------|
  /// |  To  |        |  To  |
  /// |------|        |      |
  ///    ||      =>   |      |
  ///    \/           |      |
  /// |------|        |      |  <- Start
  /// | From |        |      |
  /// |------|        |------|
  void moveAllAfterMergeBlocks(BasicBlock *From, BasicBlock *To,
                               Instruction *Start);
  /// A new empty BasicBlock (New) now branches directly to Old. Some of
  /// Old's predecessors (Preds) are now branching to New instead of Old.
  /// If New is the only predecessor, move Old's Phi, if present, to New.
  /// Otherwise, add a new Phi in New with appropriate incoming values, and
  /// update the incoming values in Old's Phi node too, if present.
  void wireOldPredecessorsToNewImmediatePredecessor(
      BasicBlock *Old, BasicBlock *New, ArrayRef<BasicBlock *> Preds,
      bool IdenticalEdgesWereMerged = true);
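  // A hedged sketch of the block-splitting scenario this supports (`Old`,
  // `New`, `Preds`, and `Updater` are illustrative names): after creating the
  // empty block `New` that branches to `Old` and retargeting every block in
  // `Preds` to branch to `New` instead:
  //
  //   Updater.wireOldPredecessorsToNewImmediatePredecessor(
  //       Old, New, Preds, /*IdenticalEdgesWereMerged=*/true);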
  // The below are utility functions. Other than creation of accesses to pass
  // to insertDef, and removeMemoryAccess to remove accesses, you should
  // generally not attempt to update MemorySSA yourself. It is very non-trivial
  // to get the edge cases right, and the above calls already operate in
  // near-optimal time bounds.

  /// Create a MemoryAccess in MemorySSA at a specified point in a block,
  /// with a specified clobbering definition.
  ///
  /// Returns the new MemoryAccess.
  /// This should be called when a memory instruction is created that is being
  /// used to replace an existing memory instruction. It will *not* create PHI
  /// nodes, or verify the clobbering definition. The insertion place is used
  /// solely to determine where in the MemorySSA access lists the instruction
  /// will be placed. The caller is expected to keep the ordering the same as
  /// the instructions.
  /// Note: If a MemoryAccess already exists for I, this function will make it
  /// inaccessible and it *must* have removeMemoryAccess called on it.
  MemoryAccess *createMemoryAccessInBB(Instruction *I, MemoryAccess *Definition,
                                       const BasicBlock *BB,
                                       MemorySSA::InsertionPlace Point);

  /// Create a MemoryAccess in MemorySSA before or after an existing
  /// MemoryAccess.
  ///
  /// Returns the new MemoryAccess.
  /// This should be called when a memory instruction is created that is being
  /// used to replace an existing memory instruction. It will *not* create PHI
  /// nodes, or verify the clobbering definition.
  ///
  /// Note: If a MemoryAccess already exists for I, this function will make it
  /// inaccessible and it *must* have removeMemoryAccess called on it.
  MemoryUseOrDef *createMemoryAccessBefore(Instruction *I,
                                           MemoryAccess *Definition,
                                           MemoryUseOrDef *InsertPt);
  MemoryUseOrDef *createMemoryAccessAfter(Instruction *I,
                                          MemoryAccess *Definition,
                                          MemoryAccess *InsertPt);
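  // A hedged sketch of replacing one store with another at the same point
  // (`OldSI`, `NewSI`, and `Updater` are illustrative names):
  //
  //   MemoryUseOrDef *OldAcc = MSSA->getMemoryAccess(OldSI);
  //   MemoryUseOrDef *NewAcc = Updater.createMemoryAccessBefore(
  //       NewSI, OldAcc->getDefiningAccess(), OldAcc);
  //   Updater.insertDef(cast<MemoryDef>(NewAcc), /*RenameUses=*/true);
  //
  // The old access and instruction are then retired with removeMemoryAccess
  // (see below) before erasing OldSI.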

  /// Remove a MemoryAccess from MemorySSA, including updating all
  /// definitions and uses.
  /// This should be called when a memory instruction that has a MemoryAccess
  /// associated with it is erased from the program. For example, if a store or
  /// load is simply erased (not replaced), removeMemoryAccess should be called
  /// on the MemoryAccess for that store/load.
  void removeMemoryAccess(MemoryAccess *, bool OptimizePhis = false);

  /// Remove MemoryAccess for a given instruction, if a MemoryAccess exists.
  /// This should be called when an instruction (load/store) is deleted from
  /// the program.
  void removeMemoryAccess(const Instruction *I, bool OptimizePhis = false) {
    if (MemoryAccess *MA = MSSA->getMemoryAccess(I))
      removeMemoryAccess(MA, OptimizePhis);
  }
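  // A hedged sketch of erasing a dead load (`LI` and `Updater` are
  // illustrative names). The MemoryAccess must be removed before the IR
  // instruction itself is erased:
  //
  //   Updater.removeMemoryAccess(LI);
  //   LI->eraseFromParent();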

  /// Remove all MemoryAccesses in a set of BasicBlocks about to be deleted.
  /// Assumption we make here: all uses of deleted defs and phis must either
  /// occur in blocks about to be deleted (thus will be deleted as well), or
  /// they occur in phis that will simply lose an incoming value.
  /// Deleted blocks still have successor info, but their predecessor edges and
  /// Phi nodes may already be updated. Instructions in DeadBlocks should be
  /// deleted after this call.
  void removeBlocks(const SmallSetVector<BasicBlock *, 8> &DeadBlocks);

  /// Instruction I will be changed to an unreachable. Remove all accesses in
  /// I's block that follow I (inclusive), and update the Phis in the block's
  /// successors.
  void changeToUnreachable(const Instruction *I);

  /// Conditional branch BI is changed or replaced with an unconditional branch
  /// to `To`. Update Phis in BI's successors to remove BI's BB.
  void changeCondBranchToUnconditionalTo(const BranchInst *BI,
                                         const BasicBlock *To);

  /// Get a handle on the MemorySSA being updated.
  MemorySSA *getMemorySSA() const { return MSSA; }

private:
  // Move What before Where in the MemorySSA IR.
  template <class WhereType>
  void moveTo(MemoryUseOrDef *What, BasicBlock *BB, WhereType Where);
  // Move all memory accesses from `From` to `To` starting at `Start`.
  // Restrictions apply, see public wrappers of this method.
  void moveAllAccesses(BasicBlock *From, BasicBlock *To, Instruction *Start);
  MemoryAccess *getPreviousDef(MemoryAccess *);
  MemoryAccess *getPreviousDefInBlock(MemoryAccess *);
  MemoryAccess *
  getPreviousDefFromEnd(BasicBlock *,
                        DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &);
  MemoryAccess *
  getPreviousDefRecursive(BasicBlock *,
                          DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &);
  MemoryAccess *recursePhi(MemoryAccess *Phi);
  MemoryAccess *tryRemoveTrivialPhi(MemoryPhi *Phi);
  template <class RangeType>
  MemoryAccess *tryRemoveTrivialPhi(MemoryPhi *Phi, RangeType &Operands);
  void tryRemoveTrivialPhis(ArrayRef<WeakVH> UpdatedPHIs);
  void fixupDefs(const SmallVectorImpl<WeakVH> &);
  // Clone all uses and defs from BB to NewBB given a 1:1 map of all
  // instructions and blocks cloned, and a map of MemoryPhi : Definition
  // (MemoryAccess Phi or Def). VMap maps old instructions to cloned
  // instructions and old blocks to cloned blocks. MPhiMap is created in the
  // caller of this private method, and maps existing MemoryPhis to new
  // definitions that new MemoryAccesses must point to. These definitions may
  // not necessarily be MemoryPhis themselves, they may be MemoryDefs. As such,
  // the map is between MemoryPhis and MemoryAccesses, where the MemoryAccesses
  // may be MemoryPhis or MemoryDefs and not MemoryUses.
  // If CloneWasSimplified = false, the clone was exact. Otherwise, assume that
  // the clone involved simplifications that may have: (1) turned a MemoryUse
  // into an instruction that MemorySSA has no representation for, or (2)
  // turned a MemoryDef into a MemoryUse or an instruction that MemorySSA has
  // no representation for. No other cases are supported.
  void cloneUsesAndDefs(BasicBlock *BB, BasicBlock *NewBB,
                        const ValueToValueMapTy &VMap, PhiToDefMap &MPhiMap,
                        bool CloneWasSimplified = false);
  template <typename Iter>
  void privateUpdateExitBlocksForClonedLoop(ArrayRef<BasicBlock *> ExitBlocks,
                                            Iter ValuesBegin, Iter ValuesEnd,
                                            DominatorTree &DT);
  void applyInsertUpdates(ArrayRef<CFGUpdate>, DominatorTree &DT,
                          const GraphDiff<BasicBlock *> *GD);
};
} // end namespace llvm

#endif // LLVM_ANALYSIS_MEMORYSSAUPDATER_H