//==- BlockFrequencyInfoImpl.h - Block Frequency Implementation --*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Shared implementation of BlockFrequency for IR and Machine Instructions.
// See the documentation below for BlockFrequencyInfoImpl for details.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_ANALYSIS_BLOCKFREQUENCYINFOIMPL_H
#define LLVM_ANALYSIS_BLOCKFREQUENCYINFOIMPL_H

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/GraphTraits.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/SparseBitVector.h"
#include "llvm/ADT/Twine.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/Support/BlockFrequency.h"
#include "llvm/Support/BranchProbability.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/DOTGraphTraits.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/ScaledNumber.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <deque>
#include <iterator>
#include <limits>
#include <list>
#include <string>
#include <utility>
#include <vector>

#define DEBUG_TYPE "block-freq"

extern llvm::cl::opt<bool> CheckBFIUnknownBlockQueries;

namespace llvm {

class BranchProbabilityInfo;
class Function;
class Loop;
class LoopInfo;
class MachineBasicBlock;
class MachineBranchProbabilityInfo;
class MachineFunction;
class MachineLoop;
class MachineLoopInfo;

namespace bfi_detail {

struct IrreducibleGraph;

// This is part of a workaround for a GCC 4.7 crash on lambdas.
template <class BT> struct BlockEdgesAdder;

/// Mass of a block.
///
/// This class implements a sort of fixed-point fraction always between 0.0 and
/// 1.0. getMass() == std::numeric_limits<uint64_t>::max() indicates a value of
/// 1.0.
///
/// Masses can be added and subtracted. Simple saturation arithmetic is used,
/// so arithmetic operations never overflow or underflow.
///
/// Masses can be multiplied. Multiplication treats full mass as 1.0 and uses
/// an inexpensive floating-point algorithm that's off-by-one (almost, but not
/// quite, maximum precision).
///
/// Masses can be scaled by \a BranchProbability at maximum precision.
class BlockMass {
  uint64_t Mass = 0;

public:
  BlockMass() = default;
  explicit BlockMass(uint64_t Mass) : Mass(Mass) {}

  static BlockMass getEmpty() { return BlockMass(); }

  static BlockMass getFull() {
    return BlockMass(std::numeric_limits<uint64_t>::max());
  }

  uint64_t getMass() const { return Mass; }

  bool isFull() const { return Mass == std::numeric_limits<uint64_t>::max(); }
  bool isEmpty() const { return !Mass; }

  bool operator!() const { return isEmpty(); }

  /// Add another mass.
  ///
  /// Adds another mass, saturating at \a isFull() rather than overflowing.
  BlockMass &operator+=(BlockMass X) {
    uint64_t Sum = Mass + X.Mass;
    Mass = Sum < Mass ? std::numeric_limits<uint64_t>::max() : Sum;
    return *this;
  }

  /// Subtract another mass.
  ///
  /// Subtracts another mass, saturating at \a isEmpty() rather than
  /// underflowing.
  BlockMass &operator-=(BlockMass X) {
    uint64_t Diff = Mass - X.Mass;
    Mass = Diff > Mass ? 0 : Diff;
    return *this;
  }

  BlockMass &operator*=(BranchProbability P) {
    Mass = P.scale(Mass);
    return *this;
  }

  bool operator==(BlockMass X) const { return Mass == X.Mass; }
  bool operator!=(BlockMass X) const { return Mass != X.Mass; }
  bool operator<=(BlockMass X) const { return Mass <= X.Mass; }
  bool operator>=(BlockMass X) const { return Mass >= X.Mass; }
  bool operator<(BlockMass X) const { return Mass < X.Mass; }
  bool operator>(BlockMass X) const { return Mass > X.Mass; }

  /// Convert to scaled number.
  ///
  /// Convert to \a ScaledNumber. \a isFull() gives 1.0, while \a isEmpty()
  /// gives slightly above 0.0.
  ScaledNumber<uint64_t> toScaled() const;

  void dump() const;
  raw_ostream &print(raw_ostream &OS) const;
};

inline BlockMass operator+(BlockMass L, BlockMass R) {
  return BlockMass(L) += R;
}
inline BlockMass operator-(BlockMass L, BlockMass R) {
  return BlockMass(L) -= R;
}
inline BlockMass operator*(BlockMass L, BranchProbability R) {
  return BlockMass(L) *= R;
}
inline BlockMass operator*(BranchProbability L, BlockMass R) {
  return BlockMass(R) *= L;
}

inline raw_ostream &operator<<(raw_ostream &OS, BlockMass X) {
  return X.print(OS);
}
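
// An illustrative sketch of the saturating semantics documented on BlockMass
// above (the particular values are examples only, not part of any interface
// contract):
//
//   BlockMass Half = BlockMass::getFull() * BranchProbability(1, 2);
//   BlockMass Sum = BlockMass::getFull() + Half;  // saturates at isFull()
//   BlockMass Zero = Half - BlockMass::getFull(); // saturates at isEmpty()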

} // end namespace bfi_detail

/// Base class for BlockFrequencyInfoImpl
///
/// BlockFrequencyInfoImplBase has supporting data structures and some
/// algorithms for BlockFrequencyInfoImpl. Only algorithms that depend on
/// the block type (or that call such algorithms) are skipped here.
///
/// Nevertheless, the majority of the overall algorithm documentation lives with
/// BlockFrequencyInfoImpl. See there for details.
class BlockFrequencyInfoImplBase {
public:
  using Scaled64 = ScaledNumber<uint64_t>;
  using BlockMass = bfi_detail::BlockMass;

  /// Representative of a block.
  ///
  /// This is a simple wrapper around an index into the reverse-post-order
  /// traversal of the blocks.
  ///
  /// Unlike a block pointer, its order has meaning (location in the
  /// topological sort) and its class is the same regardless of block type.
  struct BlockNode {
    using IndexType = uint32_t;

    IndexType Index;

    BlockNode() : Index(std::numeric_limits<uint32_t>::max()) {}
    BlockNode(IndexType Index) : Index(Index) {}

    bool operator==(const BlockNode &X) const { return Index == X.Index; }
    bool operator!=(const BlockNode &X) const { return Index != X.Index; }
    bool operator<=(const BlockNode &X) const { return Index <= X.Index; }
    bool operator>=(const BlockNode &X) const { return Index >= X.Index; }
    bool operator<(const BlockNode &X) const { return Index < X.Index; }
    bool operator>(const BlockNode &X) const { return Index > X.Index; }

    bool isValid() const { return Index <= getMaxIndex(); }

    static size_t getMaxIndex() {
      return std::numeric_limits<uint32_t>::max() - 1;
    }
  };

  /// Stats about a block itself.
  struct FrequencyData {
    Scaled64 Scaled;
    uint64_t Integer;
  };

  /// Data about a loop.
  ///
  /// Contains the data necessary to represent a loop as a pseudo-node once it's
  /// packaged.
  struct LoopData {
    using ExitMap = SmallVector<std::pair<BlockNode, BlockMass>, 4>;
    using NodeList = SmallVector<BlockNode, 4>;
    using HeaderMassList = SmallVector<BlockMass, 1>;

    LoopData *Parent;            ///< The parent loop.
    bool IsPackaged = false;     ///< Whether this has been packaged.
    uint32_t NumHeaders = 1;     ///< Number of headers.
    ExitMap Exits;               ///< Successor edges (and weights).
    NodeList Nodes;              ///< Header and the members of the loop.
    HeaderMassList BackedgeMass; ///< Mass returned to each loop header.
    BlockMass Mass;
    Scaled64 Scale;

    LoopData(LoopData *Parent, const BlockNode &Header)
        : Parent(Parent), Nodes(1, Header), BackedgeMass(1) {}

    template <class It1, class It2>
    LoopData(LoopData *Parent, It1 FirstHeader, It1 LastHeader, It2 FirstOther,
             It2 LastOther)
        : Parent(Parent), Nodes(FirstHeader, LastHeader) {
      NumHeaders = Nodes.size();
      Nodes.insert(Nodes.end(), FirstOther, LastOther);
      BackedgeMass.resize(NumHeaders);
    }

    bool isHeader(const BlockNode &Node) const {
      if (isIrreducible())
        return std::binary_search(Nodes.begin(), Nodes.begin() + NumHeaders,
                                  Node);
      return Node == Nodes[0];
    }

    BlockNode getHeader() const { return Nodes[0]; }
    bool isIrreducible() const { return NumHeaders > 1; }

    HeaderMassList::difference_type getHeaderIndex(const BlockNode &B) {
      assert(isHeader(B) && "this is only valid on loop header blocks");
      if (isIrreducible())
        return std::lower_bound(Nodes.begin(), Nodes.begin() + NumHeaders, B) -
               Nodes.begin();
      return 0;
    }

    NodeList::const_iterator members_begin() const {
      return Nodes.begin() + NumHeaders;
    }

    NodeList::const_iterator members_end() const { return Nodes.end(); }
    iterator_range<NodeList::const_iterator> members() const {
      return make_range(members_begin(), members_end());
    }
  };

  /// Index of loop information.
  struct WorkingData {
    BlockNode Node;           ///< This node.
    LoopData *Loop = nullptr; ///< The loop this block is inside.
    BlockMass Mass;           ///< Mass distribution from the entry block.

    WorkingData(const BlockNode &Node) : Node(Node) {}

    bool isLoopHeader() const { return Loop && Loop->isHeader(Node); }

    bool isDoubleLoopHeader() const {
      return isLoopHeader() && Loop->Parent && Loop->Parent->isIrreducible() &&
             Loop->Parent->isHeader(Node);
    }

    LoopData *getContainingLoop() const {
      if (!isLoopHeader())
        return Loop;
      if (!isDoubleLoopHeader())
        return Loop->Parent;
      return Loop->Parent->Parent;
    }

    /// Resolve a node to its representative.
    ///
    /// Get the node currently representing Node, which could be a containing
    /// loop.
    ///
    /// This function should only be called when distributing mass. As long as
    /// there are no irreducible edges to Node, then it will have complexity
    /// O(1) in this context.
    ///
    /// In general, the complexity is O(L), where L is the number of loop
    /// headers Node has been packaged into. Since this method is called in
    /// the context of distributing mass, L will be the number of loop headers
    /// an early exit edge jumps out of.
    BlockNode getResolvedNode() const {
      auto L = getPackagedLoop();
      return L ? L->getHeader() : Node;
    }

    LoopData *getPackagedLoop() const {
      if (!Loop || !Loop->IsPackaged)
        return nullptr;
      auto L = Loop;
      while (L->Parent && L->Parent->IsPackaged)
        L = L->Parent;
      return L;
    }

    /// Get the appropriate mass for a node.
    ///
    /// Get appropriate mass for Node. If Node is a loop-header (whose loop
    /// has been packaged), returns the mass of its pseudo-node. If it's a
    /// node inside a packaged loop, it returns the loop's mass.
    BlockMass &getMass() {
      if (!isAPackage())
        return Mass;
      if (!isADoublePackage())
        return Loop->Mass;
      return Loop->Parent->Mass;
    }

    /// Has ContainingLoop been packaged up?
    bool isPackaged() const { return getResolvedNode() != Node; }

    /// Has Loop been packaged up?
    bool isAPackage() const { return isLoopHeader() && Loop->IsPackaged; }

    /// Has Loop been packaged up twice?
    bool isADoublePackage() const {
      return isDoubleLoopHeader() && Loop->Parent->IsPackaged;
    }
  };

  /// Unscaled probability weight.
  ///
  /// Probability weight for an edge in the graph (including the
  /// successor/target node).
  ///
  /// All edges in the original function are 32-bit. However, exit edges from
  /// loop packages are taken from 64-bit exit masses, so we need 64-bits of
  /// space in general.
  ///
  /// In addition to the raw weight amount, Weight stores the type of the edge
  /// in the current context (i.e., the context of the loop being processed).
  /// Is this a local edge within the loop, an exit from the loop, or a
  /// backedge to the loop header?
  struct Weight {
    enum DistType { Local, Exit, Backedge };
    DistType Type = Local;
    BlockNode TargetNode;
    uint64_t Amount = 0;

    Weight() = default;
    Weight(DistType Type, BlockNode TargetNode, uint64_t Amount)
        : Type(Type), TargetNode(TargetNode), Amount(Amount) {}
  };

  /// Distribution of unscaled probability weight.
  ///
  /// Distribution of unscaled probability weight to a set of successors.
  ///
  /// This class collates the successor edge weights for later processing.
  ///
  /// \a DidOverflow indicates whether \a Total did overflow while adding to
  /// the distribution. It should never overflow twice.
  struct Distribution {
    using WeightList = SmallVector<Weight, 4>;

    WeightList Weights;       ///< Individual successor weights.
    uint64_t Total = 0;       ///< Sum of all weights.
    bool DidOverflow = false; ///< Whether \a Total did overflow.

    Distribution() = default;

    void addLocal(const BlockNode &Node, uint64_t Amount) {
      add(Node, Amount, Weight::Local);
    }

    void addExit(const BlockNode &Node, uint64_t Amount) {
      add(Node, Amount, Weight::Exit);
    }

    void addBackedge(const BlockNode &Node, uint64_t Amount) {
      add(Node, Amount, Weight::Backedge);
    }

    /// Normalize the distribution.
    ///
    /// Combines multiple edges to the same \a Weight::TargetNode and scales
    /// down so that \a Total fits into 32-bits.
    ///
    /// This is linear in the size of \a Weights. For the vast majority of
    /// cases, adjacent edge weights are combined by sorting WeightList and
    /// combining adjacent weights. However, for very large edge lists an
    /// auxiliary hash table is used.
    void normalize();

  private:
    void add(const BlockNode &Node, uint64_t Amount, Weight::DistType Type);
  };
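
  // A sketch of how a Distribution is typically filled in (mirroring
  // addToDist() below) before being handed to distributeMass(); the weights
  // and node names here are made up for illustration only:
  //
  //   Distribution Dist;
  //   Dist.addLocal(SuccInLoop, 3);     // edge staying inside the loop
  //   Dist.addExit(SuccOutsideLoop, 1); // edge leaving the loop
  //   Dist.addBackedge(LoopHeader, 4);  // edge back to the loop header
  //   Dist.normalize(); // coalesce duplicate targets; fit Total in 32 bits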

  /// Data about each block. This is used downstream.
  std::vector<FrequencyData> Freqs;

  /// Whether each block is an irreducible loop header.
  /// This is used downstream.
  SparseBitVector<> IsIrrLoopHeader;

  /// Loop data: see initializeLoops().
  std::vector<WorkingData> Working;

  /// Indexed information about loops.
  std::list<LoopData> Loops;

  /// Virtual destructor.
  ///
  /// Need a virtual destructor to mask the compiler warning about
  /// getBlockName().
  virtual ~BlockFrequencyInfoImplBase() = default;

  /// Add all edges out of a packaged loop to the distribution.
  ///
  /// Adds all edges from LocalLoopHead to Dist. Calls addToDist() to add each
  /// successor edge.
  ///
  /// \return \c true unless there's an irreducible backedge.
  bool addLoopSuccessorsToDist(const LoopData *OuterLoop, LoopData &Loop,
                               Distribution &Dist);

  /// Add an edge to the distribution.
  ///
  /// Adds an edge to Succ to Dist. If \c LoopHead.isValid(), then whether the
  /// edge is local/exit/backedge is in the context of LoopHead. Otherwise,
  /// every edge should be a local edge (since all the loops are packaged up).
  ///
  /// \return \c true unless aborted due to an irreducible backedge.
  bool addToDist(Distribution &Dist, const LoopData *OuterLoop,
                 const BlockNode &Pred, const BlockNode &Succ, uint64_t Weight);

  LoopData &getLoopPackage(const BlockNode &Head) {
    assert(Head.Index < Working.size());
    assert(Working[Head.Index].isLoopHeader());
    return *Working[Head.Index].Loop;
  }

  /// Analyze irreducible SCCs.
  ///
  /// Separate irreducible SCCs from \c G, which is an explicit graph of \c
  /// OuterLoop (or the top-level function, if \c OuterLoop is \c nullptr).
  /// Insert them into \a Loops before \c Insert.
  ///
  /// \return the \c LoopData nodes representing the irreducible SCCs.
  iterator_range<std::list<LoopData>::iterator>
  analyzeIrreducible(const bfi_detail::IrreducibleGraph &G, LoopData *OuterLoop,
                     std::list<LoopData>::iterator Insert);

  /// Update a loop after packaging irreducible SCCs inside of it.
  ///
  /// Update \c OuterLoop. Before finding irreducible control flow, it was
  /// partway through \a computeMassInLoop(), so \a LoopData::Exits and \a
  /// LoopData::BackedgeMass need to be reset. Also, nodes that were packaged
  /// up need to be removed from \a OuterLoop::Nodes.
  void updateLoopWithIrreducible(LoopData &OuterLoop);

  /// Distribute mass according to a distribution.
  ///
  /// Distributes the mass in Source according to Dist. If LoopHead.isValid(),
  /// backedges and exits are stored in its entry in Loops.
  ///
  /// Mass is distributed in parallel from two copies of the source mass.
  void distributeMass(const BlockNode &Source, LoopData *OuterLoop,
                      Distribution &Dist);

  /// Compute the loop scale for a loop.
  void computeLoopScale(LoopData &Loop);

  /// Adjust the mass of all headers in an irreducible loop.
  ///
  /// Initially, irreducible loops are assumed to distribute their mass
  /// equally among their headers. This can lead to wrong frequency estimates,
  /// since some headers may be executed more frequently than others.
  ///
  /// This adjusts the header mass distribution so it matches the weights of
  /// the backedges going into each of the loop headers.
  void adjustLoopHeaderMass(LoopData &Loop);

  void distributeIrrLoopHeaderMass(Distribution &Dist);

  /// Package up a loop.
  void packageLoop(LoopData &Loop);

  /// Unwrap loops.
  void unwrapLoops();

  /// Finalize frequency metrics.
  ///
  /// Calculates final frequencies and cleans up no-longer-needed data
  /// structures.
  void finalizeMetrics();

  /// Clear all memory.
  void clear();

  virtual std::string getBlockName(const BlockNode &Node) const;
  std::string getLoopName(const LoopData &Loop) const;

  virtual raw_ostream &print(raw_ostream &OS) const { return OS; }
  void dump() const { print(dbgs()); }

  Scaled64 getFloatingBlockFreq(const BlockNode &Node) const;

  BlockFrequency getBlockFreq(const BlockNode &Node) const;
  Optional<uint64_t> getBlockProfileCount(const Function &F,
                                          const BlockNode &Node,
                                          bool AllowSynthetic = false) const;
  Optional<uint64_t> getProfileCountFromFreq(const Function &F,
                                             uint64_t Freq,
                                             bool AllowSynthetic = false) const;
  bool isIrrLoopHeader(const BlockNode &Node);

  void setBlockFreq(const BlockNode &Node, uint64_t Freq);

  raw_ostream &printBlockFreq(raw_ostream &OS, const BlockNode &Node) const;
  raw_ostream &printBlockFreq(raw_ostream &OS,
                              const BlockFrequency &Freq) const;

  uint64_t getEntryFreq() const {
    assert(!Freqs.empty());
    return Freqs[0].Integer;
  }
};
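
// The 64-bit frequencies produced by finalizeMetrics() are only meaningful
// relative to each other; a common downstream pattern is to compare a block's
// frequency against the entry frequency. A minimal sketch, assuming `Base` is
// a populated BlockFrequencyInfoImplBase and `Node` is a valid BlockNode:
//
//   uint64_t EntryFreq = Base.getEntryFreq();
//   uint64_t BlockFreq = Base.getBlockFreq(Node).getFrequency();
//   bool HotterThanEntry = BlockFreq > EntryFreq;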

namespace bfi_detail {

template <class BlockT> struct TypeMap {};
template <> struct TypeMap<BasicBlock> {
  using BlockT = BasicBlock;
  using BlockKeyT = AssertingVH<const BasicBlock>;
  using FunctionT = Function;
  using BranchProbabilityInfoT = BranchProbabilityInfo;
  using LoopT = Loop;
  using LoopInfoT = LoopInfo;
};
template <> struct TypeMap<MachineBasicBlock> {
  using BlockT = MachineBasicBlock;
  using BlockKeyT = const MachineBasicBlock *;
  using FunctionT = MachineFunction;
  using BranchProbabilityInfoT = MachineBranchProbabilityInfo;
  using LoopT = MachineLoop;
  using LoopInfoT = MachineLoopInfo;
};

template <class BlockT, class BFIImplT>
class BFICallbackVH;

/// Get the name of a MachineBasicBlock.
///
/// Get the name of a MachineBasicBlock. It's templated so that including from
/// CodeGen is unnecessary (that would be a layering issue).
///
/// This is used mainly for debug output. The name is similar to
/// MachineBasicBlock::getFullName(), but skips the name of the function.
template <class BlockT> std::string getBlockName(const BlockT *BB) {
  assert(BB && "Unexpected nullptr");
  auto MachineName = "BB" + Twine(BB->getNumber());
  if (BB->getBasicBlock())
    return (MachineName + "[" + BB->getName() + "]").str();
  return MachineName.str();
}
/// Get the name of a BasicBlock.
template <> inline std::string getBlockName(const BasicBlock *BB) {
  assert(BB && "Unexpected nullptr");
  return BB->getName().str();
}

/// Graph of irreducible control flow.
///
/// This graph is used for determining the SCCs in a loop (or top-level
/// function) that has irreducible control flow.
///
/// During the block frequency algorithm, the local graphs are defined in a
/// light-weight way, deferring to the \a BasicBlock or \a MachineBasicBlock
/// graphs for most edges, but getting others from \a LoopData::ExitMap. The
/// latter only has successor information.
///
/// \a IrreducibleGraph makes this graph explicit. It's in a form that can use
/// \a GraphTraits (so that \a analyzeIrreducible() can use \a scc_iterator),
/// and it explicitly lists predecessors and successors. The initialization
/// that relies on \c MachineBasicBlock is defined in the header.
struct IrreducibleGraph {
  using BFIBase = BlockFrequencyInfoImplBase;

  BFIBase &BFI;

  using BlockNode = BFIBase::BlockNode;
  struct IrrNode {
    BlockNode Node;
    unsigned NumIn = 0;
    std::deque<const IrrNode *> Edges;

    IrrNode(const BlockNode &Node) : Node(Node) {}

    using iterator = std::deque<const IrrNode *>::const_iterator;

    iterator pred_begin() const { return Edges.begin(); }
    iterator succ_begin() const { return Edges.begin() + NumIn; }
    iterator pred_end() const { return succ_begin(); }
    iterator succ_end() const { return Edges.end(); }
  };
  BlockNode Start;
  const IrrNode *StartIrr = nullptr;
  std::vector<IrrNode> Nodes;
  SmallDenseMap<uint32_t, IrrNode *, 4> Lookup;

  /// Construct an explicit graph containing irreducible control flow.
  ///
  /// Construct an explicit graph of the control flow in \c OuterLoop (or the
  /// top-level function, if \c OuterLoop is \c nullptr). Uses \c
  /// addBlockEdges to add block successors that have not been packaged into
  /// loops.
  ///
  /// \a BlockFrequencyInfoImpl::computeIrreducibleMass() is the only expected
  /// user of this.
  template <class BlockEdgesAdder>
  IrreducibleGraph(BFIBase &BFI, const BFIBase::LoopData *OuterLoop,
                   BlockEdgesAdder addBlockEdges) : BFI(BFI) {
    initialize(OuterLoop, addBlockEdges);
  }

  template <class BlockEdgesAdder>
  void initialize(const BFIBase::LoopData *OuterLoop,
                  BlockEdgesAdder addBlockEdges);
  void addNodesInLoop(const BFIBase::LoopData &OuterLoop);
  void addNodesInFunction();

  void addNode(const BlockNode &Node) {
    Nodes.emplace_back(Node);
    BFI.Working[Node.Index].getMass() = BlockMass::getEmpty();
  }

  void indexNodes();
  template <class BlockEdgesAdder>
  void addEdges(const BlockNode &Node, const BFIBase::LoopData *OuterLoop,
                BlockEdgesAdder addBlockEdges);
  void addEdge(IrrNode &Irr, const BlockNode &Succ,
               const BFIBase::LoopData *OuterLoop);
};
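
// Typical construction of an IrreducibleGraph, mirroring its only expected
// user, BlockFrequencyInfoImpl::computeIrreducibleMass() (defined later in
// this header); `addBlockEdges` may be any callable taking
// (IrreducibleGraph &, IrrNode &, const BFIBase::LoopData *):
//
//   BlockEdgesAdder<BT> addBlockEdges(*this);
//   IrreducibleGraph G(*this, OuterLoop, addBlockEdges);
//   for (auto &L : analyzeIrreducible(G, OuterLoop, Insert))
//     computeMassInLoop(L);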

template <class BlockEdgesAdder>
void IrreducibleGraph::initialize(const BFIBase::LoopData *OuterLoop,
                                  BlockEdgesAdder addBlockEdges) {
  if (OuterLoop) {
    addNodesInLoop(*OuterLoop);
    for (auto N : OuterLoop->Nodes)
      addEdges(N, OuterLoop, addBlockEdges);
  } else {
    addNodesInFunction();
    for (uint32_t Index = 0; Index < BFI.Working.size(); ++Index)
      addEdges(Index, OuterLoop, addBlockEdges);
  }
  StartIrr = Lookup[Start.Index];
}

template <class BlockEdgesAdder>
void IrreducibleGraph::addEdges(const BlockNode &Node,
                                const BFIBase::LoopData *OuterLoop,
                                BlockEdgesAdder addBlockEdges) {
  auto L = Lookup.find(Node.Index);
  if (L == Lookup.end())
    return;
  IrrNode &Irr = *L->second;
  const auto &Working = BFI.Working[Node.Index];

  if (Working.isAPackage())
    for (const auto &I : Working.Loop->Exits)
      addEdge(Irr, I.first, OuterLoop);
  else
    addBlockEdges(*this, Irr, OuterLoop);
}

} // end namespace bfi_detail

/// Shared implementation for block frequency analysis.
///
/// This is a shared implementation of BlockFrequencyInfo and
/// MachineBlockFrequencyInfo, and calculates the relative frequencies of
/// blocks.
///
/// LoopInfo defines a loop as a "non-trivial" SCC dominated by a single block,
/// which is called the header. A given loop, L, can have sub-loops, which are
/// loops within the subgraph of L that exclude its header. (A "trivial" SCC
/// consists of a single block that does not have a self-edge.)
///
/// In addition to loops, this algorithm has limited support for irreducible
/// SCCs, which are SCCs with multiple entry blocks. Irreducible SCCs are
/// discovered on the fly, and modelled as loops with multiple headers.
///
/// The headers of irreducible sub-SCCs consist of its entry blocks and all
/// nodes that are targets of a backedge within it (excluding backedges within
/// true sub-loops). Block frequency calculations act as if a block is
/// inserted that intercepts all the edges to the headers. All backedges and
/// entries point to this block. Its successors are the headers, which split
/// the frequency evenly.
///
/// This algorithm leverages BlockMass and ScaledNumber to maintain precision,
/// separates mass distribution from loop scaling, and dithers to eliminate
/// probability mass loss.
///
/// The implementation is split between BlockFrequencyInfoImpl, which knows the
/// type of graph being modelled (BasicBlock vs. MachineBasicBlock), and
/// BlockFrequencyInfoImplBase, which doesn't. The base class uses \a
/// BlockNode, a wrapper around a uint32_t. BlockNode is numbered from 0 in
/// reverse-post order. This gives two advantages: it's easy to compare the
/// relative ordering of two nodes, and maps keyed on BlockT can be represented
/// by vectors.
///
/// This algorithm is O(V+E), unless there is irreducible control flow, in
/// which case it's O(V*E) in the worst case.
///
/// These are the main stages:
///
///  0. Reverse post-order traversal (\a initializeRPOT()).
///
///     Run a single post-order traversal and save it (in reverse) in RPOT.
///     All other stages make use of this ordering. Save a lookup from BlockT
///     to BlockNode (the index into RPOT) in Nodes.
///
///  1. Loop initialization (\a initializeLoops()).
///
///     Translate LoopInfo/MachineLoopInfo into a form suitable for the rest of
///     the algorithm. In particular, store the immediate members of each loop
///     in reverse post-order.
///
///  2. Calculate mass and scale in loops (\a computeMassInLoops()).
///
///     For each loop (bottom-up), distribute mass through the DAG resulting
///     from ignoring backedges and treating sub-loops as a single pseudo-node.
///     Track the backedge mass distributed to the loop header, and use it to
///     calculate the loop scale (number of loop iterations). Immediate
///     members that represent sub-loops will already have been visited and
///     packaged into a pseudo-node.
///
///     Distributing mass in a loop is a reverse-post-order traversal through
///     the loop. Start by assigning full mass to the Loop header. For each
///     node in the loop:
///
///       - Fetch and categorize the weight distribution for its successors.
///         If this is a packaged-subloop, the weight distribution is stored
///         in \a LoopData::Exits. Otherwise, fetch it from
///         BranchProbabilityInfo.
///
///       - Each successor is categorized as \a Weight::Local, a local edge
///         within the current loop, \a Weight::Backedge, a backedge to the
///         loop header, or \a Weight::Exit, any successor outside the loop.
///         The weight, the successor, and its category are stored in \a
///         Distribution. There can be multiple edges to each successor.
///
///       - If there's a backedge to a non-header, there's an irreducible SCC.
///         The usual flow is temporarily aborted. \a
///         computeIrreducibleMass() finds the irreducible SCCs within the
///         loop, packages them up, and restarts the flow.
///
///       - Normalize the distribution: scale weights down so that their sum
///         is 32-bits, and coalesce multiple edges to the same node.
///
///       - Distribute the mass accordingly, dithering to minimize mass loss,
///         as described in \a distributeMass().
///
///     In the case of irreducible loops, instead of a single loop header,
///     there will be several. The computation of backedge masses is similar
///     but instead of having a single backedge mass, there will be one
///     backedge per loop header. In these cases, each backedge will carry
///     a mass proportional to the edge weights along the corresponding
///     path.
///
///     At the end of propagation, the full mass assigned to the loop will be
///     distributed among the loop headers proportionally according to the
///     mass flowing through their backedges.
///
///     Finally, calculate the loop scale from the accumulated backedge mass.
///
///  3. Distribute mass in the function (\a computeMassInFunction()).
///
///     Finally, distribute mass through the DAG resulting from packaging all
///     loops in the function. This uses the same algorithm as distributing
///     mass in a loop, except that there are no exit or backedge edges.
///
///  4. Unpackage loops (\a unwrapLoops()).
///
///     Initialize each block's frequency to a floating point representation of
///     its mass.
///
///     Visit loops top-down, scaling the frequencies of its immediate members
///     by the loop's pseudo-node's frequency.
///
///  5. Convert frequencies to a 64-bit range (\a finalizeMetrics()).
///
///     Using the min and max frequencies as a guide, translate floating point
///     frequencies to an appropriate range in uint64_t.
///
/// It has some known flaws.
///
///   - The model of irreducible control flow is a rough approximation.
///
///     Modelling irreducible control flow exactly involves setting up and
///     solving a group of infinite geometric series. Such precision is
///     unlikely to be worthwhile, since most of our algorithms give up on
///     irreducible control flow anyway.
///
///     Nevertheless, we might find that we need to get closer. Here's a sort
///     of TODO list for the model with diminishing returns, to be completed as
///     necessary.
///
///       - The headers for the \a LoopData representing an irreducible SCC
///         include non-entry blocks. When these extra blocks exist, they
///         indicate a self-contained irreducible sub-SCC. We could treat them
///         as sub-loops, rather than arbitrarily shoving the problematic
///         blocks into the headers of the main irreducible SCC.
///
///       - Entry frequencies are assumed to be evenly split between the
///         headers of a given irreducible SCC, which is the only option if we
///         need to compute mass in the SCC before its parent loop. Instead,
///         we could partially compute mass in the parent loop, and stop when
///         we get to the SCC. Here, we have the correct ratio of entry
///         masses, which we can use to adjust their relative frequencies.
///         Compute mass in the SCC, and then continue propagation in the
///         parent.
///
///       - We can propagate mass iteratively through the SCC, for some fixed
///         number of iterations. Each iteration starts by assigning the entry
///         blocks their backedge mass from the prior iteration. The final
///         mass for each block (and each exit, and the total backedge mass
///         used for computing loop scale) is the sum of all iterations.
///         (Running this until fixed point would "solve" the geometric
///         series by simulation.)
template <class BT> class BlockFrequencyInfoImpl : BlockFrequencyInfoImplBase {
  // This is part of a workaround for a GCC 4.7 crash on lambdas.
  friend struct bfi_detail::BlockEdgesAdder<BT>;

  using BlockT = typename bfi_detail::TypeMap<BT>::BlockT;
  using BlockKeyT = typename bfi_detail::TypeMap<BT>::BlockKeyT;
  using FunctionT = typename bfi_detail::TypeMap<BT>::FunctionT;
  using BranchProbabilityInfoT =
      typename bfi_detail::TypeMap<BT>::BranchProbabilityInfoT;
  using LoopT = typename bfi_detail::TypeMap<BT>::LoopT;
  using LoopInfoT = typename bfi_detail::TypeMap<BT>::LoopInfoT;
  using Successor = GraphTraits<const BlockT *>;
  using Predecessor = GraphTraits<Inverse<const BlockT *>>;
  using BFICallbackVH =
      bfi_detail::BFICallbackVH<BlockT, BlockFrequencyInfoImpl>;

  const BranchProbabilityInfoT *BPI = nullptr;
  const LoopInfoT *LI = nullptr;
  const FunctionT *F = nullptr;

  // All blocks in reverse postorder.
  std::vector<const BlockT *> RPOT;
  DenseMap<BlockKeyT, std::pair<BlockNode, BFICallbackVH>> Nodes;

  using rpot_iterator = typename std::vector<const BlockT *>::const_iterator;

  rpot_iterator rpot_begin() const { return RPOT.begin(); }
  rpot_iterator rpot_end() const { return RPOT.end(); }

  size_t getIndex(const rpot_iterator &I) const { return I - rpot_begin(); }

  BlockNode getNode(const rpot_iterator &I) const {
    return BlockNode(getIndex(I));
  }

  BlockNode getNode(const BlockT *BB) const { return Nodes.lookup(BB).first; }

  const BlockT *getBlock(const BlockNode &Node) const {
    assert(Node.Index < RPOT.size());
    return RPOT[Node.Index];
  }

  /// Run (and save) a post-order traversal.
  ///
  /// Saves a reverse post-order traversal of all the nodes in \a F.
  void initializeRPOT();

  /// Initialize loop data.
  ///
  /// Build up \a Loops using \a LoopInfo. \a LoopInfo gives us a mapping from
  /// each block to the deepest loop it's in, but we need the inverse. For each
  /// loop, we store in reverse post-order its "immediate" members, defined as
  /// the header, the headers of immediate sub-loops, and all other blocks in
  /// the loop that are not in sub-loops.
  void initializeLoops();

  /// Propagate to a block's successors.
  ///
  /// In the context of distributing mass through \c OuterLoop, divide the mass
  /// currently assigned to \c Node between its successors.
  ///
  /// \return \c true unless there's an irreducible backedge.
  bool propagateMassToSuccessors(LoopData *OuterLoop, const BlockNode &Node);

  /// Compute mass in a particular loop.
  ///
  /// Assign mass to \c Loop's header, and then for each block in \c Loop in
  /// reverse post-order, distribute mass to its successors. Only visits nodes
  /// that have not been packaged into sub-loops.
  ///
  /// \pre \a computeMassInLoop() has been called for each subloop of \c Loop.
  /// \return \c true unless there's an irreducible backedge.
  bool computeMassInLoop(LoopData &Loop);

  /// Try to compute mass in the top-level function.
  ///
  /// Assign mass to the entry block, and then for each block in reverse
  /// post-order, distribute mass to its successors. Skips nodes that have
  /// been packaged into loops.
  ///
  /// \pre \a computeMassInLoops() has been called.
  /// \return \c true unless there's an irreducible backedge.
  bool tryToComputeMassInFunction();

  /// Compute mass in (and package up) irreducible SCCs.
  ///
  /// Find the irreducible SCCs in \c OuterLoop, add them to \a Loops (in front
  /// of \c Insert), and call \a computeMassInLoop() on each of them.
  ///
  /// If \c OuterLoop is \c nullptr, it refers to the top-level function.
  ///
  /// \pre \a computeMassInLoop() has been called for each subloop of \c
  /// OuterLoop.
  /// \pre \c Insert points at the last loop successfully processed by \a
  /// computeMassInLoop().
  /// \pre \c OuterLoop has irreducible SCCs.
  void computeIrreducibleMass(LoopData *OuterLoop,
                              std::list<LoopData>::iterator Insert);

  /// Compute mass in all loops.
  ///
  /// For each loop bottom-up, call \a computeMassInLoop().
  ///
  /// \a computeMassInLoop() aborts (and returns \c false) on loops that
  /// contain irreducible sub-SCCs. Use \a computeIrreducibleMass() and then
  /// re-enter \a computeMassInLoop().
  ///
  /// \post \a computeMassInLoop() has returned \c true for every loop.
  void computeMassInLoops();

  /// Compute mass in the top-level function.
  ///
  /// Uses \a tryToComputeMassInFunction() and \a computeIrreducibleMass() to
  /// compute mass in the top-level function.
  ///
  /// \post \a tryToComputeMassInFunction() has returned \c true.
  void computeMassInFunction();

  std::string getBlockName(const BlockNode &Node) const override {
    return bfi_detail::getBlockName(getBlock(Node));
  }

public:
  BlockFrequencyInfoImpl() = default;

  const FunctionT *getFunction() const { return F; }

  void calculate(const FunctionT &F, const BranchProbabilityInfoT &BPI,
                 const LoopInfoT &LI);

  using BlockFrequencyInfoImplBase::getEntryFreq;

  BlockFrequency getBlockFreq(const BlockT *BB) const {
    return BlockFrequencyInfoImplBase::getBlockFreq(getNode(BB));
  }

  Optional<uint64_t> getBlockProfileCount(const Function &F,
                                          const BlockT *BB,
                                          bool AllowSynthetic = false) const {
    return BlockFrequencyInfoImplBase::getBlockProfileCount(F, getNode(BB),
                                                            AllowSynthetic);
  }

  Optional<uint64_t> getProfileCountFromFreq(const Function &F,
                                             uint64_t Freq,
                                             bool AllowSynthetic = false) const {
    return BlockFrequencyInfoImplBase::getProfileCountFromFreq(F, Freq,
                                                               AllowSynthetic);
  }

  bool isIrrLoopHeader(const BlockT *BB) {
    return BlockFrequencyInfoImplBase::isIrrLoopHeader(getNode(BB));
  }

  void setBlockFreq(const BlockT *BB, uint64_t Freq);

  void forgetBlock(const BlockT *BB) {
    // We don't erase the corresponding items from `Freqs`, `RPOT`, and other
    // containers, to avoid invalidating indices. Doing so would have saved
    // some memory, but it's not worth it.
    Nodes.erase(BB);
  }

  Scaled64 getFloatingBlockFreq(const BlockT *BB) const {
    return BlockFrequencyInfoImplBase::getFloatingBlockFreq(getNode(BB));
  }

  const BranchProbabilityInfoT &getBPI() const { return *BPI; }

  /// Print the frequencies for the current function.
  ///
  /// Prints the frequencies for the blocks in the current function.
  ///
  /// Blocks are printed in the natural iteration order of the function, rather
  /// than reverse post-order. This provides two advantages: writing -analyze
  /// tests is easier (since blocks come out in source order), and even
  /// unreachable blocks are printed.
  ///
  /// \a BlockFrequencyInfoImplBase::print() only knows reverse post-order, so
  /// we need to override it here.
  raw_ostream &print(raw_ostream &OS) const override;

  using BlockFrequencyInfoImplBase::dump;
  using BlockFrequencyInfoImplBase::printBlockFreq;

  raw_ostream &printBlockFreq(raw_ostream &OS, const BlockT *BB) const {
    return BlockFrequencyInfoImplBase::printBlockFreq(OS, getNode(BB));
  }

  void verifyMatch(BlockFrequencyInfoImpl<BT> &Other) const;
};
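
// A minimal usage sketch of the IR instantiation, mirroring how the
// BlockFrequencyInfo wrapper drives this implementation; `F`, `BPI`, and `LI`
// are assumed to be a Function together with its BranchProbabilityInfo and
// LoopInfo analysis results:
//
//   BlockFrequencyInfoImpl<BasicBlock> BFI;
//   BFI.calculate(F, BPI, LI);
//   for (const BasicBlock &BB : F)
//     if (BFI.getBlockFreq(&BB).getFrequency() >= BFI.getEntryFreq())
//       ; // BB is estimated to run at least as often as the entry block.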

namespace bfi_detail {

template <class BFIImplT>
class BFICallbackVH<BasicBlock, BFIImplT> : public CallbackVH {
  BFIImplT *BFIImpl;

public:
  BFICallbackVH() = default;

  BFICallbackVH(const BasicBlock *BB, BFIImplT *BFIImpl)
      : CallbackVH(BB), BFIImpl(BFIImpl) {}

  virtual ~BFICallbackVH() = default;

  void deleted() override {
    BFIImpl->forgetBlock(cast<BasicBlock>(getValPtr()));
  }
};

/// Dummy implementation since MachineBasicBlocks aren't Values, so ValueHandles
/// don't apply to them.
template <class BFIImplT>
class BFICallbackVH<MachineBasicBlock, BFIImplT> {
public:
  BFICallbackVH() = default;
  BFICallbackVH(const MachineBasicBlock *, BFIImplT *) {}
};

} // end namespace bfi_detail

template <class BT>
void BlockFrequencyInfoImpl<BT>::calculate(const FunctionT &F,
                                           const BranchProbabilityInfoT &BPI,
                                           const LoopInfoT &LI) {
  // Save the parameters.
  this->BPI = &BPI;
  this->LI = &LI;
  this->F = &F;

  // Clean up left-over data structures.
  BlockFrequencyInfoImplBase::clear();
  RPOT.clear();
  Nodes.clear();

  // Initialize.
  LLVM_DEBUG(dbgs() << "\nblock-frequency: " << F.getName()
                    << "\n================="
                    << std::string(F.getName().size(), '=') << "\n");
  initializeRPOT();
  initializeLoops();

  // Visit loops in post-order to find the local mass distribution, and then do
  // the full function.
  computeMassInLoops();
  computeMassInFunction();
  unwrapLoops();
  finalizeMetrics();

  if (CheckBFIUnknownBlockQueries) {
    // To detect BFI queries for unknown blocks, add entries for unreachable
    // blocks, if any. This is to distinguish between known/existing unreachable
    // blocks and unknown blocks.
    for (const BlockT &BB : F)
      if (!Nodes.count(&BB))
        setBlockFreq(&BB, 0);
  }
}

template <class BT>
void BlockFrequencyInfoImpl<BT>::setBlockFreq(const BlockT *BB, uint64_t Freq) {
  if (Nodes.count(BB))
    BlockFrequencyInfoImplBase::setBlockFreq(getNode(BB), Freq);
  else {
    // If BB is a newly added block after BFI is done, we need to create a new
    // BlockNode for it assigned with a new index. The index can be determined
    // by the size of Freqs.
    BlockNode NewNode(Freqs.size());
    Nodes[BB] = {NewNode, BFICallbackVH(BB, this)};
    Freqs.emplace_back();
    BlockFrequencyInfoImplBase::setBlockFreq(NewNode, Freq);
  }
}

template <class BT> void BlockFrequencyInfoImpl<BT>::initializeRPOT() {
  const BlockT *Entry = &F->front();
  RPOT.reserve(F->size());
  std::copy(po_begin(Entry), po_end(Entry), std::back_inserter(RPOT));
  std::reverse(RPOT.begin(), RPOT.end());

  assert(RPOT.size() - 1 <= BlockNode::getMaxIndex() &&
         "More nodes in function than Block Frequency Info supports");

  LLVM_DEBUG(dbgs() << "reverse-post-order-traversal\n");
  for (rpot_iterator I = rpot_begin(), E = rpot_end(); I != E; ++I) {
    BlockNode Node = getNode(I);
    LLVM_DEBUG(dbgs() << " - " << getIndex(I) << ": " << getBlockName(Node)
                      << "\n");
    Nodes[*I] = {Node, BFICallbackVH(*I, this)};
  }

  Working.reserve(RPOT.size());
  for (size_t Index = 0; Index < RPOT.size(); ++Index)
    Working.emplace_back(Index);
  Freqs.resize(RPOT.size());
}

template <class BT> void BlockFrequencyInfoImpl<BT>::initializeLoops() {
  LLVM_DEBUG(dbgs() << "loop-detection\n");
  if (LI->empty())
    return;

  // Visit loops top down and assign them an index.
  std::deque<std::pair<const LoopT *, LoopData *>> Q;
  for (const LoopT *L : *LI)
    Q.emplace_back(L, nullptr);
  while (!Q.empty()) {
    const LoopT *Loop = Q.front().first;
    LoopData *Parent = Q.front().second;
    Q.pop_front();

    BlockNode Header = getNode(Loop->getHeader());
    assert(Header.isValid());

    Loops.emplace_back(Parent, Header);
    Working[Header.Index].Loop = &Loops.back();
    LLVM_DEBUG(dbgs() << " - loop = " << getBlockName(Header) << "\n");

    for (const LoopT *L : *Loop)
      Q.emplace_back(L, &Loops.back());
  }

  // Visit nodes in reverse post-order and add them to their deepest containing
  // loop.
  for (size_t Index = 0; Index < RPOT.size(); ++Index) {
    // Loop headers have already been mostly mapped.
    if (Working[Index].isLoopHeader()) {
      LoopData *ContainingLoop = Working[Index].getContainingLoop();
      if (ContainingLoop)
        ContainingLoop->Nodes.push_back(Index);
      continue;
    }

    const LoopT *Loop = LI->getLoopFor(RPOT[Index]);
    if (!Loop)
      continue;

    // Add this node to its containing loop's member list.
    BlockNode Header = getNode(Loop->getHeader());
    assert(Header.isValid());
    const auto &HeaderData = Working[Header.Index];
    assert(HeaderData.isLoopHeader());

    Working[Index].Loop = HeaderData.Loop;
    HeaderData.Loop->Nodes.push_back(Index);
    LLVM_DEBUG(dbgs() << " - loop = " << getBlockName(Header)
                      << ": member = " << getBlockName(Index) << "\n");
  }
}

template <class BT> void BlockFrequencyInfoImpl<BT>::computeMassInLoops() {
  // Visit loops with the deepest first, and the top-level loops last.
  for (auto L = Loops.rbegin(), E = Loops.rend(); L != E; ++L) {
    if (computeMassInLoop(*L))
      continue;
    auto Next = std::next(L);
    computeIrreducibleMass(&*L, L.base());
    L = std::prev(Next);
    if (computeMassInLoop(*L))
      continue;
    llvm_unreachable("unhandled irreducible control flow");
  }
}

template <class BT>
bool BlockFrequencyInfoImpl<BT>::computeMassInLoop(LoopData &Loop) {
  // Compute mass in loop.
  LLVM_DEBUG(dbgs() << "compute-mass-in-loop: " << getLoopName(Loop) << "\n");

  if (Loop.isIrreducible()) {
    LLVM_DEBUG(dbgs() << "isIrreducible = true\n");
    Distribution Dist;
    unsigned NumHeadersWithWeight = 0;
    Optional<uint64_t> MinHeaderWeight;
    DenseSet<uint32_t> HeadersWithoutWeight;
    HeadersWithoutWeight.reserve(Loop.NumHeaders);
    for (uint32_t H = 0; H < Loop.NumHeaders; ++H) {
      auto &HeaderNode = Loop.Nodes[H];
      const BlockT *Block = getBlock(HeaderNode);
      IsIrrLoopHeader.set(Loop.Nodes[H].Index);
      Optional<uint64_t> HeaderWeight = Block->getIrrLoopHeaderWeight();
      if (!HeaderWeight) {
        LLVM_DEBUG(dbgs() << "Missing irr loop header metadata on "
                          << getBlockName(HeaderNode) << "\n");
        HeadersWithoutWeight.insert(H);
        continue;
      }
      LLVM_DEBUG(dbgs() << getBlockName(HeaderNode)
                        << " has irr loop header weight "
                        << HeaderWeight.getValue() << "\n");
      NumHeadersWithWeight++;
      uint64_t HeaderWeightValue = HeaderWeight.getValue();
      if (!MinHeaderWeight || HeaderWeightValue < MinHeaderWeight)
        MinHeaderWeight = HeaderWeightValue;
      if (HeaderWeightValue) {
        Dist.addLocal(HeaderNode, HeaderWeightValue);
      }
    }
    // As a heuristic, if some headers don't have a weight, give them the
    // minimum weight seen, so as not to disrupt the existing trends too much:
    // the minimum is in the general range of the other headers' weights, and
    // it seems to perform better than the average.
    // FIXME: better update in the passes that drop the header weight.
    // If no headers have a weight, give them even weight (use weight 1).
    if (!MinHeaderWeight)
      MinHeaderWeight = 1;
    for (uint32_t H : HeadersWithoutWeight) {
      auto &HeaderNode = Loop.Nodes[H];
      assert(!getBlock(HeaderNode)->getIrrLoopHeaderWeight() &&
             "Shouldn't have a weight metadata");
      uint64_t MinWeight = MinHeaderWeight.getValue();
      LLVM_DEBUG(dbgs() << "Giving weight " << MinWeight << " to "
                        << getBlockName(HeaderNode) << "\n");
      if (MinWeight)
        Dist.addLocal(HeaderNode, MinWeight);
    }
    distributeIrrLoopHeaderMass(Dist);
    for (const BlockNode &M : Loop.Nodes)
      if (!propagateMassToSuccessors(&Loop, M))
        llvm_unreachable("unhandled irreducible control flow");
    if (NumHeadersWithWeight == 0)
      // No headers have weight metadata. Adjust header mass.
      adjustLoopHeaderMass(Loop);
  } else {
    Working[Loop.getHeader().Index].getMass() = BlockMass::getFull();
    if (!propagateMassToSuccessors(&Loop, Loop.getHeader()))
      llvm_unreachable("irreducible control flow to loop header!?");
    for (const BlockNode &M : Loop.members())
      if (!propagateMassToSuccessors(&Loop, M))
        // Irreducible backedge.
        return false;
  }

  computeLoopScale(Loop);
  packageLoop(Loop);
  return true;
}
1287
template <class BT>
bool BlockFrequencyInfoImpl<BT>::tryToComputeMassInFunction() {
  // Compute mass in function.
  LLVM_DEBUG(dbgs() << "compute-mass-in-function\n");
  assert(!Working.empty() && "no blocks in function");
  assert(!Working[0].isLoopHeader() && "entry block is a loop header");

  Working[0].getMass() = BlockMass::getFull();
  for (rpot_iterator I = rpot_begin(), IE = rpot_end(); I != IE; ++I) {
    // Check for nodes that have been packaged.
    BlockNode Node = getNode(I);
    if (Working[Node.Index].isPackaged())
      continue;

    if (!propagateMassToSuccessors(nullptr, Node))
      return false;
  }
  return true;
}

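// Compute mass for the whole function. If the simple RPO walk fails because
// of irreducible control flow, compute the irreducible mass at the top level
// and retry once; a second failure indicates a bug.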
template <class BT> void BlockFrequencyInfoImpl<BT>::computeMassInFunction() {
  if (tryToComputeMassInFunction())
    return;
  computeIrreducibleMass(nullptr, Loops.begin());
  if (tryToComputeMassInFunction())
    return;
  llvm_unreachable("unhandled irreducible control flow");
}

/// \note This should be a lambda, but that crashes GCC 4.7.
namespace bfi_detail {

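// Functor used by computeIrreducibleMass() to add, for each block in the
// irreducible region, edges in the IrreducibleGraph to all of that block's
// successors in the underlying CFG.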
template <class BT> struct BlockEdgesAdder {
  using BlockT = BT;
  using LoopData = BlockFrequencyInfoImplBase::LoopData;
  using Successor = GraphTraits<const BlockT *>;

  const BlockFrequencyInfoImpl<BT> &BFI;

  explicit BlockEdgesAdder(const BlockFrequencyInfoImpl<BT> &BFI)
      : BFI(BFI) {}

  void operator()(IrreducibleGraph &G, IrreducibleGraph::IrrNode &Irr,
                  const LoopData *OuterLoop) {
    const BlockT *BB = BFI.RPOT[Irr.Node.Index];
    for (const auto Succ : children<const BlockT *>(BB))
      G.addEdge(Irr, BFI.getNode(Succ), OuterLoop);
  }
};

} // end namespace bfi_detail

template <class BT>
void BlockFrequencyInfoImpl<BT>::computeIrreducibleMass(
    LoopData *OuterLoop, std::list<LoopData>::iterator Insert) {
  LLVM_DEBUG(dbgs() << "analyze-irreducible-in-";
             if (OuterLoop) dbgs()
             << "loop: " << getLoopName(*OuterLoop) << "\n";
             else dbgs() << "function\n");

  using namespace bfi_detail;

  // Ideally, addBlockEdges() would be declared here as a lambda, but that
  // crashes GCC 4.7.
  BlockEdgesAdder<BT> addBlockEdges(*this);
  IrreducibleGraph G(*this, OuterLoop, addBlockEdges);

  for (auto &L : analyzeIrreducible(G, OuterLoop, Insert))
    computeMassInLoop(L);

  if (!OuterLoop)
    return;
  updateLoopWithIrreducible(*OuterLoop);
}

// A helper function that converts a branch probability into a weight.
// BranchProbability uses a fixed denominator for all probabilities, so the
// numerator alone serves as a relative weight.
inline uint32_t getWeightFromBranchProb(const BranchProbability Prob) {
  return Prob.getNumerator();
}

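// Distribute the mass of Node among its successors: if Node is a packaged
// loop, distribute along the loop's cached exit edges; otherwise weight each
// CFG successor by its branch probability. Returns false when an irreducible
// backedge is discovered, so the caller can rebuild the region as an
// irreducible loop.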
template <class BT>
bool
BlockFrequencyInfoImpl<BT>::propagateMassToSuccessors(LoopData *OuterLoop,
                                                      const BlockNode &Node) {
  LLVM_DEBUG(dbgs() << " - node: " << getBlockName(Node) << "\n");
  // Calculate probability for successors.
  Distribution Dist;
  if (auto *Loop = Working[Node.Index].getPackagedLoop()) {
    assert(Loop != OuterLoop && "Cannot propagate mass in a packaged loop");
    if (!addLoopSuccessorsToDist(OuterLoop, *Loop, Dist))
      // Irreducible backedge.
      return false;
  } else {
    const BlockT *BB = getBlock(Node);
    for (auto SI = GraphTraits<const BlockT *>::child_begin(BB),
              SE = GraphTraits<const BlockT *>::child_end(BB);
         SI != SE; ++SI)
      if (!addToDist(
              Dist, OuterLoop, Node, getNode(*SI),
              getWeightFromBranchProb(BPI->getEdgeProbability(BB, SI))))
        // Irreducible backedge.
        return false;
  }

  // Distribute mass to successors, saving exit and backedge data in the
  // loop header.
  distributeMass(Node, OuterLoop, Dist);
  return true;
}

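// Print the final block frequencies: for every block, the scaled floating
// point frequency, the integer frequency, and, when available, the profile
// count and the irreducible loop header weight.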
template <class BT>
raw_ostream &BlockFrequencyInfoImpl<BT>::print(raw_ostream &OS) const {
  if (!F)
    return OS;
  OS << "block-frequency-info: " << F->getName() << "\n";
  for (const BlockT &BB : *F) {
    OS << " - " << bfi_detail::getBlockName(&BB) << ": float = ";
    getFloatingBlockFreq(&BB).print(OS, 5)
        << ", int = " << getBlockFreq(&BB).getFrequency();
    if (Optional<uint64_t> ProfileCount =
            BlockFrequencyInfoImplBase::getBlockProfileCount(
                F->getFunction(), getNode(&BB)))
      OS << ", count = " << ProfileCount.getValue();
    if (Optional<uint64_t> IrrLoopHeaderWeight =
            BB.getIrrLoopHeaderWeight())
      OS << ", irr_loop_header_weight = " << IrrLoopHeaderWeight.getValue();
    OS << "\n";
  }

  // Add an extra newline for readability.
  OS << "\n";
  return OS;
}

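// Check that this BFI and Other assign the same integer frequency to every
// block that still has a valid pointer, dumping both BFIs and asserting on
// any mismatch.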
template <class BT>
void BlockFrequencyInfoImpl<BT>::verifyMatch(
    BlockFrequencyInfoImpl<BT> &Other) const {
  bool Match = true;
  DenseMap<const BlockT *, BlockNode> ValidNodes;
  DenseMap<const BlockT *, BlockNode> OtherValidNodes;
  for (auto &Entry : Nodes) {
    const BlockT *BB = Entry.first;
    if (BB) {
      ValidNodes[BB] = Entry.second.first;
    }
  }
  for (auto &Entry : Other.Nodes) {
    const BlockT *BB = Entry.first;
    if (BB) {
      OtherValidNodes[BB] = Entry.second.first;
    }
  }
  unsigned NumValidNodes = ValidNodes.size();
  unsigned NumOtherValidNodes = OtherValidNodes.size();
  if (NumValidNodes != NumOtherValidNodes) {
    Match = false;
    dbgs() << "Number of blocks mismatch: " << NumValidNodes << " vs "
           << NumOtherValidNodes << "\n";
  } else {
    for (auto &Entry : ValidNodes) {
      const BlockT *BB = Entry.first;
      BlockNode Node = Entry.second;
      if (OtherValidNodes.count(BB)) {
        BlockNode OtherNode = OtherValidNodes[BB];
        const auto &Freq = Freqs[Node.Index];
        const auto &OtherFreq = Other.Freqs[OtherNode.Index];
        if (Freq.Integer != OtherFreq.Integer) {
          Match = false;
          dbgs() << "Freq mismatch: " << bfi_detail::getBlockName(BB) << " "
                 << Freq.Integer << " vs " << OtherFreq.Integer << "\n";
        }
      } else {
        Match = false;
        dbgs() << "Block " << bfi_detail::getBlockName(BB) << " index "
               << Node.Index << " does not exist in Other.\n";
      }
    }
    // If there's a valid node in OtherValidNodes that's not in ValidNodes,
    // either the above num check or the check on OtherValidNodes will fail.
  }
  if (!Match) {
    dbgs() << "This\n";
    print(dbgs());
    dbgs() << "Other\n";
    Other.print(dbgs());
  }
  assert(Match && "BFI mismatch");
}

// Graph trait base class for the block frequency information graph viewer.

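// How each node in the DOT graph should be annotated: with nothing, with the
// block frequency as a scaled fraction, as a raw integer frequency, or as a
// profile count.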
enum GVDAGType { GVDT_None, GVDT_Fraction, GVDT_Integer, GVDT_Count };

template <class BlockFrequencyInfoT, class BranchProbabilityInfoT>
struct BFIDOTGraphTraitsBase : public DefaultDOTGraphTraits {
  using GTraits = GraphTraits<BlockFrequencyInfoT *>;
  using NodeRef = typename GTraits::NodeRef;
  using EdgeIter = typename GTraits::ChildIteratorType;
  using NodeIter = typename GTraits::nodes_iterator;

  uint64_t MaxFrequency = 0;

  explicit BFIDOTGraphTraitsBase(bool isSimple = false)
      : DefaultDOTGraphTraits(isSimple) {}

  static StringRef getGraphName(const BlockFrequencyInfoT *G) {
    return G->getFunction()->getName();
  }

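  // Color a node red when its frequency is at least HotPercentThreshold
  // percent of the hottest block in the function; otherwise (or when no
  // threshold is given) return an empty attribute string. MaxFrequency is
  // computed lazily on the first call.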
  std::string getNodeAttributes(NodeRef Node, const BlockFrequencyInfoT *Graph,
                                unsigned HotPercentThreshold = 0) {
    std::string Result;
    if (!HotPercentThreshold)
      return Result;

    // Compute MaxFrequency on the fly:
    if (!MaxFrequency) {
      for (NodeIter I = GTraits::nodes_begin(Graph),
                    E = GTraits::nodes_end(Graph);
           I != E; ++I) {
        NodeRef N = *I;
        MaxFrequency =
            std::max(MaxFrequency, Graph->getBlockFreq(N).getFrequency());
      }
    }
    BlockFrequency Freq = Graph->getBlockFreq(Node);
    BlockFrequency HotFreq =
        (BlockFrequency(MaxFrequency) *
         BranchProbability::getBranchProbability(HotPercentThreshold, 100));

    if (Freq < HotFreq)
      return Result;

    raw_string_ostream OS(Result);
    OS << "color=\"red\"";
    OS.flush();
    return Result;
  }

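  // Build the node label: the block name (plus its layout order, if given),
  // followed by the frequency rendered according to GType as a scaled
  // fraction, an integer frequency, or a profile count.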
  std::string getNodeLabel(NodeRef Node, const BlockFrequencyInfoT *Graph,
                           GVDAGType GType, int layout_order = -1) {
    std::string Result;
    raw_string_ostream OS(Result);

    if (layout_order != -1)
      OS << Node->getName() << "[" << layout_order << "] : ";
    else
      OS << Node->getName() << " : ";
    switch (GType) {
    case GVDT_Fraction:
      Graph->printBlockFreq(OS, Node);
      break;
    case GVDT_Integer:
      OS << Graph->getBlockFreq(Node).getFrequency();
      break;
    case GVDT_Count: {
      auto Count = Graph->getBlockProfileCount(Node);
      if (Count)
        OS << Count.getValue();
      else
        OS << "Unknown";
      break;
    }
    case GVDT_None:
      llvm_unreachable("If we are not supposed to render a graph we should "
                       "never reach this point.");
    }
    return Result;
  }

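  // Label each edge with its branch probability as a percentage. When a hot
  // threshold is given, also color an edge red if its frequency (source block
  // frequency times branch probability) reaches that percentage of the
  // hottest block.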
  std::string getEdgeAttributes(NodeRef Node, EdgeIter EI,
                                const BlockFrequencyInfoT *BFI,
                                const BranchProbabilityInfoT *BPI,
                                unsigned HotPercentThreshold = 0) {
    std::string Str;
    if (!BPI)
      return Str;

    BranchProbability BP = BPI->getEdgeProbability(Node, EI);
    uint32_t N = BP.getNumerator();
    uint32_t D = BP.getDenominator();
    double Percent = 100.0 * N / D;
    raw_string_ostream OS(Str);
    OS << format("label=\"%.1f%%\"", Percent);

    if (HotPercentThreshold) {
      BlockFrequency EFreq = BFI->getBlockFreq(Node) * BP;
      BlockFrequency HotFreq = BlockFrequency(MaxFrequency) *
                               BranchProbability(HotPercentThreshold, 100);

      if (EFreq >= HotFreq) {
        OS << ",color=\"red\"";
      }
    }

    OS.flush();
    return Str;
  }
};

} // end namespace llvm

#undef DEBUG_TYPE

#endif // LLVM_ANALYSIS_BLOCKFREQUENCYINFOIMPL_H