  1. //===- LazyCallGraph.h - Analysis of a Module's call graph ------*- C++ -*-===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. /// \file
  9. ///
  10. /// Implements a lazy call graph analysis and related passes for the new pass
  11. /// manager.
  12. ///
  13. /// NB: This is *not* a traditional call graph! It is a graph which models both
  14. /// the current calls and potential calls. As a consequence there are many
  15. /// edges in this call graph that do not correspond to a 'call' or 'invoke'
  16. /// instruction.
  17. ///
  18. /// The primary use case of this graph analysis is to facilitate iterating
  19. /// across the functions of a module in ways that ensure all callees are
  20. /// visited prior to a caller (given any SCC constraints), or vice versa. As
  21. /// such it is particularly well suited to organizing CGSCC optimizations such
  22. /// as inlining, outlining, argument promotion, etc. That is its primary use
  23. /// case and motivates the design. It may not be appropriate for other
  24. /// purposes. The use graph of functions or some other conservative analysis of
  25. /// call instructions may be interesting for optimizations and subsequent
  26. /// analyses which don't work in the context of an overly specified
  27. /// potential-call-edge graph.
  28. ///
  29. /// To understand the specific rules and nature of this call graph analysis,
  30. /// see the documentation of the \c LazyCallGraph below.
  31. ///
  32. //===----------------------------------------------------------------------===//
  33.  
  34. #ifndef LLVM_ANALYSIS_LAZYCALLGRAPH_H
  35. #define LLVM_ANALYSIS_LAZYCALLGRAPH_H
  36.  
  37. #include "llvm/ADT/ArrayRef.h"
  38. #include "llvm/ADT/DenseMap.h"
  39. #include "llvm/ADT/PointerIntPair.h"
  40. #include "llvm/ADT/SetVector.h"
  41. #include "llvm/ADT/SmallVector.h"
  42. #include "llvm/ADT/StringRef.h"
  43. #include "llvm/ADT/iterator.h"
  44. #include "llvm/ADT/iterator_range.h"
  45. #include "llvm/Analysis/TargetLibraryInfo.h"
  46. #include "llvm/IR/PassManager.h"
  47. #include "llvm/Support/Allocator.h"
  48. #include "llvm/Support/raw_ostream.h"
  49. #include <cassert>
  50. #include <iterator>
  51. #include <optional>
  52. #include <string>
  53. #include <utility>
  54.  
  55. namespace llvm {
  56.  
  57. class Constant;
  58. class Function;
  59. template <class GraphType> struct GraphTraits;
  60. class Module;
  61. class TargetLibraryInfo;
  62. class Value;
  63.  
  64. /// A lazily constructed view of the call graph of a module.
  65. ///
  66. /// With the edges of this graph, the motivating constraint that we are
  67. /// attempting to maintain is that function-local optimization, CGSCC-local
  68. /// optimizations, and optimizations transforming a pair of functions connected
  69. /// by an edge in the graph, do not invalidate a bottom-up traversal of the SCC
  70. /// DAG. That is, no optimizations will delete, remove, or add an edge such
  71. /// that functions already visited in a bottom-up order of the SCC DAG are no
  72. /// longer valid to have visited, or such that functions not yet visited in
  73. /// a bottom-up order of the SCC DAG are not required to have already been
  74. /// visited.
  75. ///
  76. /// Within this constraint, the desire is to minimize the merge points of the
  77. /// SCC DAG. The greater the fanout of the SCC DAG and the fewer merge points
  78. /// in the SCC DAG, the more independence there is in optimizing within it.
  79. /// There is a strong desire to enable parallelization of optimizations over
  80. /// the call graph, and both limited fanout and merge points will (artificially
  81. /// in some cases) limit the scaling of such an effort.
  82. ///
  83. /// To this end, the graph represents both direct and any potential resolution to
  84. /// an indirect call edge. Another way to think about it is that it represents
  85. /// both the direct call edges and any direct call edges that might be formed
  86. /// through static optimizations. Specifically, it considers taking the address
  87. /// of a function to be an edge in the call graph because this might be
  88. /// forwarded to become a direct call by some subsequent function-local
  89. /// optimization. The result is that the graph closely follows the use-def
  90. /// edges for functions. Walking "up" the graph can be done by looking at all
  91. /// of the uses of a function.
  92. ///
  93. /// The roots of the call graph are the external functions and functions
  94. /// escaped into global variables. Those functions can be called from outside
  95. /// of the module or via unknowable means in the IR -- we may not be able to
  96. /// form even a potential call edge from a function body which may dynamically
  97. /// load the function and call it.
  98. ///
  99. /// This analysis still requires updates to remain valid after optimizations
  100. /// which could potentially change the set of potential callees. The
  101. /// constraints it operates under only make the traversal order remain valid.
  102. ///
  103. /// The entire analysis must be re-computed if full interprocedural
  104. /// optimizations run at any point. For example, globalopt completely
  105. /// invalidates the information in this analysis.
  106. ///
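/// A minimal usage sketch (an illustrative addition, not upstream
/// documentation), assuming a Module \c M and a ModuleAnalysisManager \c MAM
/// with the standard function analysis proxy registered:
/// \code
///   LazyCallGraph &CG = MAM.getResult<LazyCallGraphAnalysis>(M);
///   CG.buildRefSCCs();
///   for (LazyCallGraph::RefSCC &RC : CG.postorder_ref_sccs())
///     for (LazyCallGraph::SCC &C : RC)
///       for (LazyCallGraph::Node &N : C)
///         (void)N.getFunction(); // Bottom-up: callees precede callers.
/// \endcode
///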
  107. /// FIXME: This class is named LazyCallGraph in a lame attempt to distinguish
  108. /// it from the existing CallGraph. At some point, it is expected that this
  109. /// will be the only call graph and it will be renamed accordingly.
  110. class LazyCallGraph {
  111. public:
  112.   class Node;
  113.   class EdgeSequence;
  114.   class SCC;
  115.   class RefSCC;
  116.  
  117.   /// A class used to represent edges in the call graph.
  118.   ///
  119.   /// The lazy call graph models both *call* edges and *reference* edges. Call
  120.   /// edges are much what you would expect, and exist when there is a 'call' or
  121.   /// 'invoke' instruction in some function. Reference edges are also tracked
  122.   /// alongside these, and exist whenever any instruction (transitively
  123.   /// through its operands) references a function. All call edges are
  124.   /// inherently reference edges, and so the reference graph forms a superset
  125.   /// of the formal call graph.
  126.   ///
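  /// For illustration only: given a node \c N (a \c Node reference) whose
  /// edges have already been populated, call and reference edges can be
  /// distinguished like this (the handlers are hypothetical):
  /// \code
  ///   for (LazyCallGraph::Edge &E : *N) {
  ///     if (E.isCall())
  ///       handleCall(E.getFunction());      // Direct call or invoke.
  ///     else
  ///       handleReference(E.getFunction()); // Address-taken reference only.
  ///   }
  /// \endcode
  ///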
  127.   /// All of these forms of edges are fundamentally represented as outgoing
  128.   /// edges. The edges are stored in the source node and point at the target
  129.   /// node. This allows the edge structure itself to be a very compact data
  130.   /// structure: essentially a tagged pointer.
  131.   class Edge {
  132.   public:
  133.     /// The kind of edge in the graph.
  134.     enum Kind : bool { Ref = false, Call = true };
  135.  
  136.     Edge();
  137.     explicit Edge(Node &N, Kind K);
  138.  
  139.     /// Test whether the edge is null.
  140.     ///
  141.     /// This happens when an edge has been deleted. We leave the edge objects
  142.     /// around but clear them.
  143.     explicit operator bool() const;
  144.  
  145.     /// Returns the \c Kind of the edge.
  146.     Kind getKind() const;
  147.  
  148.     /// Test whether the edge represents a direct call to a function.
  149.     ///
  150.     /// This requires that the edge is not null.
  151.     bool isCall() const;
  152.  
  153.     /// Get the call graph node referenced by this edge.
  154.     ///
  155.     /// This requires that the edge is not null.
  156.     Node &getNode() const;
  157.  
  158.     /// Get the function referenced by this edge.
  159.     ///
  160.     /// This requires that the edge is not null.
  161.     Function &getFunction() const;
  162.  
  163.   private:
  164.     friend class LazyCallGraph::EdgeSequence;
  165.     friend class LazyCallGraph::RefSCC;
  166.  
  167.     PointerIntPair<Node *, 1, Kind> Value;
  168.  
  169.     void setKind(Kind K) { Value.setInt(K); }
  170.   };
  171.  
  172.   /// The edge sequence object.
  173.   ///
  174.   /// This typically exists entirely within the node but is exposed as
  175.   /// a separate type because a node doesn't initially have edges. An explicit
  176.   /// population step is required to produce this sequence at first and it is
  177.   /// then cached in the node. It is also used to represent edges entering the
  178.   /// graph from outside the module to model the graph's roots.
  179.   ///
  180.   /// The sequence itself is both iterable and indexable. The indexes remain
  181.   /// stable even as the sequence mutates (including removal).
  182.   class EdgeSequence {
  183.     friend class LazyCallGraph;
  184.     friend class LazyCallGraph::Node;
  185.     friend class LazyCallGraph::RefSCC;
  186.  
  187.     using VectorT = SmallVector<Edge, 4>;
  188.     using VectorImplT = SmallVectorImpl<Edge>;
  189.  
  190.   public:
  191.     /// An iterator used for the edges to both entry nodes and child nodes.
  192.     class iterator
  193.         : public iterator_adaptor_base<iterator, VectorImplT::iterator,
  194.                                        std::forward_iterator_tag> {
  195.       friend class LazyCallGraph;
  196.       friend class LazyCallGraph::Node;
  197.  
  198.       VectorImplT::iterator E;
  199.  
  200.       // Build the iterator for a specific position in the edge list.
  201.       iterator(VectorImplT::iterator BaseI, VectorImplT::iterator E)
  202.           : iterator_adaptor_base(BaseI), E(E) {
  203.         while (I != E && !*I)
  204.           ++I;
  205.       }
  206.  
  207.     public:
  208.       iterator() = default;
  209.  
  210.       using iterator_adaptor_base::operator++;
  211.       iterator &operator++() {
  212.         do {
  213.           ++I;
  214.         } while (I != E && !*I);
  215.         return *this;
  216.       }
  217.     };
  218.  
  219.     /// An iterator over specifically call edges.
  220.     ///
  221.     /// This has the same iteration properties as the \c iterator, but
  222.     /// restricts itself to edges which represent actual calls.
  223.     class call_iterator
  224.         : public iterator_adaptor_base<call_iterator, VectorImplT::iterator,
  225.                                        std::forward_iterator_tag> {
  226.       friend class LazyCallGraph;
  227.       friend class LazyCallGraph::Node;
  228.  
  229.       VectorImplT::iterator E;
  230.  
  231.       /// Advance the iterator to the next valid, call edge.
  232.       void advanceToNextEdge() {
  233.         while (I != E && (!*I || !I->isCall()))
  234.           ++I;
  235.       }
  236.  
  237.       // Build the iterator for a specific position in the edge list.
  238.       call_iterator(VectorImplT::iterator BaseI, VectorImplT::iterator E)
  239.           : iterator_adaptor_base(BaseI), E(E) {
  240.         advanceToNextEdge();
  241.       }
  242.  
  243.     public:
  244.       call_iterator() = default;
  245.  
  246.       using iterator_adaptor_base::operator++;
  247.       call_iterator &operator++() {
  248.         ++I;
  249.         advanceToNextEdge();
  250.         return *this;
  251.       }
  252.     };
  253.  
  254.     iterator begin() { return iterator(Edges.begin(), Edges.end()); }
  255.     iterator end() { return iterator(Edges.end(), Edges.end()); }
  256.  
  257.     Edge &operator[](Node &N) {
  258.       assert(EdgeIndexMap.find(&N) != EdgeIndexMap.end() && "No such edge!");
  259.       auto &E = Edges[EdgeIndexMap.find(&N)->second];
  260.       assert(E && "Dead or null edge!");
  261.       return E;
  262.     }
  263.  
  264.     Edge *lookup(Node &N) {
  265.       auto EI = EdgeIndexMap.find(&N);
  266.       if (EI == EdgeIndexMap.end())
  267.         return nullptr;
  268.       auto &E = Edges[EI->second];
  269.       return E ? &E : nullptr;
  270.     }
  271.  
  272.     call_iterator call_begin() {
  273.       return call_iterator(Edges.begin(), Edges.end());
  274.     }
  275.     call_iterator call_end() { return call_iterator(Edges.end(), Edges.end()); }
  276.  
  277.     iterator_range<call_iterator> calls() {
  278.       return make_range(call_begin(), call_end());
  279.     }
  280.  
  281.     bool empty() {
  282.       for (auto &E : Edges)
  283.         if (E)
  284.           return false;
  285.  
  286.       return true;
  287.     }
  288.  
  289.   private:
  290.     VectorT Edges;
  291.     DenseMap<Node *, int> EdgeIndexMap;
  292.  
  293.     EdgeSequence() = default;
  294.  
  295.     /// Internal helper to insert an edge to a node.
  296.     void insertEdgeInternal(Node &ChildN, Edge::Kind EK);
  297.  
  298.     /// Internal helper to change an edge kind.
  299.     void setEdgeKind(Node &ChildN, Edge::Kind EK);
  300.  
  301.     /// Internal helper to remove the edge to the given function.
  302.     bool removeEdgeInternal(Node &ChildN);
  303.   };
  304.  
  305.   /// A node in the call graph.
  306.   ///
  307.   /// This represents a single node. Its primary roles are to cache the list of
  308.   /// callees, de-duplicate and provide fast testing of whether a function is a
  309.   /// callee, and facilitate iteration of child nodes in the graph.
  310.   ///
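  /// For illustration only: assuming \c CG is the graph and \c F is a function
  /// in its module, a node is fetched and lazily populated like this:
  /// \code
  ///   LazyCallGraph::Node &N = CG.get(F);
  ///   N.populate();                // No-op on subsequent calls.
  ///   for (LazyCallGraph::Edge &E : *N)
  ///     (void)E.getNode();         // Edges are now available.
  /// \endcode
  ///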
  311.   /// The node works much like an optional in order to lazily populate the
  312.   /// edges of each node. Until populated, there are no edges. Once populated,
  313.   /// you can access the edges by dereferencing the node or using the `->`
  314.   /// operator as if the node was an `std::optional<EdgeSequence>`.
  315.   class Node {
  316.     friend class LazyCallGraph;
  317.     friend class LazyCallGraph::RefSCC;
  318.  
  319.   public:
  320.     LazyCallGraph &getGraph() const { return *G; }
  321.  
  322.     Function &getFunction() const { return *F; }
  323.  
  324.     StringRef getName() const { return F->getName(); }
  325.  
  326.     /// Equality is defined as address equality.
  327.     bool operator==(const Node &N) const { return this == &N; }
  328.     bool operator!=(const Node &N) const { return !operator==(N); }
  329.  
  330.     /// Tests whether the node has been populated with edges.
  331.     bool isPopulated() const { return Edges.has_value(); }
  332.  
  333.     /// Tests whether this is actually a dead node and no longer valid.
  334.     ///
  335.     /// Users rarely interact with nodes in this state and other methods are
  336.     /// invalid. This is used to model a node in an edge list where the
  337.     /// function has been completely removed.
  338.     bool isDead() const {
  339.       assert(!G == !F &&
  340.              "Both graph and function pointers should be null or non-null.");
  341.       return !G;
  342.     }
  343.  
  344.     // We allow accessing the edges by dereferencing or using the arrow
  345.     // operator, essentially wrapping the internal optional.
  346.     EdgeSequence &operator*() const {
  347.       // Rip const off because the node itself isn't changing here.
  348.       return const_cast<EdgeSequence &>(*Edges);
  349.     }
  350.     EdgeSequence *operator->() const { return &**this; }
  351.  
  352.     /// Populate the edges of this node if necessary.
  353.     ///
  354.     /// The first time this is called it will populate the edges for this node
  355.     /// in the graph. It does this by scanning the underlying function, so once
  356.     /// this is done, any changes to that function must be explicitly reflected
  357.     /// in updates to the graph.
  358.     ///
  359.     /// \returns the populated \c EdgeSequence to simplify walking it.
  360.     ///
  361.     /// This will not update or re-scan anything if called repeatedly. Instead,
  362.     /// the edge sequence is cached and returned immediately on subsequent
  363.     /// calls.
  364.     EdgeSequence &populate() {
  365.       if (Edges)
  366.         return *Edges;
  367.  
  368.       return populateSlow();
  369.     }
  370.  
  371.   private:
  372.     LazyCallGraph *G;
  373.     Function *F;
  374.  
  375.     // We provide for the DFS numbering and Tarjan walk lowlink numbers to be
  376.     // stored directly within the node. These are both '-1' when nodes are part
  377.     // of an SCC (or RefSCC), or '0' when not yet reached in a DFS walk.
  378.     int DFSNumber = 0;
  379.     int LowLink = 0;
  380.  
  381.     std::optional<EdgeSequence> Edges;
  382.  
  383.     /// Basic constructor implements the scanning of F into Edges and
  384.     /// EdgeIndexMap.
  385.     Node(LazyCallGraph &G, Function &F) : G(&G), F(&F) {}
  386.  
  387.     /// Implementation of the scan when populating.
  388.     EdgeSequence &populateSlow();
  389.  
  390.     /// Internal helper to directly replace the function with a new one.
  391.     ///
  392.     /// This is used to facilitate transformations which need to replace the
  393.     /// formal Function object but directly move the body and users from one to
  394.     /// the other.
  395.     void replaceFunction(Function &NewF);
  396.  
  397.     void clear() { Edges.reset(); }
  398.  
  399.     /// Print the name of this node's function.
  400.     friend raw_ostream &operator<<(raw_ostream &OS, const Node &N) {
  401.       return OS << N.F->getName();
  402.     }
  403.  
  404.     /// Dump the name of this node's function to stderr.
  405.     void dump() const;
  406.   };
  407.  
  408.   /// An SCC of the call graph.
  409.   ///
  410.   /// This represents a Strongly Connected Component of the direct call graph
  411.   /// -- ignoring indirect calls and function references. It stores this as
  412.   /// a collection of call graph nodes. While the order of nodes in the SCC is
  413.   /// stable, it is not any particular order.
  414.   ///
  415.   /// The SCCs are nested within a \c RefSCC, see below for details about that
  416.   /// outer structure. SCCs do not support mutation of the call graph, that
  417.   /// must be done through the containing \c RefSCC in order to fully reason
  418.   /// about the ordering and connections of the graph.
  419.   class LLVM_EXTERNAL_VISIBILITY SCC {
  420.     friend class LazyCallGraph;
  421.     friend class LazyCallGraph::Node;
  422.  
  423.     RefSCC *OuterRefSCC;
  424.     SmallVector<Node *, 1> Nodes;
  425.  
  426.     template <typename NodeRangeT>
  427.     SCC(RefSCC &OuterRefSCC, NodeRangeT &&Nodes)
  428.         : OuterRefSCC(&OuterRefSCC), Nodes(std::forward<NodeRangeT>(Nodes)) {}
  429.  
  430.     void clear() {
  431.       OuterRefSCC = nullptr;
  432.       Nodes.clear();
  433.     }
  434.  
  435.     /// Print a short description useful for debugging or logging.
  436.     ///
  437.     /// We print the function names in the SCC wrapped in '()'s and skipping
  438.     /// the middle functions if there are a large number.
  439.     //
  440.     // Note: this is defined inline to dodge issues with GCC's interpretation
  441.     // of enclosing namespaces for friend function declarations.
  442.     friend raw_ostream &operator<<(raw_ostream &OS, const SCC &C) {
  443.       OS << '(';
  444.       int I = 0;
  445.       for (LazyCallGraph::Node &N : C) {
  446.         if (I > 0)
  447.           OS << ", ";
  448.         // Elide the inner elements if there are too many.
  449.         if (I > 8) {
  450.           OS << "..., " << *C.Nodes.back();
  451.           break;
  452.         }
  453.         OS << N;
  454.         ++I;
  455.       }
  456.       OS << ')';
  457.       return OS;
  458.     }
  459.  
  460.     /// Dump a short description of this SCC to stderr.
  461.     void dump() const;
  462.  
  463. #if !defined(NDEBUG) || defined(EXPENSIVE_CHECKS)
  464.     /// Verify invariants about the SCC.
  465.     ///
  466.     /// This will attempt to validate all of the basic invariants within an
  467.     /// SCC, but not that it is a strongly connected component per se.
  468.     /// Primarily useful while building and updating the graph to check that
  469.     /// basic properties are in place rather than having inexplicable crashes
  470.     /// later.
  471.     void verify();
  472. #endif
  473.  
  474.   public:
  475.     using iterator = pointee_iterator<SmallVectorImpl<Node *>::const_iterator>;
  476.  
  477.     iterator begin() const { return Nodes.begin(); }
  478.     iterator end() const { return Nodes.end(); }
  479.  
  480.     int size() const { return Nodes.size(); }
  481.  
  482.     RefSCC &getOuterRefSCC() const { return *OuterRefSCC; }
  483.  
  484.     /// Test if this SCC is a parent of \a C.
  485.     ///
  486.     /// Note that this is linear in the number of edges departing the current
  487.     /// SCC.
  488.     bool isParentOf(const SCC &C) const;
  489.  
  490.     /// Test if this SCC is an ancestor of \a C.
  491.     ///
  492.     /// Note that in the worst case this is linear in the number of edges
  493.     /// departing the current SCC and every SCC in the entire graph reachable
  494.     /// from this SCC. Thus this very well may walk every edge in the entire
  495.     /// call graph! Do not call this in a tight loop!
  496.     bool isAncestorOf(const SCC &C) const;
  497.  
  498.     /// Test if this SCC is a child of \a C.
  499.     ///
  500.     /// See the comments for \c isParentOf for detailed notes about the
  501.     /// complexity of this routine.
  502.     bool isChildOf(const SCC &C) const { return C.isParentOf(*this); }
  503.  
  504.     /// Test if this SCC is a descendant of \a C.
  505.     ///
  506.     /// See the comments for \c isParentOf for detailed notes about the
  507.     /// complexity of this routine.
  508.     bool isDescendantOf(const SCC &C) const { return C.isAncestorOf(*this); }
  509.  
  510.     /// Provide a short name by printing this SCC to a std::string.
  511.     ///
  512.     /// This copes with the fact that we don't have a name per se for an SCC
  513.     /// while still making the use of this in debugging and logging useful.
  514.     std::string getName() const {
  515.       std::string Name;
  516.       raw_string_ostream OS(Name);
  517.       OS << *this;
  518.       OS.flush();
  519.       return Name;
  520.     }
  521.   };
  522.  
  523.   /// A RefSCC of the call graph.
  524.   ///
  525.   /// This models a Strongly Connected Component of function reference edges in
  526.   /// the call graph. As opposed to actual SCCs, these can be used to scope
  527.   /// subgraphs of the module which are independent from other subgraphs of the
  528.   /// module because they do not reference it in any way. This is also the unit
  529.   /// where we do mutation of the graph in order to restrict mutations to those
  530.   /// which don't violate this independence.
  531.   ///
  532.   /// A RefSCC contains a DAG of actual SCCs. All the nodes within the RefSCC
  533.   /// are necessarily within some actual SCC that nests within it. Since
  534.   /// a direct call *is* a reference, there will always be at least one RefSCC
  535.   /// around any SCC.
  536.   ///
  537.   /// Spurious ref edges, meaning ref edges that still exist in the call graph
  538.   /// even though the corresponding IR reference no longer exists, are allowed.
  539.   /// This is mostly to support argument promotion, which can modify a caller to
  540.   /// no longer pass a function. The only place that needs to specially handle
  541.   /// this is deleting a dead function/node, otherwise the dead ref edges are
  542.   /// automatically removed when visiting the function/node no longer containing
  543.   /// the ref edge.
  544.   class RefSCC {
  545.     friend class LazyCallGraph;
  546.     friend class LazyCallGraph::Node;
  547.  
  548.     LazyCallGraph *G;
  549.  
  550.     /// A postorder list of the inner SCCs.
  551.     SmallVector<SCC *, 4> SCCs;
  552.  
  553.     /// A map from SCC to index in the postorder list.
  554.     SmallDenseMap<SCC *, int, 4> SCCIndices;
  555.  
  556.     /// Fast-path constructor. RefSCCs should instead be constructed by calling
  557.     /// formRefSCCFast on the graph itself.
  558.     RefSCC(LazyCallGraph &G);
  559.  
  560.     void clear() {
  561.       SCCs.clear();
  562.       SCCIndices.clear();
  563.     }
  564.  
  565.     /// Print a short description useful for debugging or logging.
  566.     ///
  567.     /// We print the SCCs wrapped in '[]'s and skipping the middle SCCs if
  568.     /// there are a large number.
  569.     //
  570.     // Note: this is defined inline to dodge issues with GCC's interpretation
  571.     // of enclosing namespaces for friend function declarations.
  572.     friend raw_ostream &operator<<(raw_ostream &OS, const RefSCC &RC) {
  573.       OS << '[';
  574.       int I = 0;
  575.       for (LazyCallGraph::SCC &C : RC) {
  576.         if (I > 0)
  577.           OS << ", ";
  578.         // Elide the inner elements if there are too many.
  579.         if (I > 4) {
  580.           OS << "..., " << *RC.SCCs.back();
  581.           break;
  582.         }
  583.         OS << C;
  584.         ++I;
  585.       }
  586.       OS << ']';
  587.       return OS;
  588.     }
  589.  
  590.     /// Dump a short description of this RefSCC to stderr.
  591.     void dump() const;
  592.  
  593. #if !defined(NDEBUG) || defined(EXPENSIVE_CHECKS)
  594.     /// Verify invariants about the RefSCC and all its SCCs.
  595.     ///
  596.     /// This will attempt to validate all of the invariants *within* the
  597.     /// RefSCC, but not that it is a strongly connected component of the larger
  598.     /// graph. This makes it useful even when partially through an update.
  599.     ///
  600.     /// Invariants checked:
  601.     /// - SCCs and their indices match.
  602.     /// - The SCCs list is in fact in post-order.
  603.     void verify();
  604. #endif
  605.  
  606.   public:
  607.     using iterator = pointee_iterator<SmallVectorImpl<SCC *>::const_iterator>;
  608.     using range = iterator_range<iterator>;
  609.     using parent_iterator =
  610.         pointee_iterator<SmallPtrSetImpl<RefSCC *>::const_iterator>;
  611.  
  612.     iterator begin() const { return SCCs.begin(); }
  613.     iterator end() const { return SCCs.end(); }
  614.  
  615.     int size() const { return SCCs.size(); }
  616.  
  617.     SCC &operator[](int Idx) { return *SCCs[Idx]; }
  618.  
  619.     iterator find(SCC &C) const {
  620.       return SCCs.begin() + SCCIndices.find(&C)->second;
  621.     }
  622.  
  623.     /// Test if this RefSCC is a parent of \a RC.
  624.     ///
  625.     /// CAUTION: This method walks every edge in the \c RefSCC, it can be very
  626.     /// expensive.
  627.     bool isParentOf(const RefSCC &RC) const;
  628.  
  629.     /// Test if this RefSCC is an ancestor of \a RC.
  630.     ///
  631.     /// CAUTION: This method walks the directed graph of edges as far as
  632.     /// necessary to find a possible path to the argument. In the worst case
  633.     /// this may walk the entire graph and can be extremely expensive.
  634.     bool isAncestorOf(const RefSCC &RC) const;
  635.  
  636.     /// Test if this RefSCC is a child of \a RC.
  637.     ///
  638.     /// CAUTION: This method walks every edge in the argument \c RefSCC, it can
  639.     /// be very expensive.
  640.     bool isChildOf(const RefSCC &RC) const { return RC.isParentOf(*this); }
  641.  
  642.     /// Test if this RefSCC is a descendant of \a RC.
  643.     ///
  644.     /// CAUTION: This method walks the directed graph of edges as far as
  645.     /// necessary to find a possible path from the argument. In the worst case
  646.     /// this may walk the entire graph and can be extremely expensive.
  647.     bool isDescendantOf(const RefSCC &RC) const {
  648.       return RC.isAncestorOf(*this);
  649.     }
  650.  
  651.     /// Provide a short name by printing this RefSCC to a std::string.
  652.     ///
  653.     /// This copes with the fact that we don't have a name per se for a RefSCC
  654.     /// while still making the use of this in debugging and logging useful.
  655.     std::string getName() const {
  656.       std::string Name;
  657.       raw_string_ostream OS(Name);
  658.       OS << *this;
  659.       OS.flush();
  660.       return Name;
  661.     }
  662.  
  663.     ///@{
  664.     /// \name Mutation API
  665.     ///
  666.     /// These methods provide the core API for updating the call graph in the
  667.     /// presence of (potentially still in-flight) DFS-found RefSCCs and SCCs.
  668.     ///
  669.     /// Note that these methods sometimes have complex runtimes, so be careful
  670.     /// how you call them.
  671.  
  672.     /// Make an existing internal ref edge into a call edge.
  673.     ///
  674.     /// This may form a larger cycle and thus collapse SCCs into TargetN's SCC.
  675.     /// If that happens, the optional callback \p MergeCB will be invoked (if
  676.     /// provided) on the SCCs being merged away prior to actually performing
  677.     /// the merge. Note that this will never include the target SCC as that
  678.     /// will be the SCC functions are merged into to resolve the cycle. Once
  679.     /// this function returns, these merged SCCs are not in a valid state but
  680.     /// the pointers will remain valid until destruction of the parent graph
  681.     /// instance for the purpose of clearing cached information. This function
  682.     /// also returns 'true' if a cycle was formed and some SCCs merged away as
  683.     /// a convenience.
  684.     ///
  685.     /// After this operation, both SourceN's SCC and TargetN's SCC may move
  686.     /// position within this RefSCC's postorder list. Any SCCs merged are
  687.     /// merged into the TargetN's SCC in order to preserve reachability analyses
  688.     /// which took place on that SCC.
  689.     bool switchInternalEdgeToCall(
  690.         Node &SourceN, Node &TargetN,
  691.         function_ref<void(ArrayRef<SCC *> MergedSCCs)> MergeCB = {});
  692.  
  693.     /// Make an existing internal call edge between separate SCCs into a ref
  694.     /// edge.
  695.     ///
  696.     /// If SourceN and TargetN are in separate SCCs within this RefSCC, changing
  697.     /// the call edge between them to a ref edge is a trivial operation that
  698.     /// does not require any structural changes to the call graph.
  699.     void switchTrivialInternalEdgeToRef(Node &SourceN, Node &TargetN);
  700.  
  701.     /// Make an existing internal call edge within a single SCC into a ref
  702.     /// edge.
  703.     ///
  704.     /// Since SourceN and TargetN are part of a single SCC, this SCC may be
  705.     /// split up due to breaking a cycle in the call edges that formed it. If
  706.     /// that happens, then this routine will insert new SCCs into the postorder
  707.     /// list *before* the SCC of TargetN (previously the SCC of both). This
  708.     /// preserves postorder as the TargetN can reach all of the other nodes by
  709.     /// definition of previously being in a single SCC formed by the cycle from
  710.     /// SourceN to TargetN.
  711.     ///
  712.     /// The newly added SCCs are added *immediately* and contiguously
  713.     /// prior to the TargetN SCC, and the returned range covers the new SCCs in
  714.     /// the RefSCC's postorder sequence. You can directly iterate the returned
  715.     /// range to observe all of the new SCCs in postorder.
  716.     ///
  717.     /// Note that if SourceN and TargetN are in separate SCCs, the simpler
  718.     /// routine `switchTrivialInternalEdgeToRef` should be used instead.
  719.     iterator_range<iterator> switchInternalEdgeToRef(Node &SourceN,
  720.                                                      Node &TargetN);
  721.  
  722.     /// Make an existing outgoing ref edge into a call edge.
  723.     ///
  724.     /// Note that this is trivial as there are no cyclic impacts and there
  725.     /// remains a reference edge.
  726.     void switchOutgoingEdgeToCall(Node &SourceN, Node &TargetN);
  727.  
  728.     /// Make an existing outgoing call edge into a ref edge.
  729.     ///
  730.     /// This is trivial as there are no cyclic impacts and there remains
  731.     /// a reference edge.
  732.     void switchOutgoingEdgeToRef(Node &SourceN, Node &TargetN);
  733.  
  734.     /// Insert a ref edge from one node in this RefSCC to another in this
  735.     /// RefSCC.
  736.     ///
  737.     /// This is always a trivial operation as it doesn't change any part of the
  738.     /// graph structure besides connecting the two nodes.
  739.     ///
  740.     /// Note that we don't support directly inserting internal *call* edges
  741.     /// because that could change the graph structure and requires returning
  742.     /// information about what became invalid. As a consequence, the pattern
  743.     /// should be to first insert the necessary ref edge, and then to switch it
  744.     /// to a call edge if needed and handle any invalidation that results. See
  745.     /// the \c switchInternalEdgeToCall routine for details.
  746.     void insertInternalRefEdge(Node &SourceN, Node &TargetN);
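
    // Illustrative pattern (not an additional API): to introduce a new
    // internal *call* edge between nodes of this RefSCC, first insert the ref
    // edge and then promote it, e.g. for nodes SourceN and TargetN of RC:
    //
    //   RC.insertInternalRefEdge(SourceN, TargetN);
    //   RC.switchInternalEdgeToCall(SourceN, TargetN);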
  747.  
  748.     /// Insert an edge whose parent is in this RefSCC and child is in some
  749.     /// child RefSCC.
  750.     ///
  751.     /// There must be an existing path from the \p SourceN to the \p TargetN.
  752.     /// This operation is inexpensive and does not change the set of SCCs and
  753.     /// RefSCCs in the graph.
  754.     void insertOutgoingEdge(Node &SourceN, Node &TargetN, Edge::Kind EK);
  755.  
  756.     /// Insert an edge whose source is in a descendant RefSCC and target is in
  757.     /// this RefSCC.
  758.     ///
  759.     /// There must be an existing path from the target to the source in this
  760.     /// case.
  761.     ///
  762.     /// NB! This has the potential to be a very expensive function. It
  763.     /// inherently forms a cycle in the prior RefSCC DAG and we have to merge
  764.     /// RefSCCs to resolve that cycle. But finding all of the RefSCCs which
  765.     /// participate in the cycle can in the worst case require traversing every
  766.     /// RefSCC in the graph. Every attempt is made to avoid that, but passes
  767.     /// must still exercise caution calling this routine repeatedly.
  768.     ///
  769.     /// Also note that this can only insert ref edges. In order to insert
  770.     /// a call edge, first insert a ref edge and then switch it to a call edge.
  771.     /// These are intentionally kept as separate interfaces because each step
  772.     /// of the operation invalidates a different set of data structures.
  773.     ///
  774.     /// This returns all the RefSCCs which were merged into this RefSCC
  775.     /// (the target's). This allows callers to invalidate any cached
  776.     /// information.
  777.     ///
  778.     /// FIXME: We could possibly optimize this quite a bit for cases where the
  779.     /// caller and callee are very nearby in the graph. See comments in the
  780.     /// implementation for details, but that use case might impact users.
  781.     SmallVector<RefSCC *, 1> insertIncomingRefEdge(Node &SourceN,
  782.                                                    Node &TargetN);
  783.  
  784.     /// Remove an edge whose source is in this RefSCC and target is *not*.
  785.     ///
  786.     /// This removes an inter-RefSCC edge. All inter-RefSCC edges originating
  787.     /// from this SCC have been fully explored by any in-flight DFS graph
  788.     /// formation, so this is always safe to call once you have the source
  789.     /// RefSCC.
  790.     ///
  791.     /// This operation does not change the cyclic structure of the graph and so
  792.     /// is very inexpensive. It may change the connectivity graph of the SCCs
  793.     /// though, so be careful calling this while iterating over them.
  794.     void removeOutgoingEdge(Node &SourceN, Node &TargetN);
  795.  
  796.     /// Remove a list of ref edges which are entirely within this RefSCC.
  797.     ///
  798.     /// Both the \a SourceN and all of the \a TargetNs must be within this
  799.     /// RefSCC. Removing these edges may break cycles that form this RefSCC and
  800.     /// thus this operation may change the RefSCC graph significantly. In
  801.     /// particular, this operation will re-form new RefSCCs based on the
  802.     /// remaining connectivity of the graph. The following invariants are
  803.     /// guaranteed to hold after calling this method:
  804.     ///
  805.     /// 1) If a ref-cycle remains after removal, it leaves this RefSCC intact
  806.     ///    and in the graph. No new RefSCCs are built.
  807.     /// 2) Otherwise, this RefSCC will be dead after this call and no longer in
  808.     ///    the graph or the postorder traversal of the call graph. Any iterator
  809.     ///    pointing at this RefSCC will become invalid.
  810.     /// 3) All newly formed RefSCCs will be returned and the order of the
  811.     ///    RefSCCs returned will be a valid postorder traversal of the new
  812.     ///    RefSCCs.
  813.     /// 4) No RefSCC other than this RefSCC has its member set changed (this is
  814.     ///    inherent in the definition of removing such an edge).
  815.     ///
  816.     /// These invariants are very important to ensure that we can build
  817.     /// optimization pipelines on top of the CGSCC pass manager which
  818.     /// intelligently update the RefSCC graph without invalidating other parts
  819.     /// of the RefSCC graph.
  820.     ///
  821.     /// Note that we provide no routine to remove a *call* edge. Instead, you
  822.     /// must first switch it to a ref edge using \c switchInternalEdgeToRef.
  823.     /// This split API is intentional as each of these two steps can invalidate
  824.     /// a different aspect of the graph structure and needs to have the
  825.     /// invalidation handled independently.
  826.     ///
  827.     /// The runtime complexity of this method is, in the worst case, O(V+E)
  828.     /// where V is the number of nodes in this RefSCC and E is the number of
  829.     /// edges leaving the nodes in this RefSCC. Note that E includes both edges
  830.     /// within this RefSCC and edges from this RefSCC to child RefSCCs. Some
  831.     /// effort has been made to minimize the overhead of common cases such as
  832.     /// self-edges and edge removals which result in a spanning tree with no
  833.     /// more cycles.
  834.     [[nodiscard]] SmallVector<RefSCC *, 1>
  835.     removeInternalRefEdge(Node &SourceN, ArrayRef<Node *> TargetNs);
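
    // Illustrative sketch of the split API described above, assuming SourceN
    // and TargetN share an SCC of RC: a call edge is removed by first demoting
    // it to a ref edge and then removing that ref edge, updating any cached
    // state for the RefSCCs that result.
    //
    //   RC.switchInternalEdgeToRef(SourceN, TargetN);
    //   for (LazyCallGraph::RefSCC *NewRC :
    //        RC.removeInternalRefEdge(SourceN, {&TargetN}))
    //     invalidateCachesFor(*NewRC); // Hypothetical per-pass bookkeeping.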
  836.  
  837.     /// A convenience wrapper around the above to handle trivial cases of
  838.     /// inserting a new call edge.
  839.     ///
  840.     /// This is trivial whenever the target is in the same SCC as the source or
  841.     /// the edge is an outgoing edge to some descendant SCC. In these cases
  842.     /// there is no change to the cyclic structure of SCCs or RefSCCs.
  843.     ///
  844.     /// To further make calling this convenient, it also handles inserting
  845.     /// already existing edges.
  846.     void insertTrivialCallEdge(Node &SourceN, Node &TargetN);
  847.  
  848.     /// A convenience wrapper around the above to handle trivial cases of
  849.     /// inserting a new ref edge.
  850.     ///
  851.     /// This is trivial whenever the target is in the same RefSCC as the source
  852.     /// or the edge is an outgoing edge to some descendant RefSCC. In these
  853.     /// cases there is no change to the cyclic structure of the RefSCCs.
  854.     ///
  855.     /// To further make calling this convenient, it also handles inserting
  856.     /// already existing edges.
  857.     void insertTrivialRefEdge(Node &SourceN, Node &TargetN);
  858.  
  859.     /// Directly replace a node's function with a new function.
  860.     ///
  861.     /// This should be used when moving the body and users of a function to
  862.     /// a new formal function object but not otherwise changing the call graph
  863.     /// structure in any way.
  864.     ///
  865.     /// It requires that the old function in the provided node have zero uses
  866.     /// and the new function must have calls and references to it establishing
  867.     /// an equivalent graph.
  868.     void replaceNodeFunction(Node &N, Function &NewF);
  869.  
  870.     ///@}
  871.   };
  872.  
  873.   /// A post-order depth-first RefSCC iterator over the call graph.
  874.   ///
  875.   /// This iterator walks the cached post-order sequence of RefSCCs. However,
  876.   /// it trades stability for flexibility. It is restricted to a forward
  877.   /// iterator but will survive mutations which insert new RefSCCs and continue
  878.   /// to point to the same RefSCC even if it moves in the post-order sequence.
  879.   class postorder_ref_scc_iterator
  880.       : public iterator_facade_base<postorder_ref_scc_iterator,
  881.                                     std::forward_iterator_tag, RefSCC> {
  882.     friend class LazyCallGraph;
  883.     friend class LazyCallGraph::Node;
  884.  
  885.     /// Nonce type to select the constructor for the end iterator.
  886.     struct IsAtEndT {};
  887.  
  888.     LazyCallGraph *G;
  889.     RefSCC *RC = nullptr;
  890.  
  891.     /// Build the begin iterator for a node.
  892.     postorder_ref_scc_iterator(LazyCallGraph &G) : G(&G), RC(getRC(G, 0)) {
  893.       incrementUntilNonEmptyRefSCC();
  894.     }
  895.  
  896.     /// Build the end iterator for a node. This is selected purely by overload.
  897.     postorder_ref_scc_iterator(LazyCallGraph &G, IsAtEndT /*Nonce*/) : G(&G) {}
  898.  
  899.     /// Get the post-order RefSCC at the given index of the postorder walk,
  900.     /// populating it if necessary.
  901.     static RefSCC *getRC(LazyCallGraph &G, int Index) {
  902.       if (Index == (int)G.PostOrderRefSCCs.size())
  903.         // We're at the end.
  904.         return nullptr;
  905.  
  906.       return G.PostOrderRefSCCs[Index];
  907.     }
  908.  
  909.     // Keep incrementing until RC is non-empty (or null).
  910.     void incrementUntilNonEmptyRefSCC() {
  911.       while (RC && RC->size() == 0)
  912.         increment();
  913.     }
  914.  
  915.     void increment() {
  916.       assert(RC && "Cannot increment the end iterator!");
  917.       RC = getRC(*G, G->RefSCCIndices.find(RC)->second + 1);
  918.     }
  919.  
  920.   public:
  921.     bool operator==(const postorder_ref_scc_iterator &Arg) const {
  922.       return G == Arg.G && RC == Arg.RC;
  923.     }
  924.  
  925.     reference operator*() const { return *RC; }
  926.  
  927.     using iterator_facade_base::operator++;
  928.     postorder_ref_scc_iterator &operator++() {
  929.       increment();
  930.       incrementUntilNonEmptyRefSCC();
  931.       return *this;
  932.     }
  933.   };
  934.  
  935.   /// Construct a graph for the given module.
  936.   ///
  937.   /// This sets up the graph and computes all of the entry points of the graph.
  938.   /// No function definitions are scanned until their nodes in the graph are
  939.   /// requested during traversal.
  940.   LazyCallGraph(Module &M,
  941.                 function_ref<TargetLibraryInfo &(Function &)> GetTLI);
  942.  
  943.   LazyCallGraph(LazyCallGraph &&G);
  944.   LazyCallGraph &operator=(LazyCallGraph &&RHS);
  945.  
  946.   bool invalidate(Module &, const PreservedAnalyses &PA,
  947.                   ModuleAnalysisManager::Invalidator &);
  948.  
  949.   EdgeSequence::iterator begin() { return EntryEdges.begin(); }
  950.   EdgeSequence::iterator end() { return EntryEdges.end(); }
  951.  
  952.   void buildRefSCCs();
  953.  
  954.   postorder_ref_scc_iterator postorder_ref_scc_begin() {
  955.     if (!EntryEdges.empty())
  956.       assert(!PostOrderRefSCCs.empty() &&
  957.              "Must form RefSCCs before iterating them!");
  958.     return postorder_ref_scc_iterator(*this);
  959.   }
  960.   postorder_ref_scc_iterator postorder_ref_scc_end() {
  961.     if (!EntryEdges.empty())
  962.       assert(!PostOrderRefSCCs.empty() &&
  963.              "Must form RefSCCs before iterating them!");
  964.     return postorder_ref_scc_iterator(*this,
  965.                                       postorder_ref_scc_iterator::IsAtEndT());
  966.   }
  967.  
  968.   iterator_range<postorder_ref_scc_iterator> postorder_ref_sccs() {
  969.     return make_range(postorder_ref_scc_begin(), postorder_ref_scc_end());
  970.   }
  971.  
  972.   /// Lookup a function in the graph which has already been scanned and added.
  973.   Node *lookup(const Function &F) const { return NodeMap.lookup(&F); }
  974.  
  975.   /// Lookup a function's SCC in the graph.
  976.   ///
  977.   /// \returns null if the function hasn't been assigned an SCC via the RefSCC
  978.   /// iterator walk.
  979.   SCC *lookupSCC(Node &N) const { return SCCMap.lookup(&N); }
  980.  
  981.   /// Lookup a function's RefSCC in the graph.
  982.   ///
  983.   /// \returns null if the function hasn't been assigned a RefSCC via the
  984.   /// RefSCC iterator walk.
  985.   RefSCC *lookupRefSCC(Node &N) const {
  986.     if (SCC *C = lookupSCC(N))
  987.       return &C->getOuterRefSCC();
  988.  
  989.     return nullptr;
  990.   }
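
  // Illustrative sketch (not part of the upstream header): mapping a function
  // back to its SCC and RefSCC once the RefSCC walk has assigned them,
  // assuming \c CG is the graph and \c F a function in its module.
  //
  //   if (LazyCallGraph::Node *N = CG.lookup(F))
  //     if (LazyCallGraph::SCC *C = CG.lookupSCC(*N)) {
  //       LazyCallGraph::RefSCC &RC = C->getOuterRefSCC();
  //       (void)RC; // Use the RefSCC here.
  //     }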
  991.  
  992.   /// Get a graph node for a given function, scanning it to populate the graph
  993.   /// data as necessary.
  994.   Node &get(Function &F) {
  995.     Node *&N = NodeMap[&F];
  996.     if (N)
  997.       return *N;
  998.  
  999.     return insertInto(F, N);
  1000.   }
  1001.  
  1002.   /// Get the sequence of known and defined library functions.
  1003.   ///
  1004.   /// These functions, because they are known to LLVM, can have calls
  1005.   /// introduced out of thin air from arbitrary IR.
  1006.   ArrayRef<Function *> getLibFunctions() const {
  1007.     return LibFunctions.getArrayRef();
  1008.   }
  1009.  
  1010.   /// Test whether a function is a known and defined library function tracked by
  1011.   /// the call graph.
  1012.   ///
  1013.   /// Because these functions are known to LLVM they are specially modeled in
  1014.   /// the call graph and even when all IR-level references have been removed
  1015.   /// remain active and reachable.
  1016.   bool isLibFunction(Function &F) const { return LibFunctions.count(&F); }
  1017.  
  1018.   ///@{
  1019.   /// \name Pre-SCC Mutation API
  1020.   ///
  1021.   /// These methods are only valid to call prior to forming any SCCs for this
  1022.   /// call graph. They can be used to update the core node-graph during
  1023.   /// a node-based inorder traversal that precedes any SCC-based traversal.
  1024.   ///
  1025.   /// Once you begin manipulating a call graph's SCCs, most mutation of the
  1026.   /// graph must be performed via a RefSCC method. There are some exceptions
  1027.   /// below.
  1028.  
  1029.   /// Update the call graph after inserting a new edge.
  1030.   void insertEdge(Node &SourceN, Node &TargetN, Edge::Kind EK);
  1031.  
  1032.   /// Update the call graph after inserting a new edge.
  1033.   void insertEdge(Function &Source, Function &Target, Edge::Kind EK) {
  1034.     return insertEdge(get(Source), get(Target), EK);
  1035.   }
  1036.  
  1037.   /// Update the call graph after deleting an edge.
  1038.   void removeEdge(Node &SourceN, Node &TargetN);
  1039.  
  1040.   /// Update the call graph after deleting an edge.
  1041.   void removeEdge(Function &Source, Function &Target) {
  1042.     return removeEdge(get(Source), get(Target));
  1043.   }
  1044.  
  1045.   ///@}
  1046.  
  1047.   ///@{
  1048.   /// \name General Mutation API
  1049.   ///
  1050.   /// There are a very limited set of mutations allowed on the graph as a whole
  1051.   /// once SCCs have started to be formed. These routines have strict contracts
  1052.   /// but may be called at any point.
  1053.  
  1054.   /// Remove a dead function from the call graph (typically to delete it).
  1055.   ///
  1056.   /// Note that the function must have an empty use list, and the call graph
  1057.   /// must be up-to-date prior to calling this. That means it is by itself in
  1058.   /// a maximal SCC which is by itself in a maximal RefSCC, etc. No structural
  1059.   /// changes result from calling this routine other than potentially removing
  1060.   /// entry points into the call graph.
  1061.   ///
  1062.   /// If SCC formation has begun, this function must not be part of the current
  1063.   /// DFS in order to call this safely. Typically, the function will have been
  1064.   /// fully visited by the DFS prior to calling this routine.
  1065.   void removeDeadFunction(Function &F);
  1066.  
  1067.   /// Add a new function split/outlined from an existing function.
  1068.   ///
  1069.   /// The new function may only reference other functions that the original
  1070.   /// function did.
  1071.   ///
  1072.   /// The original function must reference (either directly or indirectly) the
  1073.   /// new function.
  1074.   ///
  1075.   /// The new function may also reference the original function.
  1076.   /// It may end up in a parent SCC in the case that the original function's
  1077.   /// edge to the new function is a ref edge, and the edge back is a call edge.
  1078.   void addSplitFunction(Function &OriginalFunction, Function &NewFunction);
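
  // Illustrative sketch: if an outliner extracts part of OrigF into a freshly
  // created NewF which OrigF now calls, the graph is updated in place with:
  //
  //   CG.addSplitFunction(OrigF, NewF);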
  1079.  
  1080.   /// Add new ref-recursive functions split/outlined from an existing function.
  1081.   ///
  1082.   /// The new functions may only reference other functions that the original
  1083.   /// function did. The new functions may reference (not call) the original
  1084.   /// function.
  1085.   ///
  1086.   /// The original function must reference (not call) all new functions.
  1087.   /// All new functions must reference (not call) each other.
  1088.   void addSplitRefRecursiveFunctions(Function &OriginalFunction,
  1089.                                      ArrayRef<Function *> NewFunctions);
  1090.  
  1091.   ///@}
  1092.  
  1093.   ///@{
  1094.   /// \name Static helpers for code doing updates to the call graph.
  1095.   ///
  1096.   /// These helpers are used to implement parts of the call graph but are also
  1097.   /// useful to code doing updates or otherwise wanting to walk the IR in the
  1098.   /// same patterns as when we build the call graph.
  1099.  
  1100.   /// Recursively visits the defined functions whose address is reachable from
  1101.   /// every constant in the \p Worklist.
  1102.   ///
  1103.   /// Doesn't recurse through any constants already in the \p Visited set, and
  1104.   /// updates that set with every constant visited.
  1105.   ///
  1106.   /// For each defined function, calls \p Callback with that function.
  1107.   static void visitReferences(SmallVectorImpl<Constant *> &Worklist,
  1108.                               SmallPtrSetImpl<Constant *> &Visited,
  1109.                               function_ref<void(Function &)> Callback);
  1110.  
  1111.   ///@}
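
  // Illustrative sketch (GV is a hypothetical llvm::GlobalVariable): collect
  // the defined functions reachable from a global's initializer.
  //
  //   SmallVector<Constant *, 16> Worklist;
  //   SmallPtrSet<Constant *, 16> Visited;
  //   if (GV.hasInitializer() && Visited.insert(GV.getInitializer()).second)
  //     Worklist.push_back(GV.getInitializer());
  //   LazyCallGraph::visitReferences(Worklist, Visited,
  //                                  [](Function &F) { /* record F */ });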
  1112.  
  1113. private:
  1114.   using node_stack_iterator = SmallVectorImpl<Node *>::reverse_iterator;
  1115.   using node_stack_range = iterator_range<node_stack_iterator>;
  1116.  
  1117.   /// Allocator that holds all the call graph nodes.
  1118.   SpecificBumpPtrAllocator<Node> BPA;
  1119.  
  1120.   /// Maps function->node for fast lookup.
  1121.   DenseMap<const Function *, Node *> NodeMap;
  1122.  
  1123.   /// The entry edges into the graph.
  1124.   ///
  1125.   /// These edges are from "external" sources. Put another way, they
  1126.   /// escape at the module scope.
  1127.   EdgeSequence EntryEdges;
  1128.  
  1129.   /// Allocator that holds all the call graph SCCs.
  1130.   SpecificBumpPtrAllocator<SCC> SCCBPA;
  1131.  
  1132.   /// Maps Function -> SCC for fast lookup.
  1133.   DenseMap<Node *, SCC *> SCCMap;
  1134.  
  1135.   /// Allocator that holds all the call graph RefSCCs.
  1136.   SpecificBumpPtrAllocator<RefSCC> RefSCCBPA;
  1137.  
  1138.   /// The post-order sequence of RefSCCs.
  1139.   ///
  1140.   /// This list is lazily formed the first time we walk the graph.
  1141.   SmallVector<RefSCC *, 16> PostOrderRefSCCs;
  1142.  
  1143.   /// A map from RefSCC to the index for it in the postorder sequence of
  1144.   /// RefSCCs.
  1145.   DenseMap<RefSCC *, int> RefSCCIndices;
  1146.  
  1147.   /// Defined functions that are also known library functions which the
  1148.   /// optimizer can reason about and therefore might introduce calls to out of
  1149.   /// thin air.
  1150.   SmallSetVector<Function *, 4> LibFunctions;
  1151.  
  1152.   /// Helper to insert a new function, with an already looked-up entry in
  1153.   /// the NodeMap.
  1154.   Node &insertInto(Function &F, Node *&MappedN);
  1155.  
  1156.   /// Helper to initialize a new node created outside of creating SCCs and add
  1157.   /// it to the NodeMap if necessary. For example, useful when a function is
  1158.   /// split.
  1159.   Node &initNode(Function &F);
  1160.  
  1161.   /// Helper to update pointers back to the graph object during moves.
  1162.   void updateGraphPtrs();
  1163.  
  1164.   /// Allocates an SCC and constructs it using the graph allocator.
  1165.   ///
  1166.   /// The arguments are forwarded to the constructor.
  1167.   template <typename... Ts> SCC *createSCC(Ts &&...Args) {
  1168.     return new (SCCBPA.Allocate()) SCC(std::forward<Ts>(Args)...);
  1169.   }
  1170.  
  1171.   /// Allocates a RefSCC and constructs it using the graph allocator.
  1172.   ///
  1173.   /// The arguments are forwarded to the constructor.
  1174.   template <typename... Ts> RefSCC *createRefSCC(Ts &&...Args) {
  1175.     return new (RefSCCBPA.Allocate()) RefSCC(std::forward<Ts>(Args)...);
  1176.   }
  1177.  
  1178.   /// Common logic for building SCCs from a sequence of roots.
  1179.   ///
  1180.   /// This is a very generic implementation of the depth-first walk and SCC
  1181.   /// formation algorithm. It uses a generic sequence of roots and generic
  1182.   /// callbacks for each step. This is designed to be used to implement both
  1183.   /// the RefSCC formation and SCC formation with shared logic.
  1184.   ///
  1185.   /// Currently this is a relatively naive implementation of Tarjan's DFS
  1186.   /// algorithm to form the SCCs.
  1187.   ///
  1188.   /// FIXME: We should consider newer variants such as Nuutila.
  1189.   template <typename RootsT, typename GetBeginT, typename GetEndT,
  1190.             typename GetNodeT, typename FormSCCCallbackT>
  1191.   static void buildGenericSCCs(RootsT &&Roots, GetBeginT &&GetBegin,
  1192.                                GetEndT &&GetEnd, GetNodeT &&GetNode,
  1193.                                FormSCCCallbackT &&FormSCC);
  1194.  
  1195.   /// Build the SCCs for a RefSCC out of a list of nodes.
  1196.   void buildSCCs(RefSCC &RC, node_stack_range Nodes);
  1197.  
  1198.   /// Get the index of a RefSCC within the postorder traversal.
  1199.   ///
  1200.   /// Requires that this RefSCC is a valid one in the (perhaps partial)
  1201.   /// postorder traversed part of the graph.
  1202.   int getRefSCCIndex(RefSCC &RC) {
  1203.     auto IndexIt = RefSCCIndices.find(&RC);
  1204.     assert(IndexIt != RefSCCIndices.end() && "RefSCC doesn't have an index!");
  1205.     assert(PostOrderRefSCCs[IndexIt->second] == &RC &&
  1206.            "Index does not point back at RC!");
  1207.     return IndexIt->second;
  1208.   }
  1209. };
  1210.  
  1211. inline LazyCallGraph::Edge::Edge() = default;
  1212. inline LazyCallGraph::Edge::Edge(Node &N, Kind K) : Value(&N, K) {}
  1213.  
  1214. inline LazyCallGraph::Edge::operator bool() const {
  1215.   return Value.getPointer() && !Value.getPointer()->isDead();
  1216. }
  1217.  
  1218. inline LazyCallGraph::Edge::Kind LazyCallGraph::Edge::getKind() const {
  1219.   assert(*this && "Queried a null edge!");
  1220.   return Value.getInt();
  1221. }
  1222.  
  1223. inline bool LazyCallGraph::Edge::isCall() const {
  1224.   assert(*this && "Queried a null edge!");
  1225.   return getKind() == Call;
  1226. }
  1227.  
  1228. inline LazyCallGraph::Node &LazyCallGraph::Edge::getNode() const {
  1229.   assert(*this && "Queried a null edge!");
  1230.   return *Value.getPointer();
  1231. }
  1232.  
  1233. inline Function &LazyCallGraph::Edge::getFunction() const {
  1234.   assert(*this && "Queried a null edge!");
  1235.   return getNode().getFunction();
  1236. }
  1237.  
  1238. // Provide GraphTraits specializations for call graphs.
  1239. template <> struct GraphTraits<LazyCallGraph::Node *> {
  1240.   using NodeRef = LazyCallGraph::Node *;
  1241.   using ChildIteratorType = LazyCallGraph::EdgeSequence::iterator;
  1242.  
  1243.   static NodeRef getEntryNode(NodeRef N) { return N; }
  1244.   static ChildIteratorType child_begin(NodeRef N) { return (*N)->begin(); }
  1245.   static ChildIteratorType child_end(NodeRef N) { return (*N)->end(); }
  1246. };
  1247. template <> struct GraphTraits<LazyCallGraph *> {
  1248.   using NodeRef = LazyCallGraph::Node *;
  1249.   using ChildIteratorType = LazyCallGraph::EdgeSequence::iterator;
  1250.  
  1251.   static NodeRef getEntryNode(NodeRef N) { return N; }
  1252.   static ChildIteratorType child_begin(NodeRef N) { return (*N)->begin(); }
  1253.   static ChildIteratorType child_end(NodeRef N) { return (*N)->end(); }
  1254. };
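
// Illustrative sketch: these specializations let generic graph algorithms walk
// the call graph, e.g. a depth-first traversal from an entry function. Note
// that every node reached this way must already have been populated, since the
// child iterators dereference each node's edge sequence.
//
//   #include "llvm/ADT/DepthFirstIterator.h"
//
//   LazyCallGraph::Node &Root = CG.get(EntryFn);
//   Root.populate();
//   for (LazyCallGraph::Node *N : llvm::depth_first(&Root))
//     (void)N->getFunction();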
  1255.  
  1256. /// An analysis pass which computes the call graph for a module.
  1257. class LazyCallGraphAnalysis : public AnalysisInfoMixin<LazyCallGraphAnalysis> {
  1258.   friend AnalysisInfoMixin<LazyCallGraphAnalysis>;
  1259.  
  1260.   static AnalysisKey Key;
  1261.  
  1262. public:
  1263.   /// Inform generic clients of the result type.
  1264.   using Result = LazyCallGraph;
  1265.  
  1266.   /// Compute the \c LazyCallGraph for the module \c M.
  1267.   ///
  1268.   /// This just builds the set of entry points to the call graph. The rest is
  1269.   /// built lazily as it is walked.
  1270.   LazyCallGraph run(Module &M, ModuleAnalysisManager &AM) {
  1271.     FunctionAnalysisManager &FAM =
  1272.         AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  1273.     auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
  1274.       return FAM.getResult<TargetLibraryAnalysis>(F);
  1275.     };
  1276.     return LazyCallGraph(M, GetTLI);
  1277.   }
  1278. };
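
// Illustrative sketch of building the graph outside the pass manager,
// mirroring the run() method above (M is an existing Module):
//
//   FunctionAnalysisManager FAM;
//   FAM.registerPass([] { return TargetLibraryAnalysis(); });
//   auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
//     return FAM.getResult<TargetLibraryAnalysis>(F);
//   };
//   LazyCallGraph CG(M, GetTLI);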
  1279.  
  1280. /// A pass which prints the call graph to a \c raw_ostream.
  1281. ///
  1282. /// This is primarily useful for testing the analysis.
  1283. class LazyCallGraphPrinterPass
  1284.     : public PassInfoMixin<LazyCallGraphPrinterPass> {
  1285.   raw_ostream &OS;
  1286.  
  1287. public:
  1288.   explicit LazyCallGraphPrinterPass(raw_ostream &OS);
  1289.  
  1290.   PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM);
  1291. };
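
// Illustrative usage (MPM is an already configured ModulePassManager):
//
//   MPM.addPass(LazyCallGraphPrinterPass(llvm::errs()));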
  1292.  
  1293. /// A pass which prints the call graph as a DOT file to a \c raw_ostream.
  1294. ///
  1295. /// This is primarily useful for visualization purposes.
  1296. class LazyCallGraphDOTPrinterPass
  1297.     : public PassInfoMixin<LazyCallGraphDOTPrinterPass> {
  1298.   raw_ostream &OS;
  1299.  
  1300. public:
  1301.   explicit LazyCallGraphDOTPrinterPass(raw_ostream &OS);
  1302.  
  1303.   PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM);
  1304. };
  1305.  
  1306. } // end namespace llvm
  1307.  
  1308. #endif // LLVM_ANALYSIS_LAZYCALLGRAPH_H
  1309.