diff options
Diffstat (limited to 'lib')
-rw-r--r-- | lib/Analysis/CallGraph.cpp | 29 |
-rw-r--r-- | lib/StaticAnalyzer/Frontend/AnalysisConsumer.cpp | 62 |
2 files changed, 34 insertions, 57 deletions
diff --git a/lib/Analysis/CallGraph.cpp b/lib/Analysis/CallGraph.cpp index 424e5c795e..4082e2dc0e 100644 --- a/lib/Analysis/CallGraph.cpp +++ b/lib/Analysis/CallGraph.cpp @@ -16,6 +16,7 @@ #include "clang/AST/ASTContext.h" #include "clang/AST/Decl.h" #include "clang/AST/StmtVisitor.h" +#include "llvm/ADT/PostOrderIterator.h" #include "llvm/ADT/Statistic.h" #include "llvm/Support/GraphWriter.h" @@ -141,14 +142,8 @@ bool CallGraph::includeInGraph(const Decl *D) { void CallGraph::addNodeForDecl(Decl* D, bool IsGlobal) { assert(D); - // Do nothing if the node already exists. - if (FunctionMap.find(D) != FunctionMap.end()) - return; - // Allocate a new node, mark it as root, and process it's calls. CallGraphNode *Node = getOrInsertNode(D); - if (IsGlobal) - Root->addCallee(Node, this); // Process all the calls by this function as well. CGBuilder builder(this, Node); @@ -168,23 +163,31 @@ CallGraphNode *CallGraph::getOrInsertNode(Decl *F) { return Node; Node = new CallGraphNode(F); - // If not root, add to the parentless list. + // Make Root node a parent of all functions to make sure all are reachable. if (F != 0) - ParentlessNodes.insert(Node); + Root->addCallee(Node, this); return Node; } void CallGraph::print(raw_ostream &OS) const { OS << " --- Call graph Dump --- \n"; - for (const_iterator I = begin(), E = end(); I != E; ++I) { + + // We are going to print the graph in reverse post order, partially, to make + // sure the output is deterministic. 
+ llvm::ReversePostOrderTraversal<const clang::CallGraph*> RPOT(this); + for (llvm::ReversePostOrderTraversal<const clang::CallGraph*>::rpo_iterator + I = RPOT.begin(), E = RPOT.end(); I != E; ++I) { + const CallGraphNode *N = *I; + OS << " Function: "; - if (I->second == Root) + if (N == Root) OS << "< root >"; else - I->second->print(OS); + N->print(OS); + OS << " calls: "; - for (CallGraphNode::iterator CI = I->second->begin(), - CE = I->second->end(); CI != CE; ++CI) { + for (CallGraphNode::const_iterator CI = N->begin(), + CE = N->end(); CI != CE; ++CI) { assert(*CI != Root && "No one can call the root node."); (*CI)->print(OS); OS << " "; diff --git a/lib/StaticAnalyzer/Frontend/AnalysisConsumer.cpp b/lib/StaticAnalyzer/Frontend/AnalysisConsumer.cpp index 025891adf8..07aa5b074a 100644 --- a/lib/StaticAnalyzer/Frontend/AnalysisConsumer.cpp +++ b/lib/StaticAnalyzer/Frontend/AnalysisConsumer.cpp @@ -39,6 +39,7 @@ #include "llvm/ADT/OwningPtr.h" #include "llvm/ADT/SmallPtrSet.h" #include "llvm/ADT/Statistic.h" +#include "llvm/ADT/PostOrderIterator.h" #include "llvm/Support/Path.h" #include "llvm/Support/Program.h" #include "llvm/Support/Timer.h" @@ -417,61 +418,34 @@ AnalysisConsumer::getInliningModeForFunction(const Decl *D, } void AnalysisConsumer::HandleDeclsCallGraph(const unsigned LocalTUDeclsSize) { - // Otherwise, use the Callgraph to derive the order. - // Build the Call Graph. - CallGraph CG; - - // Add all the top level declarations to the graph. + // Build the Call Graph by adding all the top level declarations to the graph. // Note: CallGraph can trigger deserialization of more items from a pch // (though HandleInterestingDecl); triggering additions to LocalTUDecls. // We rely on random access to add the initially processed Decls to CG. + CallGraph CG; for (unsigned i = 0 ; i < LocalTUDeclsSize ; ++i) { CG.addToCallGraph(LocalTUDecls[i]); } - // Find the top level nodes - children of root + the unreachable (parentless) - // nodes. 
- llvm::SmallVector<CallGraphNode*, 24> TopLevelFunctions; - for (CallGraph::nodes_iterator TI = CG.parentless_begin(), - TE = CG.parentless_end(); TI != TE; ++TI) { - TopLevelFunctions.push_back(*TI); - NumFunctionTopLevel++; - } - CallGraphNode *Entry = CG.getRoot(); - for (CallGraphNode::iterator I = Entry->begin(), - E = Entry->end(); I != E; ++I) { - TopLevelFunctions.push_back(*I); - NumFunctionTopLevel++; - } - - // Make sure the nodes are sorted in order reverse of their definition in the - // translation unit. This step is very important for performance. It ensures - // that we analyze the root functions before the externally available - // subroutines. - std::deque<CallGraphNode*> BFSQueue; - for (llvm::SmallVector<CallGraphNode*, 24>::reverse_iterator - TI = TopLevelFunctions.rbegin(), TE = TopLevelFunctions.rend(); - TI != TE; ++TI) - BFSQueue.push_back(*TI); - - // BFS over all of the functions, while skipping the ones inlined into - // the previously processed functions. Use external Visited set, which is - // also modified when we inline a function. + // Walk over all of the call graph nodes in topological order, so that we + // analyze parents before the children. Skip the functions inlined into + // the previously processed functions. Use external Visited set to identify + // inlined functions. The topological order allows the "do not reanalyze + // previously inlined function" performance heuristic to be triggered more + // often. SetOfConstDecls Visited; SetOfConstDecls VisitedAsTopLevel; - while(!BFSQueue.empty()) { - CallGraphNode *N = BFSQueue.front(); - BFSQueue.pop_front(); - - // Push the children into the queue. 
- for (CallGraphNode::const_iterator CI = N->begin(), - CE = N->end(); CI != CE; ++CI) { - if (!shouldSkipFunction((*CI)->getDecl(), Visited, VisitedAsTopLevel)) - BFSQueue.push_back(*CI); - } + llvm::ReversePostOrderTraversal<clang::CallGraph*> RPOT(&CG); + for (llvm::ReversePostOrderTraversal<clang::CallGraph*>::rpo_iterator + I = RPOT.begin(), E = RPOT.end(); I != E; ++I) { + NumFunctionTopLevel++; + CallGraphNode *N = *I; Decl *D = N->getDecl(); - assert(D); + + // Skip the abstract root node. + if (!D) + continue; // Skip the functions which have been processed already or previously // inlined. |