| author | Zhongxing Xu <xuzhongxing@gmail.com> | 2011-01-10 09:23:01 +0000 |
|---|---|---|
| committer | Zhongxing Xu <xuzhongxing@gmail.com> | 2011-01-10 09:23:01 +0000 |
| commit | 0742f181e5562ae49fb303d90ceb3955ed9f84ed (patch) | |
| tree | 99bac4c3a6f039b9a42fedf88806cd2db6883f4f /lib/StaticAnalyzer/Checkers/ExprEngine.cpp | |
| parent | d3731198193eee92796ddeb493973b7a598b003e (diff) | |
Revert r123160. There are linking dependency problems.
git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@123166 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib/StaticAnalyzer/Checkers/ExprEngine.cpp')
-rw-r--r-- | lib/StaticAnalyzer/Checkers/ExprEngine.cpp | 3521
1 file changed, 3521 insertions, 0 deletions
diff --git a/lib/StaticAnalyzer/Checkers/ExprEngine.cpp b/lib/StaticAnalyzer/Checkers/ExprEngine.cpp
new file mode 100644
index 0000000000..ca960142ee
--- /dev/null
+++ b/lib/StaticAnalyzer/Checkers/ExprEngine.cpp
@@ -0,0 +1,3521 @@
+//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines a meta-engine for path-sensitive dataflow analysis that
+// is built on GREngine, but provides the boilerplate to execute transfer
+// functions and build the ExplodedGraph at the expression level.
+//
+//===----------------------------------------------------------------------===//
+
+// FIXME: Restructure checker registration.
+#include "ExprEngineInternalChecks.h"
+
+#include "clang/StaticAnalyzer/BugReporter/BugType.h"
+#include "clang/StaticAnalyzer/PathSensitive/AnalysisManager.h"
+#include "clang/StaticAnalyzer/PathSensitive/ExprEngine.h"
+#include "clang/StaticAnalyzer/PathSensitive/ExprEngineBuilders.h"
+#include "clang/StaticAnalyzer/PathSensitive/Checker.h"
+#include "clang/AST/CharUnits.h"
+#include "clang/AST/ParentMap.h"
+#include "clang/AST/StmtObjC.h"
+#include "clang/AST/DeclCXX.h"
+#include "clang/Basic/Builtins.h"
+#include "clang/Basic/SourceManager.h"
+#include "clang/Basic/SourceManager.h"
+#include "clang/Basic/PrettyStackTrace.h"
+#include "llvm/Support/raw_ostream.h"
+#include "llvm/ADT/ImmutableList.h"
+
+#ifndef NDEBUG
+#include "llvm/Support/GraphWriter.h"
+#endif
+
+using namespace clang;
+using namespace ento;
+using llvm::dyn_cast;
+using llvm::dyn_cast_or_null;
+using llvm::cast;
+using llvm::APSInt;
+
+namespace {
+  // Trait class for recording returned expression in the state.
+  struct ReturnExpr {
+    static int TagInt;
+    typedef const Stmt *data_type;
+  };
+  int ReturnExpr::TagInt;
+}
+
+//===----------------------------------------------------------------------===//
+// Utility functions.
+//===----------------------------------------------------------------------===//
+
+static inline Selector GetNullarySelector(const char* name, ASTContext& Ctx) {
+  IdentifierInfo* II = &Ctx.Idents.get(name);
+  return Ctx.Selectors.getSelector(0, &II);
+}
+
+//===----------------------------------------------------------------------===//
+// Checker worklist routines.
+//===----------------------------------------------------------------------===//
+
+void ExprEngine::CheckerVisit(const Stmt *S, ExplodedNodeSet &Dst,
+                              ExplodedNodeSet &Src, CallbackKind Kind) {
+
+  // Determine if we already have a cached 'CheckersOrdered' vector
+  // specifically tailored for the provided <CallbackKind, Stmt kind>. This
+  // can reduce the number of checkers actually called.
+  CheckersOrdered *CO = &Checkers;
+  llvm::OwningPtr<CheckersOrdered> NewCO;
+
+  // The cache key is made up of the and the callback kind (pre- or post-visit)
+  // and the statement kind.
+  CallbackTag K = GetCallbackTag(Kind, S->getStmtClass());
+
+  CheckersOrdered *& CO_Ref = COCache[K];
+
+  if (!CO_Ref) {
+    // If we have no previously cached CheckersOrdered vector for this
+    // statement kind, then create one.
+    NewCO.reset(new CheckersOrdered);
+  }
+  else {
+    // Use the already cached set.
+    CO = CO_Ref;
+  }
+
+  if (CO->empty()) {
+    // If there are no checkers, return early without doing any
+    // more work.
+    Dst.insert(Src);
+    return;
+  }
+
+  ExplodedNodeSet Tmp;
+  ExplodedNodeSet *PrevSet = &Src;
+  unsigned checkersEvaluated = 0;
+
+  for (CheckersOrdered::iterator I=CO->begin(), E=CO->end(); I!=E; ++I) {
+    // If all nodes are sunk, bail out early.
+    if (PrevSet->empty())
+      break;
+    ExplodedNodeSet *CurrSet = 0;
+    if (I+1 == E)
+      CurrSet = &Dst;
+    else {
+      CurrSet = (PrevSet == &Tmp) ? &Src : &Tmp;
+      CurrSet->clear();
+    }
+    void *tag = I->first;
+    Checker *checker = I->second;
+    bool respondsToCallback = true;
+
+    for (ExplodedNodeSet::iterator NI = PrevSet->begin(), NE = PrevSet->end();
+         NI != NE; ++NI) {
+
+      checker->GR_Visit(*CurrSet, *Builder, *this, S, *NI, tag,
+                        Kind == PreVisitStmtCallback, respondsToCallback);
+
+    }
+
+    PrevSet = CurrSet;
+
+    if (NewCO.get()) {
+      ++checkersEvaluated;
+      if (respondsToCallback)
+        NewCO->push_back(*I);
+    }
+  }
+
+  // If we built NewCO, check if we called all the checkers. This is important
+  // so that we know that we accurately determined the entire set of checkers
+  // that responds to this callback. Note that 'checkersEvaluated' might
+  // not be the same as Checkers.size() if one of the Checkers generates
+  // a sink node.
+  if (NewCO.get() && checkersEvaluated == Checkers.size())
+    CO_Ref = NewCO.take();
+
+  // Don't autotransition. The CheckerContext objects should do this
+  // automatically.
+}
+
+void ExprEngine::CheckerEvalNilReceiver(const ObjCMessageExpr *ME,
+                                        ExplodedNodeSet &Dst,
+                                        const GRState *state,
+                                        ExplodedNode *Pred) {
+  bool evaluated = false;
+  ExplodedNodeSet DstTmp;
+
+  for (CheckersOrdered::iterator I=Checkers.begin(),E=Checkers.end();I!=E;++I) {
+    void *tag = I->first;
+    Checker *checker = I->second;
+
+    if (checker->GR_evalNilReceiver(DstTmp, *Builder, *this, ME, Pred, state,
+                                    tag)) {
+      evaluated = true;
+      break;
+    } else
+      // The checker didn't evaluate the expr. Restore the Dst.
+      DstTmp.clear();
+  }
+
+  if (evaluated)
+    Dst.insert(DstTmp);
+  else
+    Dst.insert(Pred);
+}
+
+// CheckerEvalCall returns true if one of the checkers processed the node.
+// This may return void when all call evaluation logic goes to some checker
+// in the future.
+bool ExprEngine::CheckerEvalCall(const CallExpr *CE,
+                                 ExplodedNodeSet &Dst,
+                                 ExplodedNode *Pred) {
+  bool evaluated = false;
+  ExplodedNodeSet DstTmp;
+
+  for (CheckersOrdered::iterator I=Checkers.begin(),E=Checkers.end();I!=E;++I) {
+    void *tag = I->first;
+    Checker *checker = I->second;
+
+    if (checker->GR_evalCallExpr(DstTmp, *Builder, *this, CE, Pred, tag)) {
+      evaluated = true;
+      break;
+    } else
+      // The checker didn't evaluate the expr. Restore the DstTmp set.
+      DstTmp.clear();
+  }
+
+  if (evaluated)
+    Dst.insert(DstTmp);
+  else
+    Dst.insert(Pred);
+
+  return evaluated;
+}
+
+// FIXME: This is largely copy-paste from CheckerVisit(). Need to
+// unify.
+void ExprEngine::CheckerVisitBind(const Stmt *StoreE, ExplodedNodeSet &Dst,
+                                  ExplodedNodeSet &Src, SVal location,
+                                  SVal val, bool isPrevisit) {
+
+  if (Checkers.empty()) {
+    Dst.insert(Src);
+    return;
+  }
+
+  ExplodedNodeSet Tmp;
+  ExplodedNodeSet *PrevSet = &Src;
+
+  for (CheckersOrdered::iterator I=Checkers.begin(),E=Checkers.end(); I!=E; ++I)
+  {
+    ExplodedNodeSet *CurrSet = 0;
+    if (I+1 == E)
+      CurrSet = &Dst;
+    else {
+      CurrSet = (PrevSet == &Tmp) ? &Src : &Tmp;
+      CurrSet->clear();
+    }
+
+    void *tag = I->first;
+    Checker *checker = I->second;
+
+    for (ExplodedNodeSet::iterator NI = PrevSet->begin(), NE = PrevSet->end();
+         NI != NE; ++NI)
+      checker->GR_VisitBind(*CurrSet, *Builder, *this, StoreE,
+                            *NI, tag, location, val, isPrevisit);
+
+    // Update which NodeSet is the current one.
+    PrevSet = CurrSet;
+  }
+
+  // Don't autotransition. The CheckerContext objects should do this
+  // automatically.
+}
+//===----------------------------------------------------------------------===//
+// Engine construction and deletion.
+//===----------------------------------------------------------------------===//
+
+static void RegisterInternalChecks(ExprEngine &Eng) {
+  // Register internal "built-in" BugTypes with the BugReporter. These BugTypes
+  // are different than what probably many checks will do since they don't
+  // create BugReports on-the-fly but instead wait until ExprEngine finishes
+  // analyzing a function. Generation of BugReport objects is done via a call
+  // to 'FlushReports' from BugReporter.
+  // The following checks do not need to have their associated BugTypes
+  // explicitly registered with the BugReporter. If they issue any BugReports,
+  // their associated BugType will get registered with the BugReporter
+  // automatically. Note that the check itself is owned by the ExprEngine
+  // object.
+  RegisterAdjustedReturnValueChecker(Eng);
+  // CallAndMessageChecker should be registered before AttrNonNullChecker,
+  // where we assume arguments are not undefined.
+  RegisterCallAndMessageChecker(Eng);
+  RegisterAttrNonNullChecker(Eng);
+  RegisterDereferenceChecker(Eng);
+  RegisterVLASizeChecker(Eng);
+  RegisterDivZeroChecker(Eng);
+  RegisterReturnUndefChecker(Eng);
+  RegisterUndefinedArraySubscriptChecker(Eng);
+  RegisterUndefinedAssignmentChecker(Eng);
+  RegisterUndefBranchChecker(Eng);
+  RegisterUndefCapturedBlockVarChecker(Eng);
+  RegisterUndefResultChecker(Eng);
+  RegisterStackAddrLeakChecker(Eng);
+  RegisterObjCAtSyncChecker(Eng);
+
+  // This is not a checker yet.
+  RegisterNoReturnFunctionChecker(Eng);
+  RegisterBuiltinFunctionChecker(Eng);
+  RegisterOSAtomicChecker(Eng);
+  RegisterUnixAPIChecker(Eng);
+  RegisterMacOSXAPIChecker(Eng);
+}
+
+ExprEngine::ExprEngine(AnalysisManager &mgr, TransferFuncs *tf)
+  : AMgr(mgr),
+    Engine(*this),
+    G(Engine.getGraph()),
+    Builder(NULL),
+    StateMgr(getContext(), mgr.getStoreManagerCreator(),
+             mgr.getConstraintManagerCreator(), G.getAllocator(),
+             *this),
+    SymMgr(StateMgr.getSymbolManager()),
+    svalBuilder(StateMgr.getSValBuilder()),
+    EntryNode(NULL), currentStmt(NULL),
+    NSExceptionII(NULL), NSExceptionInstanceRaiseSelectors(NULL),
+    RaiseSel(GetNullarySelector("raise", getContext())),
+    BR(mgr, *this), TF(tf) {
+  // Register internal checks.
+  RegisterInternalChecks(*this);
+
+  // FIXME: Eventually remove the TF object entirely.
+  TF->RegisterChecks(*this);
+  TF->RegisterPrinters(getStateManager().Printers);
+}
+
+ExprEngine::~ExprEngine() {
+  BR.FlushReports();
+  delete [] NSExceptionInstanceRaiseSelectors;
+
+  // Delete the set of checkers.
+  for (CheckersOrdered::iterator I=Checkers.begin(), E=Checkers.end(); I!=E;++I)
+    delete I->second;
+
+  for (CheckersOrderedCache::iterator I=COCache.begin(), E=COCache.end();
+       I!=E;++I)
+    delete I->second;
+}
+
+//===----------------------------------------------------------------------===//
+// Utility methods.
+//===----------------------------------------------------------------------===//
+
+const GRState* ExprEngine::getInitialState(const LocationContext *InitLoc) {
+  const GRState *state = StateMgr.getInitialState(InitLoc);
+
+  // Preconditions.
+
+  // FIXME: It would be nice if we had a more general mechanism to add
+  // such preconditions. Some day.
+  do {
+    const Decl *D = InitLoc->getDecl();
+    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
+      // Precondition: the first argument of 'main' is an integer guaranteed
+      // to be > 0.
+      const IdentifierInfo *II = FD->getIdentifier();
+      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
+        break;
+
+      const ParmVarDecl *PD = FD->getParamDecl(0);
+      QualType T = PD->getType();
+      if (!T->isIntegerType())
+        break;
+
+      const MemRegion *R = state->getRegion(PD, InitLoc);
+      if (!R)
+        break;
+
+      SVal V = state->getSVal(loc::MemRegionVal(R));
+      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
+                                           svalBuilder.makeZeroVal(T),
+                                           getContext().IntTy);
+
+      DefinedOrUnknownSVal *Constraint =
+        dyn_cast<DefinedOrUnknownSVal>(&Constraint_untested);
+
+      if (!Constraint)
+        break;
+
+      if (const GRState *newState = state->assume(*Constraint, true))
+        state = newState;
+
+      break;
+    }
+
+    if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
+      // Precondition: 'self' is always non-null upon entry to an Objective-C
+      // method.
+      const ImplicitParamDecl *SelfD = MD->getSelfDecl();
+      const MemRegion *R = state->getRegion(SelfD, InitLoc);
+      SVal V = state->getSVal(loc::MemRegionVal(R));
+
+      if (const Loc *LV = dyn_cast<Loc>(&V)) {
+        // Assume that the pointer value in 'self' is non-null.
+        state = state->assume(*LV, true);
+        assert(state && "'self' cannot be null");
+      }
+    }
+  } while (0);
+
+  return state;
+}
+
+//===----------------------------------------------------------------------===//
+// Top-level transfer function logic (Dispatcher).
+//===----------------------------------------------------------------------===//
+
+/// evalAssume - Called by ConstraintManager. Used to call checker-specific
+/// logic for handling assumptions on symbolic values.
+const GRState *ExprEngine::ProcessAssume(const GRState *state, SVal cond,
+                                         bool assumption) {
+  // Determine if we already have a cached 'CheckersOrdered' vector
+  // specifically tailored for processing assumptions. This
+  // can reduce the number of checkers actually called.
+  CheckersOrdered *CO = &Checkers;
+  llvm::OwningPtr<CheckersOrdered> NewCO;
+
+  CallbackTag K = GetCallbackTag(ProcessAssumeCallback);
+  CheckersOrdered *& CO_Ref = COCache[K];
+
+  if (!CO_Ref) {
+    // If we have no previously cached CheckersOrdered vector for this
+    // statement kind, then create one.
+    NewCO.reset(new CheckersOrdered);
+  }
+  else {
+    // Use the already cached set.
+    CO = CO_Ref;
+  }
+
+  if (!CO->empty()) {
+    // Let the checkers have a crack at the assume before the transfer functions
+    // get their turn.
+    for (CheckersOrdered::iterator I = CO->begin(), E = CO->end(); I!=E; ++I) {
+
+      // If any checker declares the state infeasible (or if it starts that
+      // way), bail out.
+      if (!state)
+        return NULL;
+
+      Checker *C = I->second;
+      bool respondsToCallback = true;
+
+      state = C->evalAssume(state, cond, assumption, &respondsToCallback);
+
+      // Check if we're building the cache of checkers that care about
+      // assumptions.
+      if (NewCO.get() && respondsToCallback)
+        NewCO->push_back(*I);
+    }
+
+    // If we got through all the checkers, and we built a list of those that
+    // care about assumptions, save it.
+    if (NewCO.get())
+      CO_Ref = NewCO.take();
+  }
+
+  // If the state is infeasible at this point, bail out.
+  if (!state)
+    return NULL;
+
+  return TF->evalAssume(state, cond, assumption);
+}
+
+bool ExprEngine::WantsRegionChangeUpdate(const GRState* state) {
+  CallbackTag K = GetCallbackTag(EvalRegionChangesCallback);
+  CheckersOrdered *CO = COCache[K];
+
+  if (!CO)
+    CO = &Checkers;
+
+  for (CheckersOrdered::iterator I = CO->begin(), E = CO->end(); I != E; ++I) {
+    Checker *C = I->second;
+    if (C->WantsRegionChangeUpdate(state))
+      return true;
+  }
+
+  return false;
+}
+
+const GRState *
+ExprEngine::ProcessRegionChanges(const GRState *state,
+                                 const MemRegion * const *Begin,
+                                 const MemRegion * const *End) {
+  // FIXME: Most of this method is copy-pasted from ProcessAssume.
+
+  // Determine if we already have a cached 'CheckersOrdered' vector
+  // specifically tailored for processing region changes. This
+  // can reduce the number of checkers actually called.
+  CheckersOrdered *CO = &Checkers;
+  llvm::OwningPtr<CheckersOrdered> NewCO;
+
+  CallbackTag K = GetCallbackTag(EvalRegionChangesCallback);
+  CheckersOrdered *& CO_Ref = COCache[K];
+
+  if (!CO_Ref) {
+    // If we have no previously cached CheckersOrdered vector for this
+    // callback, then create one.
+    NewCO.reset(new CheckersOrdered);
+  }
+  else {
+    // Use the already cached set.
+    CO = CO_Ref;
+  }
+
+  // If there are no checkers, just return the state as is.
+  if (CO->empty())
+    return state;
+
+  for (CheckersOrdered::iterator I = CO->begin(), E = CO->end(); I != E; ++I) {
+    // If any checker declares the state infeasible (or if it starts that way),
+    // bail out.
+    if (!state)
+      return NULL;
+
+    Checker *C = I->second;
+    bool respondsToCallback = true;
+
+    state = C->EvalRegionChanges(state, Begin, End, &respondsToCallback);
+
+    // See if we're building a cache of checkers that care about region changes.
+    if (NewCO.get() && respondsToCallback)
+      NewCO->push_back(*I);
+  }
+
+  // If we got through all the checkers, and we built a list of those that
+  // care about region changes, save it.
+  if (NewCO.get())
+    CO_Ref = NewCO.take();
+
+  return state;
+}
+
+void ExprEngine::ProcessEndWorklist(bool hasWorkRemaining) {
+  for (CheckersOrdered::iterator I = Checkers.begin(), E = Checkers.end();
+       I != E; ++I) {
+    I->second->VisitEndAnalysis(G, BR, *this);
+  }
+}
+
+void ExprEngine::ProcessElement(const CFGElement E,
+                                StmtNodeBuilder& builder) {
+  switch (E.getKind()) {
+  case CFGElement::Statement:
+    ProcessStmt(E.getAs<CFGStmt>(), builder);
+    break;
+  case CFGElement::Initializer:
+    ProcessInitializer(E.getAs<CFGInitializer>(), builder);
+    break;
+  case CFGElement::ImplicitDtor:
+    ProcessImplicitDtor(E.getAs<CFGImplicitDtor>(), builder);
+    break;
+  default:
+    // Suppress compiler warning.
+    llvm_unreachable("Unexpected CFGElement kind.");
+  }
+}
+
+void ExprEngine::ProcessStmt(const CFGStmt S, StmtNodeBuilder& builder) {
+  currentStmt = S.getStmt();
+  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
+                                currentStmt->getLocStart(),
+                                "Error evaluating statement");
+
+  Builder = &builder;
+  EntryNode = builder.getBasePredecessor();
+
+  // Create the cleaned state.
+  const LocationContext *LC = EntryNode->getLocationContext();
+  SymbolReaper SymReaper(LC, currentStmt, SymMgr);
+
+  if (AMgr.shouldPurgeDead()) {
+    const GRState *St = EntryNode->getState();
+
+    for (CheckersOrdered::iterator I = Checkers.begin(), E = Checkers.end();
+         I != E; ++I) {
+      Checker *checker = I->second;
+      checker->MarkLiveSymbols(St, SymReaper);
+    }
+
+    const StackFrameContext *SFC = LC->getCurrentStackFrame();
+    CleanedState = StateMgr.RemoveDeadBindings(St, SFC, SymReaper);
+  } else {
+    CleanedState = EntryNode->getState();
+  }
+
+  // Process any special transfer function for dead symbols.
+  ExplodedNodeSet Tmp;
+
+  if (!SymReaper.hasDeadSymbols())
+    Tmp.Add(EntryNode);
+  else {
+    SaveAndRestore<bool> OldSink(Builder->BuildSinks);
+    SaveOr OldHasGen(Builder->HasGeneratedNode);
+
+    SaveAndRestore<bool> OldPurgeDeadSymbols(Builder->PurgingDeadSymbols);
+    Builder->PurgingDeadSymbols = true;
+
+    // FIXME: This should soon be removed.
+    ExplodedNodeSet Tmp2;
+    getTF().evalDeadSymbols(Tmp2, *this, *Builder, EntryNode,
+                            CleanedState, SymReaper);
+
+    if (Checkers.empty())
+      Tmp.insert(Tmp2);
+    else {
+      ExplodedNodeSet Tmp3;
+      ExplodedNodeSet *SrcSet = &Tmp2;
+      for (CheckersOrdered::iterator I = Checkers.begin(), E = Checkers.end();
+           I != E; ++I) {
+        ExplodedNodeSet *DstSet = 0;
+        if (I+1 == E)
+          DstSet = &Tmp;
+        else {
+          DstSet = (SrcSet == &Tmp2) ? &Tmp3 : &Tmp2;
+          DstSet->clear();
+        }
+
+        void *tag = I->first;
+        Checker *checker = I->second;
+        for (ExplodedNodeSet::iterator NI = SrcSet->begin(), NE = SrcSet->end();
+             NI != NE; ++NI)
+          checker->GR_evalDeadSymbols(*DstSet, *Builder, *this, currentStmt,
+                                      *NI, SymReaper, tag);
+        SrcSet = DstSet;
+      }
+    }
+
+    if (!Builder->BuildSinks && !Builder->HasGeneratedNode)
+      Tmp.Add(EntryNode);
+  }
+
+  bool HasAutoGenerated = false;
+
+  for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
+    ExplodedNodeSet Dst;
+
+    // Set the cleaned state.
+    Builder->SetCleanedState(*I == EntryNode ? CleanedState : GetState(*I));
+
+    // Visit the statement.
+    Visit(currentStmt, *I, Dst);
+
+    // Do we need to auto-generate a node? We only need to do this to generate
+    // a node with a "cleaned" state; CoreEngine will actually handle
+    // auto-transitions for other cases.
+    if (Dst.size() == 1 && *Dst.begin() == EntryNode
+        && !Builder->HasGeneratedNode && !HasAutoGenerated) {
+      HasAutoGenerated = true;
+      builder.generateNode(currentStmt, GetState(EntryNode), *I);
+    }
+  }
+
+  // NULL out these variables to cleanup.
+  CleanedState = NULL;
+  EntryNode = NULL;
+
+  currentStmt = 0;
+
+  Builder = NULL;
+}
+
+void ExprEngine::ProcessInitializer(const CFGInitializer Init,
+                                    StmtNodeBuilder &builder) {
+  // We don't set EntryNode and currentStmt. And we don't clean up state.
+  const CXXCtorInitializer *BMI = Init.getInitializer();
+
+  ExplodedNode *Pred = builder.getBasePredecessor();
+  const LocationContext *LC = Pred->getLocationContext();
+
+  if (BMI->isAnyMemberInitializer()) {
+    ExplodedNodeSet Dst;
+
+    // Evaluate the initializer.
+    Visit(BMI->getInit(), Pred, Dst);
+
+    for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end(); I != E; ++I){
+      ExplodedNode *Pred = *I;
+      const GRState *state = Pred->getState();
+
+      const FieldDecl *FD = BMI->getAnyMember();
+      const RecordDecl *RD = FD->getParent();
+      const CXXThisRegion *ThisR = getCXXThisRegion(cast<CXXRecordDecl>(RD),
+                                                    cast<StackFrameContext>(LC));
+
+      SVal ThisV = state->getSVal(ThisR);
+      SVal FieldLoc = state->getLValue(FD, ThisV);
+      SVal InitVal = state->getSVal(BMI->getInit());
+      state = state->bindLoc(FieldLoc, InitVal);
+
+      // Use a custom node building process.
+      PostInitializer PP(BMI, LC);
+      // Builder automatically add the generated node to the deferred set,
+      // which are processed in the builder's dtor.
+      builder.generateNode(PP, state, Pred);
+    }
+  }
+}
+
+void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
+                                     StmtNodeBuilder &builder) {
+  Builder = &builder;
+
+  switch (D.getDtorKind()) {
+  case CFGElement::AutomaticObjectDtor:
+    ProcessAutomaticObjDtor(cast<CFGAutomaticObjDtor>(D), builder);
+    break;
+  case CFGElement::BaseDtor:
+    ProcessBaseDtor(cast<CFGBaseDtor>(D), builder);
+    break;
+  case CFGElement::MemberDtor:
+    ProcessMemberDtor(cast<CFGMemberDtor>(D), builder);
+    break;
+  case CFGElement::TemporaryDtor:
+    ProcessTemporaryDtor(cast<CFGTemporaryDtor>(D), builder);
+    break;
+  default:
+    llvm_unreachable("Unexpected dtor kind.");
+  }
+}
+
+void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor dtor,
+                                         StmtNodeBuilder &builder) {
+  ExplodedNode *pred = builder.getBasePredecessor();
+  const GRState *state = pred->getState();
+  const VarDecl *varDecl = dtor.getVarDecl();
+
+  QualType varType = varDecl->getType();
+
+  if (const ReferenceType *refType = varType->getAs<ReferenceType>())
+    varType = refType->getPointeeType();
+
+  const CXXRecordDecl *recordDecl = varType->getAsCXXRecordDecl();
+  assert(recordDecl && "get CXXRecordDecl fail");
+  const CXXDestructorDecl *dtorDecl = recordDecl->getDestructor();
+
+  Loc dest = state->getLValue(varDecl, pred->getLocationContext());
+
+  ExplodedNodeSet dstSet;
+  VisitCXXDestructor(dtorDecl, cast<loc::MemRegionVal>(dest).getRegion(),
+                     dtor.getTriggerStmt(), pred, dstSet);
+}
+
+void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
+                                 StmtNodeBuilder &builder) {
+}
+
+void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
+                                   StmtNodeBuilder &builder) {
+}
+
+void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
+                                      StmtNodeBuilder &builder) {
+}
+
+void ExprEngine::Visit(const Stmt* S, ExplodedNode* Pred,
+                       ExplodedNodeSet& Dst) {
+  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
+                                S->getLocStart(),
+                                "Error evaluating statement");
+
+  // Expressions to ignore.
+  if (const Expr *Ex = dyn_cast<Expr>(S))
+    S = Ex->IgnoreParens();
+
+  // FIXME: add metadata to the CFG so that we can disable
+  // this check when we KNOW that there is no block-level subexpression.
+  // The motivation is that this check requires a hashtable lookup.
+
+  if (S != currentStmt && Pred->getLocationContext()->getCFG()->isBlkExpr(S)) {
+    Dst.Add(Pred);
+    return;
+  }
+
+  switch (S->getStmtClass()) {
+    // C++ stuff we don't support yet.
+    case Stmt::CXXBindTemporaryExprClass:
+    case Stmt::CXXCatchStmtClass:
+    case Stmt::CXXDefaultArgExprClass:
+    case Stmt::CXXDependentScopeMemberExprClass:
+    case Stmt::ExprWithCleanupsClass:
+    case Stmt::CXXNullPtrLiteralExprClass:
+    case Stmt::CXXPseudoDestructorExprClass:
+    case Stmt::CXXTemporaryObjectExprClass:
+    case Stmt::CXXThrowExprClass:
+    case Stmt::CXXTryStmtClass:
+    case Stmt::CXXTypeidExprClass:
+    case Stmt::CXXUuidofExprClass:
+    case Stmt::CXXUnresolvedConstructExprClass:
+    case Stmt::CXXScalarValueInitExprClass:
+    case Stmt::DependentScopeDeclRefExprClass:
+    case Stmt::UnaryTypeTraitExprClass:
+    case Stmt::BinaryTypeTraitExprClass:
+    case Stmt::UnresolvedLookupExprClass:
+    case Stmt::UnresolvedMemberExprClass:
+    case Stmt::CXXNoexceptExprClass:
+    case Stmt::PackExpansionExprClass:
+    {
+      SaveAndRestore<bool> OldSink(Builder->BuildSinks);
+      Builder->BuildSinks = true;
+      MakeNode(Dst, S, Pred, GetState(Pred));
+      break;
+    }
+
+    case Stmt::ParenExprClass:
+      llvm_unreachable("ParenExprs already handled.");
+    // Cases that should never be evaluated simply because they shouldn't
+    // appear in the CFG.
+    case Stmt::BreakStmtClass:
+    case Stmt::CaseStmtClass:
+    case Stmt::CompoundStmtClass:
+    case Stmt::ContinueStmtClass:
+    case Stmt::DefaultStmtClass:
+    case Stmt::DoStmtClass:
+    case Stmt::GotoStmtClass:
+    case Stmt::IndirectGotoStmtClass:
+    case Stmt::LabelStmtClass:
+    case Stmt::NoStmtClass:
+    case Stmt::NullStmtClass:
+    case Stmt::SwitchCaseClass:
+    case Stmt::OpaqueValueExprClass:
+      llvm_unreachable("Stmt should not be in analyzer evaluation loop");
+      break;
+
+    case Stmt::GNUNullExprClass: {
+      MakeNode(Dst, S, Pred, GetState(Pred)->BindExpr(S, svalBuilder.makeNull()));
+      break;
+    }
+
+    case Stmt::ObjCAtSynchronizedStmtClass:
+      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
+      break;
+
+    // Cases not handled yet; but will handle some day.
+    case Stmt::DesignatedInitExprClass:
+    case Stmt::ExtVectorElementExprClass:
+    case Stmt::ImaginaryLiteralClass:
+    case Stmt::ImplicitValueInitExprClass:
+    case Stmt::ObjCAtCatchStmtClass:
+    case Stmt::ObjCAtFinallyStmtClass:
+    case Stmt::ObjCAtTryStmtClass:
+    case Stmt::ObjCEncodeExprClass:
+    case Stmt::ObjCIsaExprClass:
+    case Stmt::ObjCPropertyRefExprClass:
+    case Stmt::ObjCProtocolExprClass:
+    case Stmt::ObjCSelectorExprClass:
+    case Stmt::ObjCStringLiteralClass:
+    case Stmt::ParenListExprClass:
+    case Stmt::PredefinedExprClass:
+    case Stmt::ShuffleVectorExprClass:
+    case Stmt::VAArgExprClass:
+      // Fall through.
+
+    // Cases we intentionally don't evaluate, since they don't need
+    // to be explicitly evaluated.
+    case Stmt::AddrLabelExprClass:
+    case Stmt::IntegerLiteralClass:
+    case Stmt::CharacterLiteralClass:
+    case Stmt::CXXBoolLiteralExprClass:
+    case Stmt::FloatingLiteralClass:
+    case Stmt::SizeOfPackExprClass:
+      Dst.Add(Pred); // No-op. Simply propagate the current state unchanged.
+      break;
+
+    case Stmt::ArraySubscriptExprClass:
+      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
+      break;
+
+    case Stmt::AsmStmtClass:
+      VisitAsmStmt(cast<AsmStmt>(S), Pred, Dst);
+      break;
+
+    case Stmt::BlockDeclRefExprClass: {
+      const BlockDeclRefExpr *BE = cast<BlockDeclRefExpr>(S);
+      VisitCommonDeclRefExpr(BE, BE->getDecl(), Pred, Dst);
+      break;
+    }
+
+    case Stmt::BlockExprClass:
+      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
+      break;
+
+    case Stmt::BinaryOperatorClass: {
+      const BinaryOperator* B = cast<BinaryOperator>(S);
+      if (B->isLogicalOp()) {
+        VisitLogicalExpr(B, Pred, Dst);
+        break;
+      }
+      else if (B->getOpcode() == BO_Comma) {
+        const GRState* state = GetState(Pred);
+        MakeNode(Dst, B, Pred, state->BindExpr(B, state->getSVal(B->getRHS())));
+        break;
+      }
+
+      if (AMgr.shouldEagerlyAssume() &&
+          (B->isRelationalOp() || B->isEqualityOp())) {
+        ExplodedNodeSet Tmp;
+        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
+        evalEagerlyAssume(Dst, Tmp, cast<Expr>(S));
+      }
+      else
+        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
+
+      break;
+    }
+
+    case Stmt::CallExprClass: {
+      const CallExpr* C = cast<CallExpr>(S);
+      VisitCall(C, Pred, C->arg_begin(), C->arg_end(), Dst);
+      break;
+    }
+
+    case Stmt::CXXConstructExprClass: {
+      const CXXConstructExpr *C = cast<CXXConstructExpr>(S);
+      // For block-level CXXConstructExpr, we don't have a destination region.
+      // Let VisitCXXConstructExpr() create one.
+      VisitCXXConstructExpr(C, 0, Pred, Dst);
+      break;
+    }
+
+    case Stmt::CXXMemberCallExprClass: {
+      const CXXMemberCallExpr *MCE = cast<CXXMemberCallExpr>(S);
+      VisitCXXMemberCallExpr(MCE, Pred, Dst);
+      break;
+    }
+
+    case Stmt::CXXOperatorCallExprClass: {
+      const CXXOperatorCallExpr *C = cast<CXXOperatorCallExpr>(S);
+      VisitCXXOperatorCallExpr(C, Pred, Dst);
+      break;
+    }
+
+    case Stmt::CXXNewExprClass: {
+      const CXXNewExpr *NE = cast<CXXNewExpr>(S);
+      VisitCXXNewExpr(NE, Pred, Dst);
+      break;
+    }
+
+    case Stmt::CXXDeleteExprClass: {
+      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
+      VisitCXXDeleteExpr(CDE, Pred, Dst);
+      break;
+    }
+    // FIXME: ChooseExpr is really a constant. We need to fix
+    // the CFG do not model them as explicit control-flow.
+
+    case Stmt::ChooseExprClass: { // __builtin_choose_expr
+      const ChooseExpr* C = cast<ChooseExpr>(S);
+      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
+      break;
+    }
+
+    case Stmt::CompoundAssignOperatorClass:
+      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
+      break;
+
+    case Stmt::CompoundLiteralExprClass:
+      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
+      break;
+
+    case Stmt::ConditionalOperatorClass: { // '?' operator
+      const ConditionalOperator* C = cast<ConditionalOperator>(S);
+      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
+      break;
+    }
+
+    case Stmt::CXXThisExprClass:
+      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
+      break;
+
+    case Stmt::DeclRefExprClass: {
+      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
+      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
+      break;
+    }
+
+    case Stmt::DeclStmtClass:
+      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
+      break;
+
+    case Stmt::ForStmtClass:
+      // This case isn't for branch processing, but for handling the
+      // initialization of a condition variable.
+      VisitCondInit(cast<ForStmt>(S)->getConditionVariable(), S, Pred, Dst);
+      break;
+
+    case Stmt::ImplicitCastExprClass:
+    case Stmt::CStyleCastExprClass:
+    case Stmt::CXXStaticCastExprClass:
+    case Stmt::CXXDynamicCastExprClass:
+    case Stmt::CXXReinterpretCastExprClass:
+    case Stmt::CXXConstCastExprClass:
+    case Stmt::CXXFunctionalCastExprClass: {
+      const CastExpr* C = cast<CastExpr>(S);
+      VisitCast(C, C->getSubExpr(), Pred, Dst);
+      break;
+    }
+
+    case Stmt::IfStmtClass:
+      // This case isn't for branch processing, but for handling the
+      // initialization of a condition variable.
+      VisitCondInit(cast<IfStmt>(S)->getConditionVariable(), S, Pred, Dst);
+      break;
+
+    case Stmt::InitListExprClass:
+      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
+      break;
+
+    case Stmt::MemberExprClass:
+      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
+      break;
+    case Stmt::ObjCIvarRefExprClass:
+      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
+      break;
+
+    case Stmt::ObjCForCollectionStmtClass:
+      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
+      break;
+
+    case Stmt::ObjCMessageExprClass:
+      VisitObjCMessageExpr(cast<ObjCMessageExpr>(S), Pred, Dst);
+      break;
+
+    case Stmt::ObjCAtThrowStmtClass: {
+      // FIXME: This is not complete. We basically treat @throw as
+      // an abort.
+      SaveAndRestore<bool> OldSink(Builder->BuildSinks);
+      Builder->BuildSinks = true;
+      MakeNode(Dst, S, Pred, GetState(Pred));
+      break;
+    }
+
+    case Stmt::ReturnStmtClass:
+      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
+      break;
+
+    case Stmt::OffsetOfExprClass:
+      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
+      break;
+
+    case Stmt::SizeOfAlignOfExprClass:
+      VisitSizeOfAlignOfExpr(cast<SizeOfAlignOfExpr>(S), Pred, Dst);
+      break;