path: root/lib/StaticAnalyzer/Core
Diffstat (limited to 'lib/StaticAnalyzer/Core')
-rw-r--r--  lib/StaticAnalyzer/Core/AggExprVisitor.cpp                                                   2
-rw-r--r--  lib/StaticAnalyzer/Core/BasicStore.cpp                                                      15
-rw-r--r--  lib/StaticAnalyzer/Core/BasicValueFactory.cpp                                                8
-rw-r--r--  lib/StaticAnalyzer/Core/BugReporter.cpp                                                     14
-rw-r--r--  lib/StaticAnalyzer/Core/CFRefCount.cpp                                                      63
-rw-r--r--  lib/StaticAnalyzer/Core/CMakeLists.txt                                                       9
-rw-r--r--  lib/StaticAnalyzer/Core/CXXExprEngine.cpp                                                  300
-rw-r--r--  lib/StaticAnalyzer/Core/CheckerContext.cpp (renamed from lib/StaticAnalyzer/Core/Checker.cpp) 10
-rw-r--r--  lib/StaticAnalyzer/Core/CheckerManager.cpp                                                  97
-rw-r--r--  lib/StaticAnalyzer/Core/CoreEngine.cpp                                                      23
-rw-r--r--  lib/StaticAnalyzer/Core/Environment.cpp                                                     19
-rw-r--r--  lib/StaticAnalyzer/Core/ExplodedGraph.cpp                                                    2
-rw-r--r--  lib/StaticAnalyzer/Core/ExprEngine.cpp                                                    3217
-rw-r--r--  lib/StaticAnalyzer/Core/FlatStore.cpp                                                       13
-rw-r--r--  lib/StaticAnalyzer/Core/ObjCMessage.cpp                                                     56
-rw-r--r--  lib/StaticAnalyzer/Core/RegionStore.cpp                                                     77
-rw-r--r--  lib/StaticAnalyzer/Core/SValBuilder.cpp                                                    139
-rw-r--r--  lib/StaticAnalyzer/Core/SimpleConstraintManager.cpp                                          1
-rw-r--r--  lib/StaticAnalyzer/Core/SimpleSValBuilder.cpp                                               30
-rw-r--r--  lib/StaticAnalyzer/Core/Store.cpp                                                            4
20 files changed, 3804 insertions(+), 295 deletions(-)
diff --git a/lib/StaticAnalyzer/Core/AggExprVisitor.cpp b/lib/StaticAnalyzer/Core/AggExprVisitor.cpp
index e80cf9b..901190d 100644
--- a/lib/StaticAnalyzer/Core/AggExprVisitor.cpp
+++ b/lib/StaticAnalyzer/Core/AggExprVisitor.cpp
@@ -60,7 +60,7 @@ void AggExprVisitor::VisitCXXConstructExpr(CXXConstructExpr *E) {
}
void AggExprVisitor::VisitCXXMemberCallExpr(CXXMemberCallExpr *E) {
- Eng.VisitCXXMemberCallExpr(E, Pred, DstSet);
+ Eng.Visit(E, Pred, DstSet);
}
void ExprEngine::VisitAggExpr(const Expr *E, const MemRegion *Dest,
diff --git a/lib/StaticAnalyzer/Core/BasicStore.cpp b/lib/StaticAnalyzer/Core/BasicStore.cpp
index 98365e7..4faa84c 100644
--- a/lib/StaticAnalyzer/Core/BasicStore.cpp
+++ b/lib/StaticAnalyzer/Core/BasicStore.cpp
@@ -429,12 +429,15 @@ StoreRef BasicStoreManager::getInitialStore(const LocationContext *InitLoc) {
}
if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(InitLoc->getDecl())) {
- // For C++ methods add symbolic region for 'this' in initial stack frame.
- QualType ThisT = MD->getThisType(StateMgr.getContext());
- MemRegionManager &RegMgr = svalBuilder.getRegionManager();
- const CXXThisRegion *ThisR = RegMgr.getCXXThisRegion(ThisT, InitLoc);
- SVal ThisV = svalBuilder.getRegionValueSymbolVal(ThisR);
- St = Bind(St.getStore(), svalBuilder.makeLoc(ThisR), ThisV);
+ // For C++ non-static member functions, add a symbolic region for 'this' in
+ // the initial stack frame.
+ if (MD->isInstance()) {
+ QualType ThisT = MD->getThisType(StateMgr.getContext());
+ MemRegionManager &RegMgr = svalBuilder.getRegionManager();
+ const CXXThisRegion *ThisR = RegMgr.getCXXThisRegion(ThisT, InitLoc);
+ SVal ThisV = svalBuilder.getRegionValueSymbolVal(ThisR);
+ St = Bind(St.getStore(), svalBuilder.makeLoc(ThisR), ThisV);
+ }
}
return St;
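The guard added above only matters for instance methods. A minimal illustration of the two cases the isInstance() check distinguishes (this snippet is not part of the commit; struct S is hypothetical):

struct S {
  static int f(int x) { return x + 1; } // static member: no 'this', so no CXXThisRegion is bound
  int g() const { return 2; }           // instance member: 'this' gets a symbolic region in the initial store
};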
diff --git a/lib/StaticAnalyzer/Core/BasicValueFactory.cpp b/lib/StaticAnalyzer/Core/BasicValueFactory.cpp
index 6315d83..ae8a04c 100644
--- a/lib/StaticAnalyzer/Core/BasicValueFactory.cpp
+++ b/lib/StaticAnalyzer/Core/BasicValueFactory.cpp
@@ -14,6 +14,7 @@
//===----------------------------------------------------------------------===//
#include "clang/StaticAnalyzer/Core/PathSensitive/BasicValueFactory.h"
+#include "clang/StaticAnalyzer/Core/PathSensitive/Store.h"
using namespace clang;
using namespace ento;
@@ -25,8 +26,9 @@ void CompoundValData::Profile(llvm::FoldingSetNodeID& ID, QualType T,
}
void LazyCompoundValData::Profile(llvm::FoldingSetNodeID& ID,
- const void *store,const TypedRegion *region) {
- ID.AddPointer(store);
+ const StoreRef &store,
+ const TypedRegion *region) {
+ ID.AddPointer(store.getStore());
ID.AddPointer(region);
}
@@ -124,7 +126,7 @@ BasicValueFactory::getCompoundValData(QualType T,
}
const LazyCompoundValData*
-BasicValueFactory::getLazyCompoundValData(const void *store,
+BasicValueFactory::getLazyCompoundValData(const StoreRef &store,
const TypedRegion *region) {
llvm::FoldingSetNodeID ID;
LazyCompoundValData::Profile(ID, store, region);
diff --git a/lib/StaticAnalyzer/Core/BugReporter.cpp b/lib/StaticAnalyzer/Core/BugReporter.cpp
index 672982a..8b5d383 100644
--- a/lib/StaticAnalyzer/Core/BugReporter.cpp
+++ b/lib/StaticAnalyzer/Core/BugReporter.cpp
@@ -432,7 +432,7 @@ public:
else if (const DeclStmt* DS = dyn_cast<DeclStmt>(S)) {
// FIXME: Eventually CFGs won't have DeclStmts. Right now we
// assume that each DeclStmt has a single Decl. This invariant
- // holds by contruction in the CFG.
+ // holds by construction in the CFG.
VD = dyn_cast<VarDecl>(*DS->decl_begin());
}
@@ -859,7 +859,8 @@ class EdgeBuilder {
default:
break;
case Stmt::ParenExprClass:
- S = cast<ParenExpr>(S)->IgnoreParens();
+ case Stmt::GenericSelectionExprClass:
+ S = cast<Expr>(S)->IgnoreParens();
firstCharOnly = true;
continue;
case Stmt::BinaryConditionalOperatorClass:
@@ -1170,13 +1171,14 @@ static void GenerateExtensivePathDiagnostic(PathDiagnostic& PD,
}
if (const BlockEntrance *BE = dyn_cast<BlockEntrance>(&P)) {
- if (CFGStmt S = BE->getFirstElement().getAs<CFGStmt>()) {
- if (IsControlFlowExpr(S)) {
+ if (const CFGStmt *S = BE->getFirstElement().getAs<CFGStmt>()) {
+ const Stmt *stmt = S->getStmt();
+ if (IsControlFlowExpr(stmt)) {
// Add the proper context for '&&', '||', and '?'.
- EB.addContext(S);
+ EB.addContext(stmt);
}
else
- EB.addExtendedContext(PDB.getEnclosingStmtLocation(S).asStmt());
+ EB.addExtendedContext(PDB.getEnclosingStmtLocation(stmt).asStmt());
}
break;
diff --git a/lib/StaticAnalyzer/Core/CFRefCount.cpp b/lib/StaticAnalyzer/Core/CFRefCount.cpp
index b3721d7..d9b1ce8 100644
--- a/lib/StaticAnalyzer/Core/CFRefCount.cpp
+++ b/lib/StaticAnalyzer/Core/CFRefCount.cpp
@@ -12,7 +12,11 @@
//
//===----------------------------------------------------------------------===//
+#include "clang/StaticAnalyzer/Core/Checker.h"
+#include "clang/StaticAnalyzer/Core/CheckerManager.h"
+#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/AST/DeclObjC.h"
+#include "clang/AST/DeclCXX.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Basic/LangOptions.h"
#include "clang/Basic/SourceManager.h"
@@ -20,7 +24,6 @@
#include "clang/StaticAnalyzer/Core/BugReporter/PathDiagnostic.h"
#include "clang/StaticAnalyzer/Checkers/LocalCheckers.h"
#include "clang/Analysis/DomainSpecific/CocoaConventions.h"
-#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerVisitor.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngineBuilders.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/GRStateTrait.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/TransferFuncs.h"
@@ -1198,7 +1201,7 @@ RetainSummaryManager::updateSummaryFromAnnotations(RetainSummary &Summ,
// Effects on the parameters.
unsigned parm_idx = 0;
for (FunctionDecl::param_const_iterator pi = FD->param_begin(),
- pe = FD->param_end(); pi != pe; ++pi) {
+ pe = FD->param_end(); pi != pe; ++pi, ++parm_idx) {
const ParmVarDecl *pd = *pi;
if (pd->getAttr<NSConsumedAttr>()) {
if (!GCEnabled)
@@ -2428,7 +2431,7 @@ CFRefLeakReport::CFRefLeakReport(CFRefBug& D, const CFRefCount &tf,
SymbolRef sym, ExprEngine& Eng)
: CFRefReport(D, tf, n, sym) {
- // Most bug reports are cached at the location where they occured.
+ // Most bug reports are cached at the location where they occurred.
// With leaks, we want to unique them by the location where they were
// allocated, and only report a single path. To do this, we need to find
// the allocation site of a piece of tracked memory, which we do via a
@@ -2526,6 +2529,14 @@ void CFRefCount::evalSummary(ExplodedNodeSet& Dst,
RegionsToInvalidate.push_back(region);
}
+ // Invalidate all instance variables for the callee of a C++ method call.
+ // FIXME: We should be able to do better with inter-procedural analysis.
+ // FIXME: we can probably do better for const versus non-const methods.
+ if (callOrMsg.isCXXCall()) {
+ if (const MemRegion *callee = callOrMsg.getCXXCallee().getAsRegion())
+ RegionsToInvalidate.push_back(callee);
+ }
+
for (unsigned idx = 0, e = callOrMsg.getNumArgs(); idx != e; ++idx) {
SVal V = callOrMsg.getArgSValAsScalarOrLoc(idx);
SymbolRef Sym = V.getAsLocSymbol();
@@ -2678,11 +2689,14 @@ void CFRefCount::evalSummary(ExplodedNodeSet& Dst,
// FIXME: We eventually should handle structs and other compound types
// that are returned by value.
- QualType T = callOrMsg.getResultType(Eng.getContext());
- if (Loc::isLocType(T) || (T->isIntegerType() && T->isScalarType())) {
+ // Use the result type from callOrMsg as it automatically adjusts
+ // for methods/functions that return references.
+ QualType resultTy = callOrMsg.getResultType(Eng.getContext());
+ if (Loc::isLocType(resultTy) ||
+ (resultTy->isIntegerType() && resultTy->isScalarType())) {
unsigned Count = Builder.getCurrentBlockCount();
SValBuilder &svalBuilder = Eng.getSValBuilder();
- SVal X = svalBuilder.getConjuredSymbolVal(NULL, Ex, T, Count);
+ SVal X = svalBuilder.getConjuredSymbolVal(NULL, Ex, resultTy, Count);
state = state->BindExpr(Ex, X, false);
}
@@ -2709,9 +2723,12 @@ void CFRefCount::evalSummary(ExplodedNodeSet& Dst,
unsigned Count = Builder.getCurrentBlockCount();
SValBuilder &svalBuilder = Eng.getSValBuilder();
SymbolRef Sym = svalBuilder.getConjuredSymbol(Ex, Count);
- QualType RetT = GetReturnType(Ex, svalBuilder.getContext());
+
+ // Use the result type from callOrMsg as it automatically adjusts
+ // for methods/functions that return references.
+ QualType resultTy = callOrMsg.getResultType(Eng.getContext());
state = state->set<RefBindings>(Sym, RefVal::makeOwned(RE.getObjKind(),
- RetT));
+ resultTy));
state = state->BindExpr(Ex, svalBuilder.makeLoc(Sym), false);
// FIXME: Add a flag to the checker where allocations are assumed to
@@ -2764,11 +2781,17 @@ void CFRefCount::evalCall(ExplodedNodeSet& Dst,
if (dyn_cast_or_null<BlockDataRegion>(L.getAsRegion())) {
Summ = Summaries.getPersistentStopSummary();
}
- else {
- const FunctionDecl* FD = L.getAsFunctionDecl();
- Summ = !FD ? Summaries.getDefaultSummary() :
- Summaries.getSummary(FD);
+ else if (const FunctionDecl* FD = L.getAsFunctionDecl()) {
+ Summ = Summaries.getSummary(FD);
+ }
+ else if (const CXXMemberCallExpr *me = dyn_cast<CXXMemberCallExpr>(CE)) {
+ if (const CXXMethodDecl *MD = me->getMethodDecl())
+ Summ = Summaries.getSummary(MD);
+ else
+ Summ = Summaries.getDefaultSummary();
}
+ else
+ Summ = Summaries.getDefaultSummary();
assert(Summ);
evalSummary(Dst, Eng, Builder, CE,
@@ -3395,19 +3418,15 @@ void CFRefCount::ProcessNonLeakError(ExplodedNodeSet& Dst,
namespace {
class RetainReleaseChecker
- : public CheckerVisitor<RetainReleaseChecker> {
- CFRefCount *TF;
+ : public Checker< check::PostStmt<BlockExpr> > {
public:
- RetainReleaseChecker(CFRefCount *tf) : TF(tf) {}
- static void* getTag() { static int x = 0; return &x; }
-
- void PostVisitBlockExpr(CheckerContext &C, const BlockExpr *BE);
+ void checkPostStmt(const BlockExpr *BE, CheckerContext &C) const;
};
} // end anonymous namespace
-void RetainReleaseChecker::PostVisitBlockExpr(CheckerContext &C,
- const BlockExpr *BE) {
+void RetainReleaseChecker::checkPostStmt(const BlockExpr *BE,
+ CheckerContext &C) const {
// Scan the BlockDecRefExprs for any object the retain/release checker
// may be tracking.
@@ -3510,7 +3529,9 @@ void CFRefCount::RegisterChecks(ExprEngine& Eng) {
// Register the RetainReleaseChecker with the ExprEngine object.
// Functionality in CFRefCount will be migrated to RetainReleaseChecker
// over time.
- Eng.registerCheck(new RetainReleaseChecker(this));
+ // FIXME: HACK! Remove TransferFuncs and convert all of CFRefCount to fully
+ // use the checker mechanism.
+ Eng.getCheckerManager().registerChecker<RetainReleaseChecker>();
}
TransferFuncs* ento::MakeCFRefCountTF(ASTContext& Ctx, bool GCEnabled,
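The RetainReleaseChecker rewrite above moves from the old CheckerVisitor interface to the declarative Checker<> template. A minimal sketch of that pattern, assuming the Clang 2.9-era checker API; ExampleChecker is a hypothetical name, not part of this commit:

#include "clang/AST/Expr.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"

using namespace clang;
using namespace ento;

namespace {
class ExampleChecker : public Checker< check::PostStmt<BlockExpr> > {
public:
  void checkPostStmt(const BlockExpr *BE, CheckerContext &C) const {
    // Runs after each BlockExpr is evaluated; the per-instance state and
    // getTag() boilerplate of the old CheckerVisitor are no longer needed.
  }
};
} // end anonymous namespace

Registration goes through the CheckerManager rather than Eng.registerCheck():

  Eng.getCheckerManager().registerChecker<ExampleChecker>();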
diff --git a/lib/StaticAnalyzer/Core/CMakeLists.txt b/lib/StaticAnalyzer/Core/CMakeLists.txt
index 14c636c..089a5cc 100644
--- a/lib/StaticAnalyzer/Core/CMakeLists.txt
+++ b/lib/StaticAnalyzer/Core/CMakeLists.txt
@@ -8,18 +8,19 @@ add_clang_library(clangStaticAnalyzerCore
BasicConstraintManager.cpp
BasicStore.cpp
BasicValueFactory.cpp
+ BlockCounter.cpp
BugReporter.cpp
BugReporterVisitors.cpp
CFRefCount.cpp
- Checker.cpp
+ CXXExprEngine.cpp
+ CheckerContext.cpp
CheckerHelpers.cpp
CheckerManager.cpp
+ CoreEngine.cpp
Environment.cpp
ExplodedGraph.cpp
+ ExprEngine.cpp
FlatStore.cpp
- BlockCounter.cpp
- CXXExprEngine.cpp
- CoreEngine.cpp
GRState.cpp
HTMLDiagnostics.cpp
MemRegion.cpp
diff --git a/lib/StaticAnalyzer/Core/CXXExprEngine.cpp b/lib/StaticAnalyzer/Core/CXXExprEngine.cpp
index 56dfe8c..54cbca0 100644
--- a/lib/StaticAnalyzer/Core/CXXExprEngine.cpp
+++ b/lib/StaticAnalyzer/Core/CXXExprEngine.cpp
@@ -11,6 +11,7 @@
//
//===----------------------------------------------------------------------===//
+#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/AST/DeclCXX.h"
@@ -61,6 +62,34 @@ void ExprEngine::evalArguments(ConstExprIterator AI, ConstExprIterator AE,
}
}
+void ExprEngine::evalCallee(const CallExpr *callExpr,
+ const ExplodedNodeSet &src,
+ ExplodedNodeSet &dest) {
+
+ const Expr *callee = 0;
+
+ switch (callExpr->getStmtClass()) {
+ case Stmt::CXXMemberCallExprClass: {
+ // Evaluate the implicit object argument that is the recipient of the
+ // call.
+ callee = cast<CXXMemberCallExpr>(callExpr)->getImplicitObjectArgument();
+
+ // FIXME: handle member pointers.
+ if (!callee)
+ return;
+
+ break;
+ }
+ default: {
+ callee = callExpr->getCallee()->IgnoreParens();
+ break;
+ }
+ }
+
+ for (ExplodedNodeSet::iterator i = src.begin(), e = src.end(); i != e; ++i)
+ Visit(callee, *i, dest);
+}
+
const CXXThisRegion *ExprEngine::getCXXThisRegion(const CXXRecordDecl *D,
const StackFrameContext *SFC) {
const Type *T = D->getTypeForDecl();
@@ -95,50 +124,121 @@ void ExprEngine::CreateCXXTemporaryObject(const Expr *Ex, ExplodedNode *Pred,
}
void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *E,
- const MemRegion *Dest,
- ExplodedNode *Pred,
- ExplodedNodeSet &Dst) {
- if (!Dest)
- Dest = svalBuilder.getRegionManager().getCXXTempObjectRegion(E,
- Pred->getLocationContext());
-
- if (E->isElidable()) {
- VisitAggExpr(E->getArg(0), Dest, Pred, Dst);
- return;
- }
+ const MemRegion *Dest,
+ ExplodedNode *Pred,
+ ExplodedNodeSet &destNodes) {
const CXXConstructorDecl *CD = E->getConstructor();
assert(CD);
-
+
+#if 0
if (!(CD->isThisDeclarationADefinition() && AMgr.shouldInlineCall()))
// FIXME: invalidate the object.
return;
-
+#endif
// Evaluate other arguments.
ExplodedNodeSet argsEvaluated;
const FunctionProtoType *FnType = CD->getType()->getAs<FunctionProtoType>();
evalArguments(E->arg_begin(), E->arg_end(), FnType, Pred, argsEvaluated);
- // The callee stack frame context used to create the 'this' parameter region.
- const StackFrameContext *SFC = AMgr.getStackFrame(CD,
- Pred->getLocationContext(),
- E, Builder->getBlock(),
- Builder->getIndex());
- const CXXThisRegion *ThisR =getCXXThisRegion(E->getConstructor()->getParent(),
- SFC);
-
- CallEnter Loc(E, SFC, Pred->getLocationContext());
- for (ExplodedNodeSet::iterator NI = argsEvaluated.begin(),
- NE = argsEvaluated.end(); NI != NE; ++NI) {
- const GRState *state = GetState(*NI);
- // Setup 'this' region, so that the ctor is evaluated on the object pointed
- // by 'Dest'.
- state = state->bindLoc(loc::MemRegionVal(ThisR), loc::MemRegionVal(Dest));
- ExplodedNode *N = Builder->generateNode(Loc, state, Pred);
- if (N)
- Dst.Add(N);
+#if 0
+ // Is the constructor elidable?
+ if (E->isElidable()) {
+ VisitAggExpr(E->getArg(0), destNodes, Pred, Dst);
+ // FIXME: this is here to force propagation if VisitAggExpr doesn't
+ if (destNodes.empty())
+ destNodes.Add(Pred);
+ return;
+ }
+#endif
+
+ // Perform the previsit of the constructor.
+ ExplodedNodeSet destPreVisit;
+ getCheckerManager().runCheckersForPreStmt(destPreVisit, argsEvaluated, E,
+ *this);
+
+ // Evaluate the constructor. Currently we don't allow checker-specific
+ // implementations of specific constructors (as we do with ordinary
+ // function calls). We can re-evaluate this in the future.
+
+#if 0
+ // Inlining currently isn't fully implemented.
+
+ if (AMgr.shouldInlineCall()) {
+ if (!Dest)
+ Dest =
+ svalBuilder.getRegionManager().getCXXTempObjectRegion(E,
+ Pred->getLocationContext());
+
+ // The callee stack frame context used to create the 'this'
+ // parameter region.
+ const StackFrameContext *SFC =
+ AMgr.getStackFrame(CD, Pred->getLocationContext(),
+ E, Builder->getBlock(), Builder->getIndex());
+
+ // Create the 'this' region.
+ const CXXThisRegion *ThisR =
+ getCXXThisRegion(E->getConstructor()->getParent(), SFC);
+
+ CallEnter Loc(E, SFC, Pred->getLocationContext());
+
+
+ for (ExplodedNodeSet::iterator NI = argsEvaluated.begin(),
+ NE = argsEvaluated.end(); NI != NE; ++NI) {
+ const GRState *state = GetState(*NI);
+ // Setup 'this' region, so that the ctor is evaluated on the object pointed
+ // by 'Dest'.
+ state = state->bindLoc(loc::MemRegionVal(ThisR), loc::MemRegionVal(Dest));
+ if (ExplodedNode *N = Builder->generateNode(Loc, state, *NI))
+ destNodes.Add(N);
+ }
+ }
+#endif
+
+ // Default semantics: invalidate all regions passed as arguments.
+ llvm::SmallVector<const MemRegion*, 10> regionsToInvalidate;
+
+ // FIXME: We can have collisions on the conjured symbol if the
+ // expression *I also creates conjured symbols. We probably want
+ // to identify conjured symbols by an expression pair: the enclosing
+ // expression (the context) and the expression itself. This should
+ // disambiguate conjured symbols.
+ unsigned blockCount = Builder->getCurrentBlockCount();
+
+ // NOTE: Even if RegionsToInvalidate is empty, we must still invalidate
+ // global variables.
+ ExplodedNodeSet destCall;
+
+ for (ExplodedNodeSet::iterator
+ i = destPreVisit.begin(), e = destPreVisit.end();
+ i != e; ++i)
+ {
+ ExplodedNode *Pred = *i;
+ const GRState *state = GetState(Pred);
+
+ // Accumulate list of regions that are invalidated.
+ for (CXXConstructExpr::const_arg_iterator
+ ai = E->arg_begin(), ae = E->arg_end();
+ ai != ae; ++ai)
+ {
+ SVal val = state->getSVal(*ai);
+ if (const MemRegion *region = val.getAsRegion())
+ regionsToInvalidate.push_back(region);
+ }
+
+ // Invalidate the regions.
+ state = state->invalidateRegions(regionsToInvalidate.data(),
+ regionsToInvalidate.data() +
+ regionsToInvalidate.size(),
+ E, blockCount, 0,
+ /* invalidateGlobals = */ true);
+
+ Builder->MakeNode(destCall, E, Pred, state);
}
+
+ // Do the post visit.
+ getCheckerManager().runCheckersForPostStmt(destNodes, destCall, E, *this);
}
void ExprEngine::VisitCXXDestructor(const CXXDestructorDecl *DD,
@@ -165,105 +265,25 @@ void ExprEngine::VisitCXXDestructor(const CXXDestructorDecl *DD,
Dst.Add(N);
}
-void ExprEngine::VisitCXXMemberCallExpr(const CXXMemberCallExpr *MCE,
- ExplodedNode *Pred,
- ExplodedNodeSet &Dst) {
- // Get the method type.
- const FunctionProtoType *FnType =
- MCE->getCallee()->getType()->getAs<FunctionProtoType>();
- assert(FnType && "Method type not available");
-
- // Evaluate explicit arguments with a worklist.
- ExplodedNodeSet argsEvaluated;
- evalArguments(MCE->arg_begin(), MCE->arg_end(), FnType, Pred, argsEvaluated);
-
- // Evaluate the implicit object argument.
- ExplodedNodeSet AllargsEvaluated;
- const MemberExpr *ME = dyn_cast<MemberExpr>(MCE->getCallee()->IgnoreParens());
- if (!ME)
- return;
- Expr *ObjArgExpr = ME->getBase();
- for (ExplodedNodeSet::iterator I = argsEvaluated.begin(),
- E = argsEvaluated.end(); I != E; ++I) {
- Visit(ObjArgExpr, *I, AllargsEvaluated);
- }
-
- // Now evaluate the call itself.
- const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());
- assert(MD && "not a CXXMethodDecl?");
- evalMethodCall(MCE, MD, ObjArgExpr, Pred, AllargsEvaluated, Dst);
-}
-
-void ExprEngine::VisitCXXOperatorCallExpr(const CXXOperatorCallExpr *C,
- ExplodedNode *Pred,
- ExplodedNodeSet &Dst) {
- const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(C->getCalleeDecl());
- if (!MD) {
- // If the operator doesn't represent a method call treat as regural call.
- VisitCall(C, Pred, C->arg_begin(), C->arg_end(), Dst);
- return;
- }
-
- // Determine the type of function we're calling (if available).
- const FunctionProtoType *Proto = NULL;
- QualType FnType = C->getCallee()->IgnoreParens()->getType();
- if (const PointerType *FnTypePtr = FnType->getAs<PointerType>())
- Proto = FnTypePtr->getPointeeType()->getAs<FunctionProtoType>();
-
- // Evaluate arguments treating the first one (object method is called on)
- // as alvalue.
- ExplodedNodeSet argsEvaluated;
- evalArguments(C->arg_begin(), C->arg_end(), Proto, Pred, argsEvaluated, true);
-
- // Now evaluate the call itself.
- evalMethodCall(C, MD, C->getArg(0), Pred, argsEvaluated, Dst);
-}
-
-void ExprEngine::evalMethodCall(const CallExpr *MCE, const CXXMethodDecl *MD,
- const Expr *ThisExpr, ExplodedNode *Pred,
- ExplodedNodeSet &Src, ExplodedNodeSet &Dst) {
- // Allow checkers to pre-visit the member call.
- ExplodedNodeSet PreVisitChecks;
- CheckerVisit(MCE, PreVisitChecks, Src, PreVisitStmtCallback);
-
- if (!(MD->isThisDeclarationADefinition() && AMgr.shouldInlineCall())) {
- // FIXME: conservative method call evaluation.
- CheckerVisit(MCE, Dst, PreVisitChecks, PostVisitStmtCallback);
- return;
- }
-
- const StackFrameContext *SFC = AMgr.getStackFrame(MD,
- Pred->getLocationContext(),
- MCE,
- Builder->getBlock(),
- Builder->getIndex());
- const CXXThisRegion *ThisR = getCXXThisRegion(MD, SFC);
- CallEnter Loc(MCE, SFC, Pred->getLocationContext());
- for (ExplodedNodeSet::iterator I = PreVisitChecks.begin(),
- E = PreVisitChecks.end(); I != E; ++I) {
- // Set up 'this' region.
- const GRState *state = GetState(*I);
- state = state->bindLoc(loc::MemRegionVal(ThisR), state->getSVal(ThisExpr));
- Dst.Add(Builder->generateNode(Loc, state, *I));
- }
-}
-
void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
ExplodedNodeSet &Dst) {
- if (CNE->isArray()) {
- // FIXME: allocating an array has not been handled.
- return;
- }
-
- unsigned Count = Builder->getCurrentBlockCount();
+
+ unsigned blockCount = Builder->getCurrentBlockCount();
DefinedOrUnknownSVal symVal =
- svalBuilder.getConjuredSymbolVal(NULL, CNE, CNE->getType(), Count);
- const MemRegion *NewReg = cast<loc::MemRegionVal>(symVal).getRegion();
-
+ svalBuilder.getConjuredSymbolVal(NULL, CNE, CNE->getType(), blockCount);
+ const MemRegion *NewReg = cast<loc::MemRegionVal>(symVal).getRegion();
QualType ObjTy = CNE->getType()->getAs<PointerType>()->getPointeeType();
-
const ElementRegion *EleReg =
- getStoreManager().GetElementZeroRegion(NewReg, ObjTy);
+ getStoreManager().GetElementZeroRegion(NewReg, ObjTy);
+
+ if (CNE->isArray()) {
+ // FIXME: allocating an array requires simulating the constructors.
+ // For now, just return a symbolicated region.
+ const GRState *state = GetState(Pred);
+ state = state->BindExpr(CNE, loc::MemRegionVal(EleReg));
+ MakeNode(Dst, CNE, Pred, state);
+ return;
+ }
// Evaluate constructor arguments.
const FunctionProtoType *FnType = NULL;
@@ -277,11 +297,39 @@ void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
// Initialize the object region and bind the 'new' expression.
for (ExplodedNodeSet::iterator I = argsEvaluated.begin(),
E = argsEvaluated.end(); I != E; ++I) {
+
const GRState *state = GetState(*I);
+
+ // Accumulate list of regions that are invalidated.
+ // FIXME: Eventually we should unify the logic for constructor
+ // processing in one place.
+ llvm::SmallVector<const MemRegion*, 10> regionsToInvalidate;
+ for (CXXNewExpr::const_arg_iterator
+ ai = CNE->constructor_arg_begin(), ae = CNE->constructor_arg_end();
+ ai != ae; ++ai)
+ {
+ SVal val = state->getSVal(*ai);
+ if (const MemRegion *region = val.getAsRegion())
+ regionsToInvalidate.push_back(region);
+ }
if (ObjTy->isRecordType()) {
- state = state->invalidateRegion(EleReg, CNE, Count);
+ regionsToInvalidate.push_back(EleReg);
+ // Invalidate the regions.
+ state = state->invalidateRegions(regionsToInvalidate.data(),
+ regionsToInvalidate.data() +
+ regionsToInvalidate.size(),
+ CNE, blockCount, 0,
+ /* invalidateGlobals = */ true);
+
} else {
+ // Invalidate the regions.
+ state = state->invalidateRegions(regionsToInvalidate.data(),
+ regionsToInvalidate.data() +
+ regionsToInvalidate.size(),
+ CNE, blockCount, 0,
+ /* invalidateGlobals = */ true);
+
if (CNE->hasInitializer()) {
SVal V = state->getSVal(*CNE->constructor_arg_begin());
state = state->bindLoc(loc::MemRegionVal(EleReg), V);
diff --git a/lib/StaticAnalyzer/Core/Checker.cpp b/lib/StaticAnalyzer/Core/CheckerContext.cpp
index a014eec..f6fb8f2 100644
--- a/lib/StaticAnalyzer/Core/Checker.cpp
+++ b/lib/StaticAnalyzer/Core/CheckerContext.cpp
@@ -1,4 +1,4 @@
-//== Checker.h - Abstract interface for checkers -----------------*- C++ -*--=//
+//== CheckerContext.cpp - Context info for path-sensitive checkers-----------=//
//
// The LLVM Compiler Infrastructure
//
@@ -7,17 +7,15 @@
//
//===----------------------------------------------------------------------===//
//
-// This file defines Checker and CheckerVisitor, classes used for creating
-// domain-specific checks.
+// This file defines CheckerContext that provides contextual info for
+// path-sensitive checkers.
//
//===----------------------------------------------------------------------===//
-#include "clang/StaticAnalyzer/Core/PathSensitive/Checker.h"
+#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
using namespace clang;
using namespace ento;
-Checker::~Checker() {}
-
CheckerContext::~CheckerContext() {
// Do we need to autotransition? 'Dst' can get populated in a variety of
// ways, including 'addTransition()' adding the predecessor node to Dst
diff --git a/lib/StaticAnalyzer/Core/CheckerManager.cpp b/lib/StaticAnalyzer/Core/CheckerManager.cpp
index 75d331a..4a25490 100644
--- a/lib/StaticAnalyzer/Core/CheckerManager.cpp
+++ b/lib/StaticAnalyzer/Core/CheckerManager.cpp
@@ -20,6 +20,32 @@
using namespace clang;
using namespace ento;
+bool CheckerManager::hasPathSensitiveCheckers() const {
+ return !StmtCheckers.empty() ||
+ !PreObjCMessageCheckers.empty() ||
+ !PostObjCMessageCheckers.empty() ||
+ !LocationCheckers.empty() ||
+ !BindCheckers.empty() ||
+ !EndAnalysisCheckers.empty() ||
+ !EndPathCheckers.empty() ||
+ !BranchConditionCheckers.empty() ||
+ !LiveSymbolsCheckers.empty() ||
+ !DeadSymbolsCheckers.empty() ||
+ !RegionChangesCheckers.empty() ||
+ !EvalAssumeCheckers.empty() ||
+ !EvalCallCheckers.empty();
+}
+
+void CheckerManager::finishedCheckerRegistration() {
+#ifndef NDEBUG
+ // Make sure that for every event that has listeners, there is at least
+ // one dispatcher registered for it.
+ for (llvm::DenseMap<EventTag, EventInfo>::iterator
+ I = Events.begin(), E = Events.end(); I != E; ++I)
+ assert(I->second.HasDispatcher && "No dispatcher registered for an event");
+#endif
+}
+
//===----------------------------------------------------------------------===//
// Functions for running checkers for AST traversing..
//===----------------------------------------------------------------------===//
@@ -205,6 +231,40 @@ void CheckerManager::runCheckersForLocation(ExplodedNodeSet &Dst,
expandGraphWithCheckers(C, Dst, Src);
}
+namespace {
+ struct CheckBindContext {
+ typedef std::vector<CheckerManager::CheckBindFunc> CheckersTy;
+ const CheckersTy &Checkers;
+ SVal Loc;
+ SVal Val;
+ const Stmt *S;
+ ExprEngine &Eng;
+
+ CheckersTy::const_iterator checkers_begin() { return Checkers.begin(); }
+ CheckersTy::const_iterator checkers_end() { return Checkers.end(); }
+
+ CheckBindContext(const CheckersTy &checkers,
+ SVal loc, SVal val, const Stmt *s, ExprEngine &eng)
+ : Checkers(checkers), Loc(loc), Val(val), S(s), Eng(eng) { }
+
+ void runChecker(CheckerManager::CheckBindFunc checkFn,
+ ExplodedNodeSet &Dst, ExplodedNode *Pred) {
+ CheckerContext C(Dst, Eng.getBuilder(), Eng, Pred, checkFn.Checker,
+ ProgramPoint::PreStmtKind, 0, S);
+ checkFn(Loc, Val, C);
+ }
+ };
+}
+
+/// \brief Run checkers for binding of a value to a location.
+void CheckerManager::runCheckersForBind(ExplodedNodeSet &Dst,
+ const ExplodedNodeSet &Src,
+ SVal location, SVal val,
+ const Stmt *S, ExprEngine &Eng) {
+ CheckBindContext C(BindCheckers, location, val, S, Eng);
+ expandGraphWithCheckers(C, Dst, Src);
+}
+
void CheckerManager::runCheckersForEndAnalysis(ExplodedGraph &G,
BugReporter &BR,
ExprEngine &Eng) {
@@ -222,6 +282,16 @@ void CheckerManager::runCheckersForEndPath(EndOfFunctionNodeBuilder &B,
}
}
+/// \brief Run checkers for branch condition.
+void CheckerManager::runCheckersForBranchCondition(const Stmt *condition,
+ BranchNodeBuilder &B,
+ ExprEngine &Eng) {
+ for (unsigned i = 0, e = BranchConditionCheckers.size(); i != e; ++i) {
+ CheckBranchConditionFunc fn = BranchConditionCheckers[i];
+ fn(condition, B, Eng);
+ }
+}
+
/// \brief Run checkers for live symbols.
void CheckerManager::runCheckersForLiveSymbols(const GRState *state,
SymbolReaper &SymReaper) {
@@ -287,6 +357,20 @@ CheckerManager::runCheckersForRegionChanges(const GRState *state,
return state;
}
+/// \brief Run checkers for handling assumptions on symbolic values.
+const GRState *
+CheckerManager::runCheckersForEvalAssume(const GRState *state,
+ SVal Cond, bool Assumption) {
+ for (unsigned i = 0, e = EvalAssumeCheckers.size(); i != e; ++i) {
+ // If any checker declares the state infeasible (or if it starts that way),
+ // bail out.
+ if (!state)
+ return NULL;
+ state = EvalAssumeCheckers[i](state, Cond, Assumption);
+ }
+ return state;
+}
+
/// \brief Run checkers for evaluating a call.
/// Only one checker will evaluate the call.
void CheckerManager::runCheckersForEvalCall(ExplodedNodeSet &Dst,
@@ -371,6 +455,10 @@ void CheckerManager::_registerForLocation(CheckLocationFunc checkfn) {
LocationCheckers.push_back(checkfn);
}
+void CheckerManager::_registerForBind(CheckBindFunc checkfn) {
+ BindCheckers.push_back(checkfn);
+}
+
void CheckerManager::_registerForEndAnalysis(CheckEndAnalysisFunc checkfn) {
EndAnalysisCheckers.push_back(checkfn);
}
@@ -379,6 +467,11 @@ void CheckerManager::_registerForEndPath(CheckEndPathFunc checkfn) {
EndPathCheckers.push_back(checkfn);
}
+void CheckerManager::_registerForBranchCondition(
+ CheckBranchConditionFunc checkfn) {
+ BranchConditionCheckers.push_back(checkfn);
+}
+
void CheckerManager::_registerForLiveSymbols(CheckLiveSymbolsFunc checkfn) {
LiveSymbolsCheckers.push_back(checkfn);
}
@@ -393,6 +486,10 @@ void CheckerManager::_registerForRegionChanges(CheckRegionChangesFunc checkfn,
RegionChangesCheckers.push_back(info);
}
+void CheckerManager::_registerForEvalAssume(EvalAssumeFunc checkfn) {
+ EvalAssumeCheckers.push_back(checkfn);
+}
+
void CheckerManager::_registerForEvalCall(EvalCallFunc checkfn) {
EvalCallCheckers.push_back(checkfn);
}
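Besides the pre/post-statement hooks, CheckerManager now dispatches bind, branch-condition, and eval-assume callbacks. A hedged sketch of how a checker might subscribe to two of them under the same 2.9-era interface (BindWatcher and its empty bodies are hypothetical):

using namespace clang;
using namespace ento;

namespace {
class BindWatcher : public Checker< check::Bind, eval::Assume > {
public:
  // Reached through runCheckersForBind() whenever a value is bound to a location.
  void checkBind(SVal location, SVal val, CheckerContext &C) const {}

  // Reached through runCheckersForEvalAssume(); returning NULL marks the
  // state infeasible, exactly as the dispatch loop above checks for.
  const GRState *evalAssume(const GRState *state, SVal cond,
                            bool assumption) const {
    return state;
  }
};
} // end anonymous namespace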
diff --git a/lib/StaticAnalyzer/Core/CoreEngine.cpp b/lib/StaticAnalyzer/Core/CoreEngine.cpp
index 08a2068..34cd6e8 100644
--- a/lib/StaticAnalyzer/Core/CoreEngine.cpp
+++ b/lib/StaticAnalyzer/Core/CoreEngine.cpp
@@ -19,8 +19,6 @@
#include "clang/AST/Expr.h"
#include "llvm/Support/Casting.h"
#include "llvm/ADT/DenseMap.h"
-#include <vector>
-#include <queue>
using llvm::cast;
using llvm::isa;
@@ -310,7 +308,7 @@ void CoreEngine::HandleBlockEdge(const BlockEdge& L, ExplodedNode* Pred) {
for (llvm::SmallVectorImpl<ExplodedNode*>::const_iterator
I = nodeBuilder.sinks().begin(), E = nodeBuilder.sinks().end();
I != E; ++I) {
- blocksAborted.push_back(std::make_pair(L, *I));
+ blocksExhausted.push_back(std::make_pair(L, *I));
}
}
@@ -602,6 +600,25 @@ StmtNodeBuilder::generateNodeInternal(const ProgramPoint &Loc,
return NULL;
}
+// This function generates a new ExplodedNode but not a new branch (block edge).
+ExplodedNode* BranchNodeBuilder::generateNode(const Stmt* Condition,
+ const GRState* State) {
+ bool IsNew;
+
+ ExplodedNode* Succ
+ = Eng.G->getNode(PostCondition(Condition, Pred->getLocationContext()), State,
+ &IsNew);
+
+ Succ->addPredecessor(Pred, *Eng.G);
+
+ Pred = Succ;
+
+ if (IsNew)
+ return Succ;
+
+ return NULL;
+}
+
ExplodedNode* BranchNodeBuilder::generateNode(const GRState* State,
bool branch) {
diff --git a/lib/StaticAnalyzer/Core/Environment.cpp b/lib/StaticAnalyzer/Core/Environment.cpp
index 1bffa30..a00f9dc1 100644
--- a/lib/StaticAnalyzer/Core/Environment.cpp
+++ b/lib/StaticAnalyzer/Core/Environment.cpp
@@ -27,7 +27,17 @@ SVal Environment::lookupExpr(const Stmt* E) const {
return UnknownVal();
}
-SVal Environment::getSVal(const Stmt *E, SValBuilder& svalBuilder) const {
+SVal Environment::getSVal(const Stmt *E, SValBuilder& svalBuilder,
+ bool useOnlyDirectBindings) const {
+
+ if (useOnlyDirectBindings) {
+ // This branch is rarely taken, but can be exercised by
+ // checkers that explicitly bind values to arbitrary
+ // expressions. It is crucial that we do not ignore any
+ // expression here, and do a direct lookup.
+ return lookupExpr(E);
+ }
+
for (;;) {
switch (E->getStmtClass()) {
case Stmt::AddrLabelExprClass:
@@ -41,6 +51,10 @@ SVal Environment::getSVal(const Stmt *E, SValBuilder& svalBuilder) const {
// ParenExprs are no-ops.
E = cast<ParenExpr>(E)->getSubExpr();
continue;
+ case Stmt::GenericSelectionExprClass:
+ // GenericSelectionExprs are no-ops.
+ E = cast<GenericSelectionExpr>(E)->getResultExpr();
+ continue;
case Stmt::CharacterLiteralClass: {
const CharacterLiteral* C = cast<CharacterLiteral>(E);
return svalBuilder.makeIntVal(C->getValue(), C->getType());
@@ -60,6 +74,9 @@ SVal Environment::getSVal(const Stmt *E, SValBuilder& svalBuilder) const {
else
return svalBuilder.makeIntVal(cast<IntegerLiteral>(E));
}
+ // For the special C++0x nullptr case, make a null pointer SVal.
+ case Stmt::CXXNullPtrLiteralExprClass:
+ return svalBuilder.makeNull();
case Stmt::ImplicitCastExprClass:
case Stmt::CXXFunctionalCastExprClass:
case Stmt::CStyleCastExprClass: {
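The two new cases in Environment::getSVal() can be exercised by source like the following (illustrative only, not from this commit):

void h(int *p) {
  p = nullptr; // C++0x: CXXNullPtrLiteralExpr now yields svalBuilder.makeNull()
  (void)p;
}
// Likewise, a C1X _Generic(...) selection (GenericSelectionExpr) is now looked
// through as a no-op wrapper, the same way ParenExpr already was.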
diff --git a/lib/StaticAnalyzer/Core/ExplodedGraph.cpp b/lib/StaticAnalyzer/Core/ExplodedGraph.cpp
index 2a8364d..fa16fea 100644
--- a/lib/StaticAnalyzer/Core/ExplodedGraph.cpp
+++ b/lib/StaticAnalyzer/Core/ExplodedGraph.cpp
@@ -374,7 +374,7 @@ ExplodedGraph::TrimInternal(const ExplodedNode* const* BeginSources,
WL2.push_back(*I);
}
- // Finally, explictly mark all nodes without any successors as sinks.
+ // Finally, explicitly mark all nodes without any successors as sinks.
if (N->isSink())
NewN->markAsSink();
}
diff --git a/lib/StaticAnalyzer/Core/ExprEngine.cpp b/lib/StaticAnalyzer/Core/ExprEngine.cpp
new file mode 100644
index 0000000..657420d
--- /dev/null
+++ b/lib/StaticAnalyzer/Core/ExprEngine.cpp
@@ -0,0 +1,3217 @@
+//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines a meta-engine for path-sensitive dataflow analysis that
+// is built on GREngine, but provides the boilerplate to execute transfer
+// functions and build the ExplodedGraph at the expression level.
+//
+//===----------------------------------------------------------------------===//
+
+#include "clang/StaticAnalyzer/Core/CheckerManager.h"
+#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
+#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
+#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
+#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngineBuilders.h"
+#include "clang/AST/CharUnits.h"
+#include "clang/AST/ParentMap.h"
+#include "clang/AST/StmtObjC.h"
+#include "clang/AST/DeclCXX.h"
+#include "clang/Basic/Builtins.h"
+#include "clang/Basic/SourceManager.h"
+#include "clang/Basic/SourceManager.h"
+#include "clang/Basic/PrettyStackTrace.h"
+#include "llvm/Support/raw_ostream.h"
+#include "llvm/ADT/ImmutableList.h"
+
+#ifndef NDEBUG
+#include "llvm/Support/GraphWriter.h"
+#endif
+
+using namespace clang;
+using namespace ento;
+using llvm::dyn_cast;
+using llvm::dyn_cast_or_null;
+using llvm::cast;
+using llvm::APSInt;
+
+namespace {
+ // Trait class for recording returned expression in the state.
+ struct ReturnExpr {
+ static int TagInt;
+ typedef const Stmt *data_type;
+ };
+ int ReturnExpr::TagInt;
+}
+
+//===----------------------------------------------------------------------===//
+// Utility functions.
+//===----------------------------------------------------------------------===//
+
+static inline Selector GetNullarySelector(const char* name, ASTContext& Ctx) {
+ IdentifierInfo* II = &Ctx.Idents.get(name);
+ return Ctx.Selectors.getSelector(0, &II);
+}
+
+//===----------------------------------------------------------------------===//
+// Engine construction and deletion.
+//===----------------------------------------------------------------------===//
+
+ExprEngine::ExprEngine(AnalysisManager &mgr, TransferFuncs *tf)
+ : AMgr(mgr),
+ Engine(*this),
+ G(Engine.getGraph()),
+ Builder(NULL),
+ StateMgr(getContext(), mgr.getStoreManagerCreator(),
+ mgr.getConstraintManagerCreator(), G.getAllocator(),
+ *this),
+ SymMgr(StateMgr.getSymbolManager()),
+ svalBuilder(StateMgr.getSValBuilder()),
+ EntryNode(NULL), currentStmt(NULL),
+ NSExceptionII(NULL), NSExceptionInstanceRaiseSelectors(NULL),
+ RaiseSel(GetNullarySelector("raise", getContext())),
+ BR(mgr, *this), TF(tf) {
+
+ // FIXME: Eventually remove the TF object entirely.
+ TF->RegisterChecks(*this);
+ TF->RegisterPrinters(getStateManager().Printers);
+
+ if (mgr.shouldEagerlyTrimExplodedGraph()) {
+ // Enable eager node reclaimation when constructing the ExplodedGraph.
+ G.enableNodeReclamation();
+ }
+}
+
+ExprEngine::~ExprEngine() {
+ BR.FlushReports();
+ delete [] NSExceptionInstanceRaiseSelectors;
+}
+
+//===----------------------------------------------------------------------===//
+// Utility methods.
+//===----------------------------------------------------------------------===//
+
+const GRState* ExprEngine::getInitialState(const LocationContext *InitLoc) {
+ const GRState *state = StateMgr.getInitialState(InitLoc);
+
+ // Preconditions.
+
+ // FIXME: It would be nice if we had a more general mechanism to add
+ // such preconditions. Some day.
+ do {
+ const Decl *D = InitLoc->getDecl();
+ if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
+ // Precondition: the first argument of 'main' is an integer guaranteed
+ // to be > 0.
+ const IdentifierInfo *II = FD->getIdentifier();
+ if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
+ break;
+
+ const ParmVarDecl *PD = FD->getParamDecl(0);
+ QualType T = PD->getType();
+ if (!T->isIntegerType())
+ break;
+
+ const MemRegion *R = state->getRegion(PD, InitLoc);
+ if (!R)
+ break;
+
+ SVal V = state->getSVal(loc::MemRegionVal(R));
+ SVal Constraint_untested = evalBinOp(state, BO_GT, V,
+ svalBuilder.makeZeroVal(T),
+ getContext().IntTy);
+
+ DefinedOrUnknownSVal *Constraint =
+ dyn_cast<DefinedOrUnknownSVal>(&Constraint_untested);
+
+ if (!Constraint)
+ break;
+
+ if (const GRState *newState = state->assume(*Constraint, true))
+ state = newState;
+
+ break;
+ }
+
+ if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
+ // Precondition: 'self' is always non-null upon entry to an Objective-C
+ // method.
+ const ImplicitParamDecl *SelfD = MD->getSelfDecl();
+ const MemRegion *R = state->getRegion(SelfD, InitLoc);
+ SVal V = state->getSVal(loc::MemRegionVal(R));
+
+ if (const Loc *LV = dyn_cast<Loc>(&V)) {
+ // Assume that the pointer value in 'self' is non-null.
+ state = state->assume(*LV, true);
+ assert(state && "'self' cannot be null");
+ }
+ }
+ } while (0);
+
+ return state;
+}
+
+//===----------------------------------------------------------------------===//
+// Top-level transfer function logic (Dispatcher).
+//===----------------------------------------------------------------------===//
+
+/// evalAssume - Called by ConstraintManager. Used to call checker-specific
+/// logic for handling assumptions on symbolic values.
+const GRState *ExprEngine::processAssume(const GRState *state, SVal cond,
+ bool assumption) {
+ state = getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
+
+ // If the state is infeasible at this point, bail out.
+ if (!state)
+ return NULL;
+
+ return TF->evalAssume(state, cond, assumption);
+}
+
+bool ExprEngine::wantsRegionChangeUpdate(const GRState* state) {
+ return getCheckerManager().wantsRegionChangeUpdate(state);
+}
+
+const GRState *
+ExprEngine::processRegionChanges(const GRState *state,
+ const MemRegion * const *Begin,
+ const MemRegion * const *End) {
+ return getCheckerManager().runCheckersForRegionChanges(state, Begin, End);
+}
+
+void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
+ getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
+}
+
+void ExprEngine::processCFGElement(const CFGElement E,
+ StmtNodeBuilder& builder) {
+ switch (E.getKind()) {
+ case CFGElement::Invalid:
+ llvm_unreachable("Unexpected CFGElement kind.");
+ case CFGElement::Statement:
+ ProcessStmt(E.getAs<CFGStmt>()->getStmt(), builder);
+ return;
+ case CFGElement::Initializer:
+ ProcessInitializer(E.getAs<CFGInitializer>()->getInitializer(), builder);
+ return;
+ case CFGElement::AutomaticObjectDtor:
+ case CFGElement::BaseDtor:
+ case CFGElement::MemberDtor:
+ case CFGElement::TemporaryDtor:
+ ProcessImplicitDtor(*E.getAs<CFGImplicitDtor>(), builder);
+ return;
+ }
+}
+
+void ExprEngine::ProcessStmt(const CFGStmt S, StmtNodeBuilder& builder) {
+ // Reclaim any unnecessary nodes in the ExplodedGraph.
+ G.reclaimRecentlyAllocatedNodes();
+ // Recycle any unused states in the GRStateManager.
+ StateMgr.recycleUnusedStates();
+
+ currentStmt = S.getStmt();
+ PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
+ currentStmt->getLocStart(),
+ "Error evaluating statement");
+
+ Builder = &builder;
+ EntryNode = builder.getPredecessor();
+
+ // Create the cleaned state.
+ const LocationContext *LC = EntryNode->getLocationContext();
+ SymbolReaper SymReaper(LC, currentStmt, SymMgr);
+
+ if (AMgr.shouldPurgeDead()) {
+ const GRState *St = EntryNode->getState();
+ getCheckerManager().runCheckersForLiveSymbols(St, SymReaper);
+
+ const StackFrameContext *SFC = LC->getCurrentStackFrame();
+ CleanedState = StateMgr.removeDeadBindings(St, SFC, SymReaper);
+ } else {
+ CleanedState = EntryNode->getState();
+ }
+
+ // Process any special transfer function for dead symbols.
+ ExplodedNodeSet Tmp;
+
+ if (!SymReaper.hasDeadSymbols())
+ Tmp.Add(EntryNode);
+ else {
+ SaveAndRestore<bool> OldSink(Builder->BuildSinks);
+ SaveOr OldHasGen(Builder->hasGeneratedNode);
+
+ SaveAndRestore<bool> OldPurgeDeadSymbols(Builder->PurgingDeadSymbols);
+ Builder->PurgingDeadSymbols = true;
+
+ // FIXME: This should soon be removed.
+ ExplodedNodeSet Tmp2;
+ getTF().evalDeadSymbols(Tmp2, *this, *Builder, EntryNode,
+ CleanedState, SymReaper);
+
+ getCheckerManager().runCheckersForDeadSymbols(Tmp, Tmp2,
+ SymReaper, currentStmt, *this);
+
+ if (!Builder->BuildSinks && !Builder->hasGeneratedNode)
+ Tmp.Add(EntryNode);
+ }
+
+ bool HasAutoGenerated = false;
+
+ for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
+ ExplodedNodeSet Dst;
+
+ // Set the cleaned state.
+ Builder->SetCleanedState(*I == EntryNode ? CleanedState : GetState(*I));
+
+ // Visit the statement.
+ Visit(currentStmt, *I, Dst);
+
+ // Do we need to auto-generate a node? We only need to do this to generate
+ // a node with a "cleaned" state; CoreEngine will actually handle
+ // auto-transitions for other cases.
+ if (Dst.size() == 1 && *Dst.begin() == EntryNode
+ && !Builder->hasGeneratedNode && !HasAutoGenerated) {
+ HasAutoGenerated = true;
+ builder.generateNode(currentStmt, GetState(EntryNode), *I);
+ }
+ }
+
+ // NULL out these variables to cleanup.
+ CleanedState = NULL;
+ EntryNode = NULL;
+
+ currentStmt = 0;
+
+ Builder = NULL;
+}
+
+void ExprEngine::ProcessInitializer(const CFGInitializer Init,
+ StmtNodeBuilder &builder) {
+ // We don't set EntryNode and currentStmt. And we don't clean up state.
+ const CXXCtorInitializer *BMI = Init.getInitializer();
+
+ ExplodedNode *pred = builder.getPredecessor();
+
+ const StackFrameContext *stackFrame = cast<StackFrameContext>(pred->getLocationContext());
+ const CXXConstructorDecl *decl = cast<CXXConstructorDecl>(stackFrame->getDecl());
+ const CXXThisRegion *thisReg = getCXXThisRegion(decl, stackFrame);
+
+ SVal thisVal = pred->getState()->getSVal(thisReg);
+
+ if (BMI->isAnyMemberInitializer()) {
+ ExplodedNodeSet Dst;
+
+ // Evaluate the initializer.
+ Visit(BMI->getInit(), pred, Dst);
+
+ for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end(); I != E; ++I){
+ ExplodedNode *Pred = *I;
+ const GRState *state = Pred->getState();
+
+ const FieldDecl *FD = BMI->getAnyMember();
+
+ SVal FieldLoc = state->getLValue(FD, thisVal);
+ SVal InitVal = state->getSVal(BMI->getInit());
+ state = state->bindLoc(FieldLoc, InitVal);
+
+ // Use a custom node building process.
+ PostInitializer PP(BMI, stackFrame);
+ // The builder automatically adds the generated node to the deferred set,
+ // which is processed in the builder's dtor.
+ builder.generateNode(PP, state, Pred);
+ }
+ return;
+ }
+
+ assert(BMI->isBaseInitializer());
+
+ // Get the base class declaration.
+ const CXXConstructExpr *ctorExpr = cast<CXXConstructExpr>(BMI->getInit());
+
+ // Create the base object region.
+ SVal baseVal =
+ getStoreManager().evalDerivedToBase(thisVal, ctorExpr->getType());
+ const MemRegion *baseReg = baseVal.getAsRegion();
+ assert(baseReg);
+ Builder = &builder;
+ ExplodedNodeSet dst;
+ VisitCXXConstructExpr(ctorExpr, baseReg, pred, dst);
+}
+
+void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
+ StmtNodeBuilder &builder) {
+ Builder = &builder;
+
+ switch (D.getKind()) {
+ case CFGElement::AutomaticObjectDtor:
+ ProcessAutomaticObjDtor(cast<CFGAutomaticObjDtor>(D), builder);
+ break;
+ case CFGElement::BaseDtor:
+ ProcessBaseDtor(cast<CFGBaseDtor>(D), builder);
+ break;
+ case CFGElement::MemberDtor:
+ ProcessMemberDtor(cast<CFGMemberDtor>(D), builder);
+ break;
+ case CFGElement::TemporaryDtor:
+ ProcessTemporaryDtor(cast<CFGTemporaryDtor>(D), builder);
+ break;
+ default:
+ llvm_unreachable("Unexpected dtor kind.");
+ }
+}
+
+void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor dtor,
+ StmtNodeBuilder &builder) {
+ ExplodedNode *pred = builder.getPredecessor();
+ const GRState *state = pred->getState();
+ const VarDecl *varDecl = dtor.getVarDecl();
+
+ QualType varType = varDecl->getType();
+
+ if (const ReferenceType *refType = varType->getAs<ReferenceType>())
+ varType = refType->getPointeeType();
+
+ const CXXRecordDecl *recordDecl = varType->getAsCXXRecordDecl();
+ assert(recordDecl && "get CXXRecordDecl fail");
+ const CXXDestructorDecl *dtorDecl = recordDecl->getDestructor();
+
+ Loc dest = state->getLValue(varDecl, pred->getLocationContext());
+
+ ExplodedNodeSet dstSet;
+ VisitCXXDestructor(dtorDecl, cast<loc::MemRegionVal>(dest).getRegion(),
+ dtor.getTriggerStmt(), pred, dstSet);
+}
+
+void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
+ StmtNodeBuilder &builder) {
+}
+
+void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
+ StmtNodeBuilder &builder) {
+}
+
+void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
+ StmtNodeBuilder &builder) {
+}
+
+void ExprEngine::Visit(const Stmt* S, ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+ PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
+ S->getLocStart(),
+ "Error evaluating statement");
+
+ // Expressions to ignore.
+ if (const Expr *Ex = dyn_cast<Expr>(S))
+ S = Ex->IgnoreParens();
+
+ // FIXME: add metadata to the CFG so that we can disable
+ // this check when we KNOW that there is no block-level subexpression.
+ // The motivation is that this check requires a hashtable lookup.
+
+ if (S != currentStmt && Pred->getLocationContext()->getCFG()->isBlkExpr(S)) {
+ Dst.Add(Pred);
+ return;
+ }
+
+ switch (S->getStmtClass()) {
+ // C++ stuff we don't support yet.
+ case Stmt::CXXBindTemporaryExprClass:
+ case Stmt::CXXCatchStmtClass:
+ case Stmt::CXXDependentScopeMemberExprClass:
+ case Stmt::CXXForRangeStmtClass:
+ case Stmt::CXXPseudoDestructorExprClass:
+ case Stmt::CXXTemporaryObjectExprClass:
+ case Stmt::CXXThrowExprClass:
+ case Stmt::CXXTryStmtClass:
+ case Stmt::CXXTypeidExprClass:
+ case Stmt::CXXUuidofExprClass:
+ case Stmt::CXXUnresolvedConstructExprClass:
+ case Stmt::CXXScalarValueInitExprClass:
+ case Stmt::DependentScopeDeclRefExprClass:
+ case Stmt::UnaryTypeTraitExprClass:
+ case Stmt::BinaryTypeTraitExprClass:
+ case Stmt::ArrayTypeTraitExprClass:
+ case Stmt::ExpressionTraitExprClass:
+ case Stmt::UnresolvedLookupExprClass:
+ case Stmt::UnresolvedMemberExprClass:
+ case Stmt::CXXNoexceptExprClass:
+ case Stmt::PackExpansionExprClass:
+ case Stmt::SubstNonTypeTemplateParmPackExprClass:
+ case Stmt::SEHTryStmtClass:
+ case Stmt::SEHExceptStmtClass:
+ case Stmt::SEHFinallyStmtClass:
+ {
+ SaveAndRestore<bool> OldSink(Builder->BuildSinks);
+ Builder->BuildSinks = true;
+ const ExplodedNode *node = MakeNode(Dst, S, Pred, GetState(Pred));
+ Engine.addAbortedBlock(node, Builder->getBlock());
+ break;
+ }
+
+ // We don't handle default arguments either yet, but we can fake it
+ // for now by just skipping them.
+ case Stmt::CXXDefaultArgExprClass: {
+ Dst.Add(Pred);
+ break;
+ }
+
+ case Stmt::ParenExprClass:
+ llvm_unreachable("ParenExprs already handled.");
+ case Stmt::GenericSelectionExprClass:
+ llvm_unreachable("GenericSelectionExprs already handled.");
+ // Cases that should never be evaluated simply because they shouldn't
+ // appear in the CFG.
+ case Stmt::BreakStmtClass:
+ case Stmt::CaseStmtClass:
+ case Stmt::CompoundStmtClass:
+ case Stmt::ContinueStmtClass:
+ case Stmt::DefaultStmtClass:
+ case Stmt::DoStmtClass:
+ case Stmt::ForStmtClass:
+ case Stmt::GotoStmtClass:
+ case Stmt::IfStmtClass:
+ case Stmt::IndirectGotoStmtClass:
+ case Stmt::LabelStmtClass:
+ case Stmt::NoStmtClass:
+ case Stmt::NullStmtClass:
+ case Stmt::SwitchStmtClass:
+ case Stmt::WhileStmtClass:
+ llvm_unreachable("Stmt should not be in analyzer evaluation loop");
+ break;
+
+ case Stmt::GNUNullExprClass: {
+ MakeNode(Dst, S, Pred, GetState(Pred)->BindExpr(S, svalBuilder.makeNull()));
+ break;
+ }
+
+ case Stmt::ObjCAtSynchronizedStmtClass:
+ VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
+ break;
+
+ case Stmt::ObjCPropertyRefExprClass:
+ VisitObjCPropertyRefExpr(cast<ObjCPropertyRefExpr>(S), Pred, Dst);
+ break;
+
+ // Cases not handled yet; but will handle some day.
+ case Stmt::DesignatedInitExprClass:
+ case Stmt::ExtVectorElementExprClass:
+ case Stmt::ImaginaryLiteralClass:
+ case Stmt::ImplicitValueInitExprClass:
+ case Stmt::ObjCAtCatchStmtClass:
+ case Stmt::ObjCAtFinallyStmtClass:
+ case Stmt::ObjCAtTryStmtClass:
+ case Stmt::ObjCEncodeExprClass:
+ case Stmt::ObjCIsaExprClass:
+ case Stmt::ObjCProtocolExprClass:
+ case Stmt::ObjCSelectorExprClass:
+ case Stmt::ObjCStringLiteralClass:
+ case Stmt::ParenListExprClass:
+ case Stmt::PredefinedExprClass:
+ case Stmt::ShuffleVectorExprClass:
+ case Stmt::VAArgExprClass:
+ case Stmt::CUDAKernelCallExprClass:
+ case Stmt::OpaqueValueExprClass:
+ // Fall through.
+
+ // Cases we intentionally don't evaluate, since they don't need
+ // to be explicitly evaluated.
+ case Stmt::AddrLabelExprClass:
+ case Stmt::IntegerLiteralClass:
+ case Stmt::CharacterLiteralClass:
+ case Stmt::CXXBoolLiteralExprClass:
+ case Stmt::ExprWithCleanupsClass:
+ case Stmt::FloatingLiteralClass:
+ case Stmt::SizeOfPackExprClass:
+ case Stmt::CXXNullPtrLiteralExprClass:
+ Dst.Add(Pred); // No-op. Simply propagate the current state unchanged.
+ break;
+
+ case Stmt::ArraySubscriptExprClass:
+ VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
+ break;
+
+ case Stmt::AsmStmtClass:
+ VisitAsmStmt(cast<AsmStmt>(S), Pred, Dst);
+ break;
+
+ case Stmt::BlockDeclRefExprClass: {
+ const BlockDeclRefExpr *BE = cast<BlockDeclRefExpr>(S);
+ VisitCommonDeclRefExpr(BE, BE->getDecl(), Pred, Dst);
+ break;
+ }
+
+ case Stmt::BlockExprClass:
+ VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
+ break;
+
+ case Stmt::BinaryOperatorClass: {
+ const BinaryOperator* B = cast<BinaryOperator>(S);
+ if (B->isLogicalOp()) {
+ VisitLogicalExpr(B, Pred, Dst);
+ break;
+ }
+ else if (B->getOpcode() == BO_Comma) {
+ const GRState* state = GetState(Pred);
+ MakeNode(Dst, B, Pred, state->BindExpr(B, state->getSVal(B->getRHS())));
+ break;
+ }
+
+ if (AMgr.shouldEagerlyAssume() &&
+ (B->isRelationalOp() || B->isEqualityOp())) {
+ ExplodedNodeSet Tmp;
+ VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
+ evalEagerlyAssume(Dst, Tmp, cast<Expr>(S));
+ }
+ else
+ VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
+
+ break;
+ }
+
+ case Stmt::CallExprClass:
+ case Stmt::CXXOperatorCallExprClass:
+ case Stmt::CXXMemberCallExprClass: {
+ VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
+ break;
+ }
+
+ case Stmt::CXXConstructExprClass: {
+ const CXXConstructExpr *C = cast<CXXConstructExpr>(S);
+ // For block-level CXXConstructExpr, we don't have a destination region.
+ // Let VisitCXXConstructExpr() create one.
+ VisitCXXConstructExpr(C, 0, Pred, Dst);
+ break;
+ }
+
+ case Stmt::CXXNewExprClass: {
+ const CXXNewExpr *NE = cast<CXXNewExpr>(S);
+ VisitCXXNewExpr(NE, Pred, Dst);
+ break;
+ }
+
+ case Stmt::CXXDeleteExprClass: {
+ const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
+ VisitCXXDeleteExpr(CDE, Pred, Dst);
+ break;
+ }
+ // FIXME: ChooseExpr is really a constant. We need to fix
+ // the CFG to not model it as explicit control-flow.
+
+ case Stmt::ChooseExprClass: { // __builtin_choose_expr
+ const ChooseExpr* C = cast<ChooseExpr>(S);
+ VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
+ break;
+ }
+
+ case Stmt::CompoundAssignOperatorClass:
+ VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
+ break;
+
+ case Stmt::CompoundLiteralExprClass:
+ VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
+ break;
+
+ case Stmt::BinaryConditionalOperatorClass:
+ case Stmt::ConditionalOperatorClass: { // '?' operator
+ const AbstractConditionalOperator *C
+ = cast<AbstractConditionalOperator>(S);
+ VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
+ break;
+ }
+
+ case Stmt::CXXThisExprClass:
+ VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
+ break;
+
+ case Stmt::DeclRefExprClass: {
+ const DeclRefExpr *DE = cast<DeclRefExpr>(S);
+ VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
+ break;
+ }
+
+ case Stmt::DeclStmtClass:
+ VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
+ break;
+
+ case Stmt::ImplicitCastExprClass:
+ case Stmt::CStyleCastExprClass:
+ case Stmt::CXXStaticCastExprClass:
+ case Stmt::CXXDynamicCastExprClass:
+ case Stmt::CXXReinterpretCastExprClass:
+ case Stmt::CXXConstCastExprClass:
+ case Stmt::CXXFunctionalCastExprClass: {
+ const CastExpr* C = cast<CastExpr>(S);
+ VisitCast(C, C->getSubExpr(), Pred, Dst);
+ break;
+ }
+
+ case Stmt::InitListExprClass:
+ VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
+ break;
+
+ case Stmt::MemberExprClass:
+ VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
+ break;
+ case Stmt::ObjCIvarRefExprClass:
+ VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
+ break;
+
+ case Stmt::ObjCForCollectionStmtClass:
+ VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
+ break;
+
+ case Stmt::ObjCMessageExprClass:
+ VisitObjCMessageExpr(cast<ObjCMessageExpr>(S), Pred, Dst);
+ break;
+
+ case Stmt::ObjCAtThrowStmtClass: {
+ // FIXME: This is not complete. We basically treat @throw as
+ // an abort.
+ SaveAndRestore<bool> OldSink(Builder->BuildSinks);
+ Builder->BuildSinks = true;
+ MakeNode(Dst, S, Pred, GetState(Pred));
+ break;
+ }
+
+ case Stmt::ReturnStmtClass:
+ VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
+ break;
+
+ case Stmt::OffsetOfExprClass:
+ VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
+ break;
+
+ case Stmt::UnaryExprOrTypeTraitExprClass:
+ VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
+ Pred, Dst);
+ break;
+
+ case Stmt::StmtExprClass: {
+ const StmtExpr* SE = cast<StmtExpr>(S);
+
+ if (SE->getSubStmt()->body_empty()) {
+ // Empty statement expression.
+ assert(SE->getType() == getContext().VoidTy
+ && "Empty statement expression must have void type.");
+ Dst.Add(Pred);
+ break;
+ }
+
+ if (Expr* LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
+ const GRState* state = GetState(Pred);
+ MakeNode(Dst, SE, Pred, state->BindExpr(SE, state->getSVal(LastExpr)));
+ }
+ else
+ Dst.Add(Pred);
+
+ break;
+ }
+
+ case Stmt::StringLiteralClass: {
+ const GRState* state = GetState(Pred);
+ SVal V = state->getLValue(cast<StringLiteral>(S));
+ MakeNode(Dst, S, Pred, state->BindExpr(S, V));
+ return;
+ }
+
+ case Stmt::UnaryOperatorClass: {
+ const UnaryOperator *U = cast<UnaryOperator>(S);
+    if (AMgr.shouldEagerlyAssume() && (U->getOpcode() == UO_LNot)) {
+ ExplodedNodeSet Tmp;
+ VisitUnaryOperator(U, Pred, Tmp);
+ evalEagerlyAssume(Dst, Tmp, U);
+ }
+ else
+ VisitUnaryOperator(U, Pred, Dst);
+ break;
+ }
+ }
+}
+
+//===----------------------------------------------------------------------===//
+// Block entrance. (Update counters).
+//===----------------------------------------------------------------------===//
+
+void ExprEngine::processCFGBlockEntrance(ExplodedNodeSet &dstNodes,
+ GenericNodeBuilder<BlockEntrance> &nodeBuilder){
+
+ // FIXME: Refactor this into a checker.
+ const CFGBlock *block = nodeBuilder.getProgramPoint().getBlock();
+ ExplodedNode *pred = nodeBuilder.getPredecessor();
+
+ if (nodeBuilder.getBlockCounter().getNumVisited(
+ pred->getLocationContext()->getCurrentStackFrame(),
+ block->getBlockID()) >= AMgr.getMaxVisit()) {
+
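+    // The block has been visited more than the configured maximum number of
+    // times within this stack frame; generate a node marked as a sink so
+    // this path is not explored any further.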
+ static int tag = 0;
+ nodeBuilder.generateNode(pred->getState(), pred, &tag, true);
+ }
+}
+
+//===----------------------------------------------------------------------===//
+// Generic node creation.
+//===----------------------------------------------------------------------===//
+
+ExplodedNode* ExprEngine::MakeNode(ExplodedNodeSet& Dst, const Stmt* S,
+ ExplodedNode* Pred, const GRState* St,
+ ProgramPoint::Kind K, const void *tag) {
+ assert (Builder && "StmtNodeBuilder not present.");
+ SaveAndRestore<const void*> OldTag(Builder->Tag);
+ Builder->Tag = tag;
+ return Builder->MakeNode(Dst, S, Pred, St, K);
+}
+
+//===----------------------------------------------------------------------===//
+// Branch processing.
+//===----------------------------------------------------------------------===//
+
+const GRState* ExprEngine::MarkBranch(const GRState* state,
+ const Stmt* Terminator,
+ bool branchTaken) {
+
+ switch (Terminator->getStmtClass()) {
+ default:
+ return state;
+
+ case Stmt::BinaryOperatorClass: { // '&&' and '||'
+
+ const BinaryOperator* B = cast<BinaryOperator>(Terminator);
+ BinaryOperator::Opcode Op = B->getOpcode();
+
+ assert (Op == BO_LAnd || Op == BO_LOr);
+
+ // For &&, if we take the true branch, then the value of the whole
+ // expression is that of the RHS expression.
+ //
+ // For ||, if we take the false branch, then the value of the whole
+ // expression is that of the RHS expression.
+
+ const Expr* Ex = (Op == BO_LAnd && branchTaken) ||
+ (Op == BO_LOr && !branchTaken)
+ ? B->getRHS() : B->getLHS();
+
+ return state->BindExpr(B, UndefinedVal(Ex));
+ }
+
+ case Stmt::BinaryConditionalOperatorClass:
+ case Stmt::ConditionalOperatorClass: { // ?:
+ const AbstractConditionalOperator* C
+ = cast<AbstractConditionalOperator>(Terminator);
+
+ // For ?, if branchTaken == true then the value is either the LHS or
+ // the condition itself. (GNU extension).
+
+ const Expr* Ex;
+
+ if (branchTaken)
+ Ex = C->getTrueExpr();
+ else
+ Ex = C->getFalseExpr();
+
+ return state->BindExpr(C, UndefinedVal(Ex));
+ }
+
+    case Stmt::ChooseExprClass: { // __builtin_choose_expr
+
+ const ChooseExpr* C = cast<ChooseExpr>(Terminator);
+
+ const Expr* Ex = branchTaken ? C->getLHS() : C->getRHS();
+ return state->BindExpr(C, UndefinedVal(Ex));
+ }
+ }
+}
+
+/// RecoverCastedSymbol - A helper function for processBranch that is used
+/// to try to recover some path-sensitivity for casts of symbolic
+/// integers that promote their values (which are currently not tracked well).
+/// This function returns the SVal bound to Condition->IgnoreCasts if all the
+/// casts did was sign-extend the original value.
+static SVal RecoverCastedSymbol(GRStateManager& StateMgr, const GRState* state,
+ const Stmt* Condition, ASTContext& Ctx) {
+
+ const Expr *Ex = dyn_cast<Expr>(Condition);
+ if (!Ex)
+ return UnknownVal();
+
+ uint64_t bits = 0;
+ bool bitsInit = false;
+
+ while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
+ QualType T = CE->getType();
+
+ if (!T->isIntegerType())
+ return UnknownVal();
+
+ uint64_t newBits = Ctx.getTypeSize(T);
+ if (!bitsInit || newBits < bits) {
+ bitsInit = true;
+ bits = newBits;
+ }
+
+ Ex = CE->getSubExpr();
+ }
+
+ // We reached a non-cast. Is it a symbolic value?
+ QualType T = Ex->getType();
+
+ if (!bitsInit || !T->isIntegerType() || Ctx.getTypeSize(T) > bits)
+ return UnknownVal();
+
+ return state->getSVal(Ex);
+}
+
+void ExprEngine::processBranch(const Stmt* Condition, const Stmt* Term,
+ BranchNodeBuilder& builder) {
+
+ // Check for NULL conditions; e.g. "for(;;)"
+ if (!Condition) {
+ builder.markInfeasible(false);
+ return;
+ }
+
+ PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
+ Condition->getLocStart(),
+ "Error evaluating branch");
+
+ getCheckerManager().runCheckersForBranchCondition(Condition, builder, *this);
+
+  // If the checker callbacks have already marked both branches infeasible
+  // (e.g., because the condition is undefined), there is nothing left to do.
+ if (!builder.isFeasible(true) && !builder.isFeasible(false))
+ return;
+
+ const GRState* PrevState = builder.getState();
+ SVal X = PrevState->getSVal(Condition);
+
+ if (X.isUnknownOrUndef()) {
+ // Give it a chance to recover from unknown.
+ if (const Expr *Ex = dyn_cast<Expr>(Condition)) {
+ if (Ex->getType()->isIntegerType()) {
+ // Try to recover some path-sensitivity. Right now casts of symbolic
+ // integers that promote their values are currently not tracked well.
+ // If 'Condition' is such an expression, try and recover the
+ // underlying value and use that instead.
+ SVal recovered = RecoverCastedSymbol(getStateManager(),
+ builder.getState(), Condition,
+ getContext());
+
+ if (!recovered.isUnknown()) {
+ X = recovered;
+ }
+ }
+ }
+ // If the condition is still unknown, give up.
+ if (X.isUnknownOrUndef()) {
+ builder.generateNode(MarkBranch(PrevState, Term, true), true);
+ builder.generateNode(MarkBranch(PrevState, Term, false), false);
+ return;
+ }
+ }
+
+ DefinedSVal V = cast<DefinedSVal>(X);
+
+ // Process the true branch.
+ if (builder.isFeasible(true)) {
+ if (const GRState *state = PrevState->assume(V, true))
+ builder.generateNode(MarkBranch(state, Term, true), true);
+ else
+ builder.markInfeasible(true);
+ }
+
+ // Process the false branch.
+ if (builder.isFeasible(false)) {
+ if (const GRState *state = PrevState->assume(V, false))
+ builder.generateNode(MarkBranch(state, Term, false), false);
+ else
+ builder.markInfeasible(false);
+ }
+}
+
+/// processIndirectGoto - Called by CoreEngine. Used to generate successor
+/// nodes by processing the 'effects' of a computed goto jump.
+void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) {
+
+ const GRState *state = builder.getState();
+ SVal V = state->getSVal(builder.getTarget());
+
+ // Three possibilities:
+ //
+ // (1) We know the computed label.
+ // (2) The label is NULL (or some other constant), or Undefined.
+ // (3) We have no clue about the label. Dispatch to all targets.
+ //
+
+ typedef IndirectGotoNodeBuilder::iterator iterator;
+
+ if (isa<loc::GotoLabel>(V)) {
+ const LabelDecl *L = cast<loc::GotoLabel>(V).getLabel();
+
+ for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) {
+ if (I.getLabel() == L) {
+ builder.generateNode(I, state);
+ return;
+ }
+ }
+
+ assert(false && "No block with label.");
+ return;
+ }
+
+ if (isa<loc::ConcreteInt>(V) || isa<UndefinedVal>(V)) {
+ // Dispatch to the first target and mark it as a sink.
+ //ExplodedNode* N = builder.generateNode(builder.begin(), state, true);
+ // FIXME: add checker visit.
+ // UndefBranches.insert(N);
+ return;
+ }
+
+ // This is really a catch-all. We don't support symbolics yet.
+ // FIXME: Implement dispatch for symbolic pointers.
+
+ for (iterator I=builder.begin(), E=builder.end(); I != E; ++I)
+ builder.generateNode(I, state);
+}
+
+
+void ExprEngine::VisitGuardedExpr(const Expr* Ex, const Expr* L,
+ const Expr* R,
+ ExplodedNode* Pred, ExplodedNodeSet& Dst) {
+
+ assert(Ex == currentStmt &&
+ Pred->getLocationContext()->getCFG()->isBlkExpr(Ex));
+
+ const GRState* state = GetState(Pred);
+ SVal X = state->getSVal(Ex);
+
+ assert (X.isUndef());
+
+ const Expr *SE = (Expr*) cast<UndefinedVal>(X).getData();
+ assert(SE);
+ X = state->getSVal(SE);
+
+ // Make sure that we invalidate the previous binding.
+ MakeNode(Dst, Ex, Pred, state->BindExpr(Ex, X, true));
+}
+
+/// processEndOfFunction - Called by CoreEngine. Used to generate end-of-path
+/// nodes when control reaches the end of a function.
+void ExprEngine::processEndOfFunction(EndOfFunctionNodeBuilder& builder) {
+ getTF().evalEndPath(*this, builder);
+ StateMgr.EndPath(builder.getState());
+ getCheckerManager().runCheckersForEndPath(builder, *this);
+}
+
+/// processSwitch - Called by CoreEngine. Used to generate successor
+/// nodes by processing the 'effects' of a switch statement.
+void ExprEngine::processSwitch(SwitchNodeBuilder& builder) {
+ typedef SwitchNodeBuilder::iterator iterator;
+ const GRState* state = builder.getState();
+ const Expr* CondE = builder.getCondition();
+ SVal CondV_untested = state->getSVal(CondE);
+
+ if (CondV_untested.isUndef()) {
+ //ExplodedNode* N = builder.generateDefaultCaseNode(state, true);
+ // FIXME: add checker
+ //UndefBranches.insert(N);
+
+ return;
+ }
+ DefinedOrUnknownSVal CondV = cast<DefinedOrUnknownSVal>(CondV_untested);
+
+ const GRState *DefaultSt = state;
+
+ iterator I = builder.begin(), EI = builder.end();
+ bool defaultIsFeasible = I == EI;
+
+ for ( ; I != EI; ++I) {
+ // Successor may be pruned out during CFG construction.
+ if (!I.getBlock())
+ continue;
+
+ const CaseStmt* Case = I.getCase();
+
+ // Evaluate the LHS of the case value.
+ Expr::EvalResult V1;
+ bool b = Case->getLHS()->Evaluate(V1, getContext());
+
+ // Sanity checks. These go away in Release builds.
+ assert(b && V1.Val.isInt() && !V1.HasSideEffects
+ && "Case condition must evaluate to an integer constant.");
+ (void)b; // silence unused variable warning
+ assert(V1.Val.getInt().getBitWidth() ==
+ getContext().getTypeSize(CondE->getType()));
+
+ // Get the RHS of the case, if it exists.
+ Expr::EvalResult V2;
+
+ if (const Expr* E = Case->getRHS()) {
+ b = E->Evaluate(V2, getContext());
+ assert(b && V2.Val.isInt() && !V2.HasSideEffects
+ && "Case condition must evaluate to an integer constant.");
+ (void)b; // silence unused variable warning
+ }
+ else
+ V2 = V1;
+
+ // FIXME: Eventually we should replace the logic below with a range
+ // comparison, rather than concretize the values within the range.
+ // This should be easy once we have "ranges" for NonLVals.
+
+ do {
+ nonloc::ConcreteInt CaseVal(getBasicVals().getValue(V1.Val.getInt()));
+ DefinedOrUnknownSVal Res = svalBuilder.evalEQ(DefaultSt ? DefaultSt : state,
+ CondV, CaseVal);
+
+ // Now "assume" that the case matches.
+ if (const GRState* stateNew = state->assume(Res, true)) {
+ builder.generateCaseStmtNode(I, stateNew);
+
+ // If CondV evaluates to a constant, then we know that this
+ // is the *only* case that we can take, so stop evaluating the
+ // others.
+ if (isa<nonloc::ConcreteInt>(CondV))
+ return;
+ }
+
+ // Now "assume" that the case doesn't match. Add this state
+ // to the default state (if it is feasible).
+ if (DefaultSt) {
+ if (const GRState *stateNew = DefaultSt->assume(Res, false)) {
+ defaultIsFeasible = true;
+ DefaultSt = stateNew;
+ }
+ else {
+ defaultIsFeasible = false;
+ DefaultSt = NULL;
+ }
+ }
+
+ // Concretize the next value in the range.
+ if (V1.Val.getInt() == V2.Val.getInt())
+ break;
+
+ ++V1.Val.getInt();
+ assert (V1.Val.getInt() <= V2.Val.getInt());
+
+ } while (true);
+ }
+
+ if (!defaultIsFeasible)
+ return;
+
+ // If we have switch(enum value), the default branch is not
+ // feasible if all of the enum constants not covered by 'case:' statements
+ // are not feasible values for the switch condition.
+ //
+  // Note that this isn't as accurate as it could be. Even if there isn't
+  // a case for a particular enum value, as long as that enum value isn't
+  // feasible it shouldn't be counted toward making 'default:' reachable.
+ const SwitchStmt *SS = builder.getSwitch();
+ const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts();
+ if (CondExpr->getType()->getAs<EnumType>()) {
+ if (SS->isAllEnumCasesCovered())
+ return;
+ }
+
+ builder.generateDefaultCaseNode(DefaultSt);
+}
+
+void ExprEngine::processCallEnter(CallEnterNodeBuilder &B) {
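+  // Derive the callee's initial state by entering its stack frame, then add
+  // a node for the call entry.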
+ const GRState *state = B.getState()->enterStackFrame(B.getCalleeContext());
+ B.generateNode(state);
+}
+
+void ExprEngine::processCallExit(CallExitNodeBuilder &B) {
+ const GRState *state = B.getState();
+ const ExplodedNode *Pred = B.getPredecessor();
+ const StackFrameContext *calleeCtx =
+ cast<StackFrameContext>(Pred->getLocationContext());
+ const Stmt *CE = calleeCtx->getCallSite();
+
+ // If the callee returns an expression, bind its value to CallExpr.
+ const Stmt *ReturnedExpr = state->get<ReturnExpr>();
+ if (ReturnedExpr) {
+ SVal RetVal = state->getSVal(ReturnedExpr);
+ state = state->BindExpr(CE, RetVal);
+ // Clear the return expr GDM.
+ state = state->remove<ReturnExpr>();
+ }
+
+ // Bind the constructed object value to CXXConstructExpr.
+ if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
+ const CXXThisRegion *ThisR =
+ getCXXThisRegion(CCE->getConstructor()->getParent(), calleeCtx);
+
+ SVal ThisV = state->getSVal(ThisR);
+ // Always bind the region to the CXXConstructExpr.
+ state = state->BindExpr(CCE, ThisV);
+ }
+
+ B.generateNode(state);
+}
+
+//===----------------------------------------------------------------------===//
+// Transfer functions: logical operations ('&&', '||').
+//===----------------------------------------------------------------------===//
+
+void ExprEngine::VisitLogicalExpr(const BinaryOperator* B, ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+
+ assert(B->getOpcode() == BO_LAnd ||
+ B->getOpcode() == BO_LOr);
+
+ assert(B==currentStmt && Pred->getLocationContext()->getCFG()->isBlkExpr(B));
+
+ const GRState* state = GetState(Pred);
+ SVal X = state->getSVal(B);
+ assert(X.isUndef());
+
+ const Expr *Ex = (const Expr*) cast<UndefinedVal>(X).getData();
+ assert(Ex);
+
+ if (Ex == B->getRHS()) {
+ X = state->getSVal(Ex);
+
+ // Handle undefined values.
+ if (X.isUndef()) {
+ MakeNode(Dst, B, Pred, state->BindExpr(B, X));
+ return;
+ }
+
+ DefinedOrUnknownSVal XD = cast<DefinedOrUnknownSVal>(X);
+
+ // We took the RHS. Because the value of the '&&' or '||' expression must
+ // evaluate to 0 or 1, we must assume the value of the RHS evaluates to 0
+ // or 1. Alternatively, we could take a lazy approach, and calculate this
+ // value later when necessary. We don't have the machinery in place for
+ // this right now, and since most logical expressions are used for branches,
+ // the payoff is not likely to be large. Instead, we do eager evaluation.
+ if (const GRState *newState = state->assume(XD, true))
+ MakeNode(Dst, B, Pred,
+ newState->BindExpr(B, svalBuilder.makeIntVal(1U, B->getType())));
+
+ if (const GRState *newState = state->assume(XD, false))
+ MakeNode(Dst, B, Pred,
+ newState->BindExpr(B, svalBuilder.makeIntVal(0U, B->getType())));
+ }
+ else {
+ // We took the LHS expression. Depending on whether we are '&&' or
+ // '||' we know what the value of the expression is via properties of
+ // the short-circuiting.
+ X = svalBuilder.makeIntVal(B->getOpcode() == BO_LAnd ? 0U : 1U,
+ B->getType());
+ MakeNode(Dst, B, Pred, state->BindExpr(B, X));
+ }
+}
+
+//===----------------------------------------------------------------------===//
+// Transfer functions: Loads and stores.
+//===----------------------------------------------------------------------===//
+
+void ExprEngine::VisitBlockExpr(const BlockExpr *BE, ExplodedNode *Pred,
+ ExplodedNodeSet &Dst) {
+
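+  // A BlockExpr evaluates to a pointer to the block literal; construct that
+  // value and bind it to the expression.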
+ ExplodedNodeSet Tmp;
+
+ CanQualType T = getContext().getCanonicalType(BE->getType());
+ SVal V = svalBuilder.getBlockPointer(BE->getBlockDecl(), T,
+ Pred->getLocationContext());
+
+ MakeNode(Tmp, BE, Pred, GetState(Pred)->BindExpr(BE, V),
+ ProgramPoint::PostLValueKind);
+
+ // Post-visit the BlockExpr.
+ getCheckerManager().runCheckersForPostStmt(Dst, Tmp, BE, *this);
+}
+
+void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D,
+ ExplodedNode *Pred,
+ ExplodedNodeSet &Dst) {
+ const GRState *state = GetState(Pred);
+
+ if (const VarDecl* VD = dyn_cast<VarDecl>(D)) {
+ assert(Ex->isLValue());
+ SVal V = state->getLValue(VD, Pred->getLocationContext());
+
+ // For references, the 'lvalue' is the pointer address stored in the
+ // reference region.
+ if (VD->getType()->isReferenceType()) {
+ if (const MemRegion *R = V.getAsRegion())
+ V = state->getSVal(R);
+ else
+ V = UnknownVal();
+ }
+
+ MakeNode(Dst, Ex, Pred, state->BindExpr(Ex, V),
+ ProgramPoint::PostLValueKind);
+ return;
+ }
+ if (const EnumConstantDecl* ED = dyn_cast<EnumConstantDecl>(D)) {
+ assert(!Ex->isLValue());
+ SVal V = svalBuilder.makeIntVal(ED->getInitVal());
+ MakeNode(Dst, Ex, Pred, state->BindExpr(Ex, V));
+ return;
+ }
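+  // A reference to a function is modeled as a function pointer value.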
+ if (const FunctionDecl* FD = dyn_cast<FunctionDecl>(D)) {
+ SVal V = svalBuilder.getFunctionPointer(FD);
+ MakeNode(Dst, Ex, Pred, state->BindExpr(Ex, V),
+ ProgramPoint::PostLValueKind);
+ return;
+ }
+ assert (false &&
+ "ValueDecl support for this ValueDecl not implemented.");
+}
+
+/// VisitLvalArraySubscriptExpr - Transfer function for array accesses.
+void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr* A,
+ ExplodedNode* Pred,
+ ExplodedNodeSet& Dst){
+
+ const Expr* Base = A->getBase()->IgnoreParens();
+ const Expr* Idx = A->getIdx()->IgnoreParens();
+
+ // Evaluate the base.
+ ExplodedNodeSet Tmp;
+ Visit(Base, Pred, Tmp);
+
+ for (ExplodedNodeSet::iterator I1=Tmp.begin(), E1=Tmp.end(); I1!=E1; ++I1) {
+ ExplodedNodeSet Tmp2;
+ Visit(Idx, *I1, Tmp2); // Evaluate the index.
+ ExplodedNodeSet Tmp3;
+ getCheckerManager().runCheckersForPreStmt(Tmp3, Tmp2, A, *this);
+
+ for (ExplodedNodeSet::iterator I2=Tmp3.begin(),E2=Tmp3.end();I2!=E2; ++I2) {
+ const GRState* state = GetState(*I2);
+ SVal V = state->getLValue(A->getType(), state->getSVal(Idx),
+ state->getSVal(Base));
+ assert(A->isLValue());
+ MakeNode(Dst, A, *I2, state->BindExpr(A, V), ProgramPoint::PostLValueKind);
+ }
+ }
+}
+
+/// VisitMemberExpr - Transfer function for member expressions.
+void ExprEngine::VisitMemberExpr(const MemberExpr* M, ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+
+ Expr *baseExpr = M->getBase()->IgnoreParens();
+ ExplodedNodeSet dstBase;
+ Visit(baseExpr, Pred, dstBase);
+
+ FieldDecl *field = dyn_cast<FieldDecl>(M->getMemberDecl());
+ if (!field) // FIXME: skipping member expressions for non-fields
+ return;
+
+ for (ExplodedNodeSet::iterator I = dstBase.begin(), E = dstBase.end();
+ I != E; ++I) {
+ const GRState* state = GetState(*I);
+ SVal baseExprVal = state->getSVal(baseExpr);
+ if (isa<nonloc::LazyCompoundVal>(baseExprVal) ||
+ isa<nonloc::CompoundVal>(baseExprVal) ||
+        // FIXME: This can originate from conjuring a symbol for an unknown
+ // temporary struct object, see test/Analysis/fields.c:
+ // (p = getit()).x
+ isa<nonloc::SymbolVal>(baseExprVal)) {
+ MakeNode(Dst, M, *I, state->BindExpr(M, UnknownVal()));
+ continue;
+ }
+
+ // FIXME: Should we insert some assumption logic in here to determine
+ // if "Base" is a valid piece of memory? Before we put this assumption
+ // later when using FieldOffset lvals (which we no longer have).
+
+ // For all other cases, compute an lvalue.
+ SVal L = state->getLValue(field, baseExprVal);
+ if (M->isLValue())
+ MakeNode(Dst, M, *I, state->BindExpr(M, L), ProgramPoint::PostLValueKind);
+ else
+ evalLoad(Dst, M, *I, state, L);
+ }
+}
+
+/// evalBind - Handle the semantics of binding a value to a specific location.
+/// This method is used by evalStore, VisitDeclStmt, and others.
+void ExprEngine::evalBind(ExplodedNodeSet& Dst, const Stmt* StoreE,
+ ExplodedNode* Pred, const GRState* state,
+ SVal location, SVal Val, bool atDeclInit) {
+
+ // Do a previsit of the bind.
+ ExplodedNodeSet CheckedSet, Src;
+ Src.Add(Pred);
+ getCheckerManager().runCheckersForBind(CheckedSet, Src, location, Val, StoreE,
+ *this);
+
+ for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
+ I!=E; ++I) {
+
+ if (Pred != *I)
+ state = GetState(*I);
+
+ const GRState* newState = 0;
+
+ if (atDeclInit) {
+ const VarRegion *VR =
+ cast<VarRegion>(cast<loc::MemRegionVal>(location).getRegion());
+
+ newState = state->bindDecl(VR, Val);
+ }
+ else {
+ if (location.isUnknown()) {
+        // We know that the new state will be the same as the old state since
+        // the location of the binding is "unknown". Consequently, there
+        // is no need to create a new state; just propagate the current one.
+ newState = state;
+ }
+ else {
+ // We are binding to a value other than 'unknown'. Perform the binding
+ // using the StoreManager.
+ newState = state->bindLoc(cast<Loc>(location), Val);
+ }
+ }
+
+ // The next thing to do is check if the TransferFuncs object wants to
+ // update the state based on the new binding. If the GRTransferFunc object
+ // doesn't do anything, just auto-propagate the current state.
+
+    // NOTE: The PostStore program point is anchored at 'StoreE' (the
+    // assignment expression when one exists), since checkers typically care
+    // about the expression that performed the store.
+
+ StmtNodeBuilderRef BuilderRef(Dst, *Builder, *this, *I, newState, StoreE,
+ true);
+
+ getTF().evalBind(BuilderRef, location, Val);
+ }
+}
+
+/// evalStore - Handle the semantics of a store via an assignment.
+/// @param Dst The node set to store generated state nodes
+/// @param AssignE The assignment expression if the store happens in an
+/// assignment.
+///  @param LocationE The location expression that is stored to.
+/// @param state The current simulation state
+/// @param location The location to store the value
+/// @param Val The value to be stored
+void ExprEngine::evalStore(ExplodedNodeSet& Dst, const Expr *AssignE,
+ const Expr* LocationE,
+ ExplodedNode* Pred,
+ const GRState* state, SVal location, SVal Val,
+ const void *tag) {
+
+ assert(Builder && "StmtNodeBuilder must be defined.");
+
+ // Proceed with the store. We use AssignE as the anchor for the PostStore
+ // ProgramPoint if it is non-NULL, and LocationE otherwise.
+ const Expr *StoreE = AssignE ? AssignE : LocationE;
+
+ if (isa<loc::ObjCPropRef>(location)) {
+ loc::ObjCPropRef prop = cast<loc::ObjCPropRef>(location);
+ ExplodedNodeSet src = Pred;
+ return VisitObjCMessage(ObjCPropertySetter(prop.getPropRefExpr(),
+ StoreE, Val), src, Dst);
+ }
+
+ // Evaluate the location (checks for bad dereferences).
+ ExplodedNodeSet Tmp;
+ evalLocation(Tmp, LocationE, Pred, state, location, tag, false);
+
+ if (Tmp.empty())
+ return;
+
+ if (location.isUndef())
+ return;
+
+ SaveAndRestore<ProgramPoint::Kind> OldSPointKind(Builder->PointKind,
+ ProgramPoint::PostStoreKind);
+
+ for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI)
+ evalBind(Dst, StoreE, *NI, GetState(*NI), location, Val);
+}
+
+void ExprEngine::evalLoad(ExplodedNodeSet& Dst, const Expr *Ex,
+ ExplodedNode* Pred,
+ const GRState* state, SVal location,
+ const void *tag, QualType LoadTy) {
+ assert(!isa<NonLoc>(location) && "location cannot be a NonLoc.");
+
+ if (isa<loc::ObjCPropRef>(location)) {
+ loc::ObjCPropRef prop = cast<loc::ObjCPropRef>(location);
+ ExplodedNodeSet src = Pred;
+ return VisitObjCMessage(ObjCPropertyGetter(prop.getPropRefExpr(), Ex),
+ src, Dst);
+ }
+
+ // Are we loading from a region? This actually results in two loads; one
+ // to fetch the address of the referenced value and one to fetch the
+ // referenced value.
+ if (const TypedRegion *TR =
+ dyn_cast_or_null<TypedRegion>(location.getAsRegion())) {
+
+ QualType ValTy = TR->getValueType();
+ if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) {
+ static int loadReferenceTag = 0;
+ ExplodedNodeSet Tmp;
+ evalLoadCommon(Tmp, Ex, Pred, state, location, &loadReferenceTag,
+ getContext().getPointerType(RT->getPointeeType()));
+
+ // Perform the load from the referenced value.
+ for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) {
+ state = GetState(*I);
+ location = state->getSVal(Ex);
+ evalLoadCommon(Dst, Ex, *I, state, location, tag, LoadTy);
+ }
+ return;
+ }
+ }
+
+ evalLoadCommon(Dst, Ex, Pred, state, location, tag, LoadTy);
+}
+
+void ExprEngine::evalLoadCommon(ExplodedNodeSet& Dst, const Expr *Ex,
+ ExplodedNode* Pred,
+ const GRState* state, SVal location,
+ const void *tag, QualType LoadTy) {
+
+ // Evaluate the location (checks for bad dereferences).
+ ExplodedNodeSet Tmp;
+ evalLocation(Tmp, Ex, Pred, state, location, tag, true);
+
+ if (Tmp.empty())
+ return;
+
+ if (location.isUndef())
+ return;
+
+ SaveAndRestore<ProgramPoint::Kind> OldSPointKind(Builder->PointKind);
+
+ // Proceed with the load.
+ for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) {
+ state = GetState(*NI);
+
+ if (location.isUnknown()) {
+ // This is important. We must nuke the old binding.
+ MakeNode(Dst, Ex, *NI, state->BindExpr(Ex, UnknownVal()),
+ ProgramPoint::PostLoadKind, tag);
+ }
+ else {
+ if (LoadTy.isNull())
+ LoadTy = Ex->getType();
+ SVal V = state->getSVal(cast<Loc>(location), LoadTy);
+ MakeNode(Dst, Ex, *NI, state->bindExprAndLocation(Ex, location, V),
+ ProgramPoint::PostLoadKind, tag);
+ }
+ }
+}
+
+void ExprEngine::evalLocation(ExplodedNodeSet &Dst, const Stmt *S,
+ ExplodedNode* Pred,
+ const GRState* state, SVal location,
+ const void *tag, bool isLoad) {
+  // Early checks for performance reasons.
+ if (location.isUnknown()) {
+ Dst.Add(Pred);
+ return;
+ }
+
+ ExplodedNodeSet Src;
+ if (Builder->GetState(Pred) == state) {
+ Src.Add(Pred);
+ } else {
+ // Associate this new state with an ExplodedNode.
+    // FIXME: If I pass a null tag, the graph is incorrect, e.g. for
+ // int *p;
+ // p = 0;
+ // *p = 0xDEADBEEF;
+ // "p = 0" is not noted as "Null pointer value stored to 'p'" but
+ // instead "int *p" is noted as
+ // "Variable 'p' initialized to a null pointer value"
+ ExplodedNode *N = Builder->generateNode(S, state, Pred, this);
+ Src.Add(N ? N : Pred);
+ }
+ getCheckerManager().runCheckersForLocation(Dst, Src, location, isLoad, S,
+ *this);
+}
+
+bool ExprEngine::InlineCall(ExplodedNodeSet &Dst, const CallExpr *CE,
+ ExplodedNode *Pred) {
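+  // Inlining is currently disabled; see the explanation below for why it is
+  // not yet correct.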
+ return false;
+
+ // Inlining isn't correct right now because we:
+ // (a) don't generate CallExit nodes.
+ // (b) we need a way to postpone doing post-visits of CallExprs until
+ // the CallExit. This means we need CallExits for the non-inline
+ // cases as well.
+
+#if 0
+ const GRState *state = GetState(Pred);
+ const Expr *Callee = CE->getCallee();
+ SVal L = state->getSVal(Callee);
+
+ const FunctionDecl *FD = L.getAsFunctionDecl();
+ if (!FD)
+ return false;
+
+ // Specially handle CXXMethods.
+ const CXXMethodDecl *methodDecl = 0;
+
+ switch (CE->getStmtClass()) {
+ default: break;
+ case Stmt::CXXOperatorCallExprClass: {
+ const CXXOperatorCallExpr *opCall = cast<CXXOperatorCallExpr>(CE);
+ methodDecl =
+ llvm::dyn_cast_or_null<CXXMethodDecl>(opCall->getCalleeDecl());
+ break;
+ }
+ case Stmt::CXXMemberCallExprClass: {
+ const CXXMemberCallExpr *memberCall = cast<CXXMemberCallExpr>(CE);
+ const MemberExpr *memberExpr =
+ cast<MemberExpr>(memberCall->getCallee()->IgnoreParens());
+ methodDecl = cast<CXXMethodDecl>(memberExpr->getMemberDecl());
+ break;
+ }
+ }
+
+
+
+
+ // Check if the function definition is in the same translation unit.
+ if (FD->hasBody(FD)) {
+ const StackFrameContext *stackFrame =
+ AMgr.getStackFrame(AMgr.getAnalysisContext(FD),
+ Pred->getLocationContext(),
+ CE, Builder->getBlock(), Builder->getIndex());
+ // Now we have the definition of the callee, create a CallEnter node.
+ CallEnter Loc(CE, stackFrame, Pred->getLocationContext());
+
+ ExplodedNode *N = Builder->generateNode(Loc, state, Pred);
+ Dst.Add(N);
+ return true;
+ }
+
+ // Check if we can find the function definition in other translation units.
+ if (AMgr.hasIndexer()) {
+ AnalysisContext *C = AMgr.getAnalysisContextInAnotherTU(FD);
+ if (C == 0)
+ return false;
+ const StackFrameContext *stackFrame =
+ AMgr.getStackFrame(C, Pred->getLocationContext(),
+ CE, Builder->getBlock(), Builder->getIndex());
+ CallEnter Loc(CE, stackFrame, Pred->getLocationContext());
+ ExplodedNode *N = Builder->generateNode(Loc, state, Pred);
+ Dst.Add(N);
+ return true;
+ }
+
+ // Generate the CallExit node.
+
+ return false;
+#endif
+}
+
+void ExprEngine::VisitCallExpr(const CallExpr* CE, ExplodedNode* Pred,
+ ExplodedNodeSet& dst) {
+
+ // Determine the type of function we're calling (if available).
+ const FunctionProtoType *Proto = NULL;
+ QualType FnType = CE->getCallee()->IgnoreParens()->getType();
+ if (const PointerType *FnTypePtr = FnType->getAs<PointerType>())
+ Proto = FnTypePtr->getPointeeType()->getAs<FunctionProtoType>();
+
+ // Should the first argument be evaluated as an lvalue?
+ bool firstArgumentAsLvalue = false;
+ switch (CE->getStmtClass()) {
+ case Stmt::CXXOperatorCallExprClass:
+ firstArgumentAsLvalue = true;
+ break;
+ default:
+ break;
+ }
+
+ // Evaluate the arguments.
+ ExplodedNodeSet dstArgsEvaluated;
+ evalArguments(CE->arg_begin(), CE->arg_end(), Proto, Pred, dstArgsEvaluated,
+ firstArgumentAsLvalue);
+
+ // Evaluate the callee.
+ ExplodedNodeSet dstCalleeEvaluated;
+ evalCallee(CE, dstArgsEvaluated, dstCalleeEvaluated);
+
+ // Perform the previsit of the CallExpr.
+ ExplodedNodeSet dstPreVisit;
+ getCheckerManager().runCheckersForPreStmt(dstPreVisit, dstCalleeEvaluated,
+ CE, *this);
+
+ // Now evaluate the call itself.
+ class DefaultEval : public GraphExpander {
+ ExprEngine &Eng;
+ const CallExpr *CE;
+ public:
+
+ DefaultEval(ExprEngine &eng, const CallExpr *ce)
+ : Eng(eng), CE(ce) {}
+ virtual void expandGraph(ExplodedNodeSet &Dst, ExplodedNode *Pred) {
+ // Should we inline the call?
+ if (Eng.getAnalysisManager().shouldInlineCall() &&
+ Eng.InlineCall(Dst, CE, Pred)) {
+ return;
+ }
+
+ StmtNodeBuilder &Builder = Eng.getBuilder();
+ assert(&Builder && "StmtNodeBuilder must be defined.");
+
+ // Dispatch to the plug-in transfer function.
+ unsigned oldSize = Dst.size();
+ SaveOr OldHasGen(Builder.hasGeneratedNode);
+
+ // Dispatch to transfer function logic to handle the call itself.
+ const Expr* Callee = CE->getCallee()->IgnoreParens();
+ const GRState* state = Eng.GetState(Pred);
+ SVal L = state->getSVal(Callee);
+ Eng.getTF().evalCall(Dst, Eng, Builder, CE, L, Pred);
+
+      // Handle the case where no nodes were generated. Auto-generate a node
+      // that contains the updated state if we aren't generating sinks.
+ if (!Builder.BuildSinks && Dst.size() == oldSize &&
+ !Builder.hasGeneratedNode)
+ Eng.MakeNode(Dst, CE, Pred, state);
+ }
+ };
+
+  // Now evaluate the function call itself. We try each of the checkers
+  // to see if they can evaluate the function call.
+ ExplodedNodeSet dstCallEvaluated;
+ DefaultEval defEval(*this, CE);
+ getCheckerManager().runCheckersForEvalCall(dstCallEvaluated,
+ dstPreVisit,
+ CE, *this, &defEval);
+
+ // Finally, perform the post-condition check of the CallExpr and store
+ // the created nodes in 'Dst'.
+ getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
+ *this);
+}
+
+//===----------------------------------------------------------------------===//
+// Transfer function: Objective-C dot-syntax to access a property.
+//===----------------------------------------------------------------------===//
+
+void ExprEngine::VisitObjCPropertyRefExpr(const ObjCPropertyRefExpr *Ex,
+ ExplodedNode *Pred,
+ ExplodedNodeSet &Dst) {
+ ExplodedNodeSet dstBase;
+
+ // Visit the receiver (if any).
+ if (Ex->isObjectReceiver())
+ Visit(Ex->getBase(), Pred, dstBase);
+ else
+ dstBase = Pred;
+
+ ExplodedNodeSet dstPropRef;
+
+ // Using the base, compute the lvalue of the instance variable.
+ for (ExplodedNodeSet::iterator I = dstBase.begin(), E = dstBase.end();
+ I!=E; ++I) {
+ ExplodedNode *nodeBase = *I;
+ const GRState *state = GetState(nodeBase);
+ MakeNode(dstPropRef, Ex, *I, state->BindExpr(Ex, loc::ObjCPropRef(Ex)));
+ }
+
+ Dst.insert(dstPropRef);
+}
+
+//===----------------------------------------------------------------------===//
+// Transfer function: eagerly assume the truth value of certain expressions.
+//===----------------------------------------------------------------------===//
+
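+// Tag attached to the PostStmtCustom program points created by
+// evalEagerlyAssume().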
+static std::pair<const void*,const void*> EagerlyAssumeTag
+ = std::pair<const void*,const void*>(&EagerlyAssumeTag,static_cast<void*>(0));
+
+void ExprEngine::evalEagerlyAssume(ExplodedNodeSet &Dst, ExplodedNodeSet &Src,
+ const Expr *Ex) {
+ for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) {
+ ExplodedNode *Pred = *I;
+
+    // Test if the previous node was at the same expression. This can happen
+ // when the expression fails to evaluate to anything meaningful and
+ // (as an optimization) we don't generate a node.
+ ProgramPoint P = Pred->getLocation();
+ if (!isa<PostStmt>(P) || cast<PostStmt>(P).getStmt() != Ex) {
+ Dst.Add(Pred);
+ continue;
+ }
+
+ const GRState* state = GetState(Pred);
+ SVal V = state->getSVal(Ex);
+ if (nonloc::SymExprVal *SEV = dyn_cast<nonloc::SymExprVal>(&V)) {
+ // First assume that the condition is true.
+ if (const GRState *stateTrue = state->assume(*SEV, true)) {
+ stateTrue = stateTrue->BindExpr(Ex,
+ svalBuilder.makeIntVal(1U, Ex->getType()));
+ Dst.Add(Builder->generateNode(PostStmtCustom(Ex,
+ &EagerlyAssumeTag, Pred->getLocationContext()),
+ stateTrue, Pred));
+ }
+
+ // Next, assume that the condition is false.
+ if (const GRState *stateFalse = state->assume(*SEV, false)) {
+ stateFalse = stateFalse->BindExpr(Ex,
+ svalBuilder.makeIntVal(0U, Ex->getType()));
+ Dst.Add(Builder->generateNode(PostStmtCustom(Ex, &EagerlyAssumeTag,
+ Pred->getLocationContext()),
+ stateFalse, Pred));
+ }
+ }
+ else
+ Dst.Add(Pred);
+ }
+}
+
+//===----------------------------------------------------------------------===//
+// Transfer function: Objective-C @synchronized.
+//===----------------------------------------------------------------------===//
+
+void ExprEngine::VisitObjCAtSynchronizedStmt(const ObjCAtSynchronizedStmt *S,
+ ExplodedNode *Pred,
+ ExplodedNodeSet &Dst) {
+
+ // The mutex expression is a CFGElement, so we don't need to explicitly
+ // visit it since it will already be processed.
+
+ // Pre-visit the ObjCAtSynchronizedStmt.
+ ExplodedNodeSet Tmp;
+ Tmp.Add(Pred);
+ getCheckerManager().runCheckersForPreStmt(Dst, Tmp, S, *this);
+}
+
+//===----------------------------------------------------------------------===//
+// Transfer function: Objective-C ivar references.
+//===----------------------------------------------------------------------===//
+
+void ExprEngine::VisitLvalObjCIvarRefExpr(const ObjCIvarRefExpr* Ex,
+ ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+
+ // Visit the base expression, which is needed for computing the lvalue
+ // of the ivar.
+ ExplodedNodeSet dstBase;
+ const Expr *baseExpr = Ex->getBase();
+ Visit(baseExpr, Pred, dstBase);
+
+ ExplodedNodeSet dstIvar;
+
+ // Using the base, compute the lvalue of the instance variable.
+ for (ExplodedNodeSet::iterator I = dstBase.begin(), E = dstBase.end();
+ I!=E; ++I) {
+ ExplodedNode *nodeBase = *I;
+ const GRState *state = GetState(nodeBase);
+ SVal baseVal = state->getSVal(baseExpr);
+ SVal location = state->getLValue(Ex->getDecl(), baseVal);
+ MakeNode(dstIvar, Ex, *I, state->BindExpr(Ex, location));
+ }
+
+ // Perform the post-condition check of the ObjCIvarRefExpr and store
+ // the created nodes in 'Dst'.
+ getCheckerManager().runCheckersForPostStmt(Dst, dstIvar, Ex, *this);
+}
+
+//===----------------------------------------------------------------------===//
+// Transfer function: Objective-C fast enumeration 'for' statements.
+//===----------------------------------------------------------------------===//
+
+void ExprEngine::VisitObjCForCollectionStmt(const ObjCForCollectionStmt* S,
+ ExplodedNode* Pred, ExplodedNodeSet& Dst) {
+
+ // ObjCForCollectionStmts are processed in two places. This method
+ // handles the case where an ObjCForCollectionStmt* occurs as one of the
+ // statements within a basic block. This transfer function does two things:
+ //
+ // (1) binds the next container value to 'element'. This creates a new
+ // node in the ExplodedGraph.
+ //
+ // (2) binds the value 0/1 to the ObjCForCollectionStmt* itself, indicating
+ // whether or not the container has any more elements. This value
+  //      will be tested in processBranch. We need to explicitly bind
+ // this value because a container can contain nil elements.
+ //
+ // FIXME: Eventually this logic should actually do dispatches to
+ // 'countByEnumeratingWithState:objects:count:' (NSFastEnumeration).
+ // This will require simulating a temporary NSFastEnumerationState, either
+ // through an SVal or through the use of MemRegions. This value can
+ // be affixed to the ObjCForCollectionStmt* instead of 0/1; when the loop
+  //  terminates we reclaim the temporary (it goes out of scope) and we
+  //  can test if the SVal is 0 or if the MemRegion is null (depending
+ // on what approach we take).
+ //
+ // For now: simulate (1) by assigning either a symbol or nil if the
+ // container is empty. Thus this transfer function will by default
+ // result in state splitting.
+
+ const Stmt* elem = S->getElement();
+ SVal ElementV;
+
+ if (const DeclStmt* DS = dyn_cast<DeclStmt>(elem)) {
+ const VarDecl* ElemD = cast<VarDecl>(DS->getSingleDecl());
+ assert (ElemD->getInit() == 0);
+ ElementV = GetState(Pred)->getLValue(ElemD, Pred->getLocationContext());
+ VisitObjCForCollectionStmtAux(S, Pred, Dst, ElementV);
+ return;
+ }
+
+ ExplodedNodeSet Tmp;
+ Visit(cast<Expr>(elem), Pred, Tmp);
+ for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I!=E; ++I) {
+ const GRState* state = GetState(*I);
+ VisitObjCForCollectionStmtAux(S, *I, Dst, state->getSVal(elem));
+ }
+}
+
+void ExprEngine::VisitObjCForCollectionStmtAux(const ObjCForCollectionStmt* S,
+ ExplodedNode* Pred, ExplodedNodeSet& Dst,
+ SVal ElementV) {
+
+ // Check if the location we are writing back to is a null pointer.
+ const Stmt* elem = S->getElement();
+ ExplodedNodeSet Tmp;
+ evalLocation(Tmp, elem, Pred, GetState(Pred), ElementV, NULL, false);
+
+ if (Tmp.empty())
+ return;
+
+ for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) {
+ Pred = *NI;
+ const GRState *state = GetState(Pred);
+
+ // Handle the case where the container still has elements.
+ SVal TrueV = svalBuilder.makeTruthVal(1);
+ const GRState *hasElems = state->BindExpr(S, TrueV);
+
+ // Handle the case where the container has no elements.
+ SVal FalseV = svalBuilder.makeTruthVal(0);
+ const GRState *noElems = state->BindExpr(S, FalseV);
+
+ if (loc::MemRegionVal* MV = dyn_cast<loc::MemRegionVal>(&ElementV))
+ if (const TypedRegion* R = dyn_cast<TypedRegion>(MV->getRegion())) {
+ // FIXME: The proper thing to do is to really iterate over the
+ // container. We will do this with dispatch logic to the store.
+ // For now, just 'conjure' up a symbolic value.
+ QualType T = R->getValueType();
+ assert(Loc::isLocType(T));
+ unsigned Count = Builder->getCurrentBlockCount();
+ SymbolRef Sym = SymMgr.getConjuredSymbol(elem, T, Count);
+ SVal V = svalBuilder.makeLoc(Sym);
+ hasElems = hasElems->bindLoc(ElementV, V);
+
+ // Bind the location to 'nil' on the false branch.
+ SVal nilV = svalBuilder.makeIntVal(0, T);
+ noElems = noElems->bindLoc(ElementV, nilV);
+ }
+
+ // Create the new nodes.
+ MakeNode(Dst, S, Pred, hasElems);
+ MakeNode(Dst, S, Pred, noElems);
+ }
+}
+
+//===----------------------------------------------------------------------===//
+// Transfer function: Objective-C message expressions.
+//===----------------------------------------------------------------------===//
+
+namespace {
+class ObjCMsgWLItem {
+public:
+ ObjCMessageExpr::const_arg_iterator I;
+ ExplodedNode *N;
+
+ ObjCMsgWLItem(const ObjCMessageExpr::const_arg_iterator &i, ExplodedNode *n)
+ : I(i), N(n) {}
+};
+} // end anonymous namespace
+
+void ExprEngine::VisitObjCMessageExpr(const ObjCMessageExpr* ME,
+ ExplodedNode* Pred,
+ ExplodedNodeSet& Dst){
+
+  // Create a worklist to process the arguments.
+ llvm::SmallVector<ObjCMsgWLItem, 20> WL;
+
+ // But first evaluate the receiver (if any).
+ ObjCMessageExpr::const_arg_iterator AI = ME->arg_begin(), AE = ME->arg_end();
+ if (const Expr *Receiver = ME->getInstanceReceiver()) {
+ ExplodedNodeSet Tmp;
+ Visit(Receiver, Pred, Tmp);
+
+ if (Tmp.empty())
+ return;
+
+ for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I)
+ WL.push_back(ObjCMsgWLItem(AI, *I));
+ }
+ else
+ WL.push_back(ObjCMsgWLItem(AI, Pred));
+
+ // Evaluate the arguments.
+ ExplodedNodeSet ArgsEvaluated;
+ while (!WL.empty()) {
+ ObjCMsgWLItem Item = WL.back();
+ WL.pop_back();
+
+ if (Item.I == AE) {
+ ArgsEvaluated.insert(Item.N);
+ continue;
+ }
+
+ // Evaluate the subexpression.
+ ExplodedNodeSet Tmp;
+
+ // FIXME: [Objective-C++] handle arguments that are references
+ Visit(*Item.I, Item.N, Tmp);
+
+ // Enqueue evaluating the next argument on the worklist.
+ ++(Item.I);
+ for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI)
+ WL.push_back(ObjCMsgWLItem(Item.I, *NI));
+ }
+
+ // Now that the arguments are processed, handle the ObjC message.
+ VisitObjCMessage(ME, ArgsEvaluated, Dst);
+}
+
+void ExprEngine::VisitObjCMessage(const ObjCMessage &msg,
+ ExplodedNodeSet &Src, ExplodedNodeSet& Dst) {
+
+ // Handle the previsits checks.
+ ExplodedNodeSet DstPrevisit;
+ getCheckerManager().runCheckersForPreObjCMessage(DstPrevisit, Src, msg,*this);
+
+  // Proceed with evaluating the message expression.
+ ExplodedNodeSet dstEval;
+
+ for (ExplodedNodeSet::iterator DI = DstPrevisit.begin(),
+ DE = DstPrevisit.end(); DI != DE; ++DI) {
+
+ ExplodedNode *Pred = *DI;
+ bool RaisesException = false;
+ unsigned oldSize = dstEval.size();
+ SaveAndRestore<bool> OldSink(Builder->BuildSinks);
+ SaveOr OldHasGen(Builder->hasGeneratedNode);
+
+ if (const Expr *Receiver = msg.getInstanceReceiver()) {
+ const GRState *state = GetState(Pred);
+ SVal recVal = state->getSVal(Receiver);
+ if (!recVal.isUndef()) {
+ // Bifurcate the state into nil and non-nil ones.
+ DefinedOrUnknownSVal receiverVal = cast<DefinedOrUnknownSVal>(recVal);
+
+ const GRState *notNilState, *nilState;
+ llvm::tie(notNilState, nilState) = state->assume(receiverVal);
+
+        // There are three cases: can be nil or non-nil, must be nil, and
+        // must be non-nil. If the receiver must be nil we simply propagate
+        // the node; the remaining two cases are merged into the non-nil case.
+ if (nilState && !notNilState) {
+ dstEval.insert(Pred);
+ continue;
+ }
+
+ // Check if the "raise" message was sent.
+ assert(notNilState);
+ if (msg.getSelector() == RaiseSel)
+ RaisesException = true;
+
+ // Check if we raise an exception. For now treat these as sinks.
+ // Eventually we will want to handle exceptions properly.
+ if (RaisesException)
+ Builder->BuildSinks = true;
+
+ // Dispatch to plug-in transfer function.
+ evalObjCMessage(dstEval, msg, Pred, notNilState);
+ }
+ }
+ else if (const ObjCInterfaceDecl *Iface = msg.getReceiverInterface()) {
+ IdentifierInfo* ClsName = Iface->getIdentifier();
+ Selector S = msg.getSelector();
+
+ // Check for special instance methods.
+ if (!NSExceptionII) {
+ ASTContext& Ctx = getContext();
+ NSExceptionII = &Ctx.Idents.get("NSException");
+ }
+
+ if (ClsName == NSExceptionII) {
+ enum { NUM_RAISE_SELECTORS = 2 };
+
+ // Lazily create a cache of the selectors.
+ if (!NSExceptionInstanceRaiseSelectors) {
+ ASTContext& Ctx = getContext();
+ NSExceptionInstanceRaiseSelectors =
+ new Selector[NUM_RAISE_SELECTORS];
+ llvm::SmallVector<IdentifierInfo*, NUM_RAISE_SELECTORS> II;
+ unsigned idx = 0;
+
+ // raise:format:
+ II.push_back(&Ctx.Idents.get("raise"));
+ II.push_back(&Ctx.Idents.get("format"));
+ NSExceptionInstanceRaiseSelectors[idx++] =
+ Ctx.Selectors.getSelector(II.size(), &II[0]);
+
+          // raise:format:arguments:
+ II.push_back(&Ctx.Idents.get("arguments"));
+ NSExceptionInstanceRaiseSelectors[idx++] =
+ Ctx.Selectors.getSelector(II.size(), &II[0]);
+ }
+
+ for (unsigned i = 0; i < NUM_RAISE_SELECTORS; ++i)
+ if (S == NSExceptionInstanceRaiseSelectors[i]) {
+ RaisesException = true;
+ break;
+ }
+ }
+
+ // Check if we raise an exception. For now treat these as sinks.
+ // Eventually we will want to handle exceptions properly.
+ if (RaisesException)
+ Builder->BuildSinks = true;
+
+ // Dispatch to plug-in transfer function.
+ evalObjCMessage(dstEval, msg, Pred, Builder->GetState(Pred));
+ }
+
+    // Handle the case where no nodes were generated. Auto-generate a node
+    // that contains the updated state if we aren't generating sinks.
+ if (!Builder->BuildSinks && dstEval.size() == oldSize &&
+ !Builder->hasGeneratedNode)
+ MakeNode(dstEval, msg.getOriginExpr(), Pred, GetState(Pred));
+ }
+
+ // Finally, perform the post-condition check of the ObjCMessageExpr and store
+ // the created nodes in 'Dst'.
+ getCheckerManager().runCheckersForPostObjCMessage(Dst, dstEval, msg, *this);
+}
+
+//===----------------------------------------------------------------------===//
+// Transfer functions: Miscellaneous statements.
+//===----------------------------------------------------------------------===//
+
+void ExprEngine::VisitCast(const CastExpr *CastE, const Expr *Ex,
+ ExplodedNode *Pred, ExplodedNodeSet &Dst) {
+
+ ExplodedNodeSet S1;
+ Visit(Ex, Pred, S1);
+ ExplodedNodeSet S2;
+ getCheckerManager().runCheckersForPreStmt(S2, S1, CastE, *this);
+
+ if (CastE->getCastKind() == CK_LValueToRValue ||
+ CastE->getCastKind() == CK_GetObjCProperty) {
+ for (ExplodedNodeSet::iterator I = S2.begin(), E = S2.end(); I!=E; ++I) {
+ ExplodedNode *subExprNode = *I;
+ const GRState *state = GetState(subExprNode);
+ evalLoad(Dst, CastE, subExprNode, state, state->getSVal(Ex));
+ }
+ return;
+ }
+
+ // All other casts.
+ QualType T = CastE->getType();
+ QualType ExTy = Ex->getType();
+
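+  // For an explicit cast, use the type as written in the source rather than
+  // the expression's semantic type.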
+ if (const ExplicitCastExpr *ExCast=dyn_cast_or_null<ExplicitCastExpr>(CastE))
+ T = ExCast->getTypeAsWritten();
+
+ for (ExplodedNodeSet::iterator I = S2.begin(), E = S2.end(); I != E; ++I) {
+ Pred = *I;
+
+ switch (CastE->getCastKind()) {
+ case CK_ToVoid:
+ Dst.Add(Pred);
+ continue;
+ case CK_LValueToRValue:
+ case CK_NoOp:
+ case CK_FunctionToPointerDecay: {
+ // Copy the SVal of Ex to CastE.
+ const GRState *state = GetState(Pred);
+ SVal V = state->getSVal(Ex);
+ state = state->BindExpr(CastE, V);
+ MakeNode(Dst, CastE, Pred, state);
+ continue;
+ }
+ case CK_GetObjCProperty:
+ case CK_Dependent:
+ case CK_ArrayToPointerDecay:
+ case CK_BitCast:
+ case CK_LValueBitCast:
+ case CK_IntegralCast:
+ case CK_NullToPointer:
+ case CK_IntegralToPointer:
+ case CK_PointerToIntegral:
+ case CK_PointerToBoolean:
+ case CK_IntegralToBoolean:
+ case CK_IntegralToFloating:
+ case CK_FloatingToIntegral:
+ case CK_FloatingToBoolean:
+ case CK_FloatingCast:
+ case CK_FloatingRealToComplex:
+ case CK_FloatingComplexToReal:
+ case CK_FloatingComplexToBoolean:
+ case CK_FloatingComplexCast:
+ case CK_FloatingComplexToIntegralComplex:
+ case CK_IntegralRealToComplex:
+ case CK_IntegralComplexToReal:
+ case CK_IntegralComplexToBoolean:
+ case CK_IntegralComplexCast:
+ case CK_IntegralComplexToFloatingComplex:
+ case CK_AnyPointerToObjCPointerCast:
+ case CK_AnyPointerToBlockPointerCast:
+ case CK_ObjCObjectLValueCast: {
+ // Delegate to SValBuilder to process.
+ const GRState* state = GetState(Pred);
+ SVal V = state->getSVal(Ex);
+ V = svalBuilder.evalCast(V, T, ExTy);
+ state = state->BindExpr(CastE, V);
+ MakeNode(Dst, CastE, Pred, state);
+ continue;
+ }
+ case CK_DerivedToBase:
+ case CK_UncheckedDerivedToBase: {
+ // For DerivedToBase cast, delegate to the store manager.
+ const GRState *state = GetState(Pred);
+ SVal val = state->getSVal(Ex);
+ val = getStoreManager().evalDerivedToBase(val, T);
+ state = state->BindExpr(CastE, val);
+ MakeNode(Dst, CastE, Pred, state);
+ continue;
+ }
+ // Various C++ casts that are not handled yet.
+ case CK_Dynamic:
+ case CK_ToUnion:
+ case CK_BaseToDerived:
+ case CK_NullToMemberPointer:
+ case CK_BaseToDerivedMemberPointer:
+ case CK_DerivedToBaseMemberPointer:
+ case CK_UserDefinedConversion:
+ case CK_ConstructorConversion:
+ case CK_VectorSplat:
+ case CK_MemberPointerToBoolean: {
+      // Recover some path-sensitivity by conjuring a new value.
+ QualType resultType = CastE->getType();
+ if (CastE->isLValue())
+ resultType = getContext().getPointerType(resultType);
+
+ SVal result =
+ svalBuilder.getConjuredSymbolVal(NULL, CastE, resultType,
+ Builder->getCurrentBlockCount());
+
+ const GRState *state = GetState(Pred)->BindExpr(CastE, result);
+ MakeNode(Dst, CastE, Pred, state);
+ continue;
+ }
+ }
+ }
+}
+
+void ExprEngine::VisitCompoundLiteralExpr(const CompoundLiteralExpr* CL,
+ ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+ const InitListExpr* ILE
+ = cast<InitListExpr>(CL->getInitializer()->IgnoreParens());
+ ExplodedNodeSet Tmp;
+ Visit(ILE, Pred, Tmp);
+
+ for (ExplodedNodeSet::iterator I = Tmp.begin(), EI = Tmp.end(); I!=EI; ++I) {
+ const GRState* state = GetState(*I);
+ SVal ILV = state->getSVal(ILE);
+ const LocationContext *LC = (*I)->getLocationContext();
+ state = state->bindCompoundLiteral(CL, LC, ILV);
+
+ if (CL->isLValue()) {
+ MakeNode(Dst, CL, *I, state->BindExpr(CL, state->getLValue(CL, LC)));
+ }
+ else
+ MakeNode(Dst, CL, *I, state->BindExpr(CL, ILV));
+ }
+}
+
+void ExprEngine::VisitDeclStmt(const DeclStmt *DS, ExplodedNode *Pred,
+ ExplodedNodeSet& Dst) {
+
+ // The CFG has one DeclStmt per Decl.
+ const Decl* D = *DS->decl_begin();
+
+ if (!D || !isa<VarDecl>(D))
+ return;
+
+ const VarDecl* VD = dyn_cast<VarDecl>(D);
+ const Expr* InitEx = VD->getInit();
+
+ // FIXME: static variables may have an initializer, but the second
+ // time a function is called those values may not be current.
+ ExplodedNodeSet Tmp;
+
+ if (InitEx) {
+ if (VD->getType()->isReferenceType() && !InitEx->isLValue()) {
+      // If the initializer is a C++ record type, it should already have a
+      // temporary object.
+ if (!InitEx->getType()->isRecordType())
+ CreateCXXTemporaryObject(InitEx, Pred, Tmp);
+ else
+ Tmp.Add(Pred);
+ } else
+ Visit(InitEx, Pred, Tmp);
+ } else
+ Tmp.Add(Pred);
+
+ ExplodedNodeSet Tmp2;
+ getCheckerManager().runCheckersForPreStmt(Tmp2, Tmp, DS, *this);
+
+ for (ExplodedNodeSet::iterator I=Tmp2.begin(), E=Tmp2.end(); I!=E; ++I) {
+ ExplodedNode *N = *I;
+ const GRState *state = GetState(N);
+
+ // Decls without InitExpr are not initialized explicitly.
+ const LocationContext *LC = N->getLocationContext();
+
+ if (InitEx) {
+ SVal InitVal = state->getSVal(InitEx);
+
+ // We bound the temp obj region to the CXXConstructExpr. Now recover
+ // the lazy compound value when the variable is not a reference.
+ if (AMgr.getLangOptions().CPlusPlus && VD->getType()->isRecordType() &&
+ !VD->getType()->isReferenceType() && isa<loc::MemRegionVal>(InitVal)){
+ InitVal = state->getSVal(cast<loc::MemRegionVal>(InitVal).getRegion());
+ assert(isa<nonloc::LazyCompoundVal>(InitVal));
+ }
+
+ // Recover some path-sensitivity if a scalar value evaluated to
+ // UnknownVal.
+ if ((InitVal.isUnknown() ||
+ !getConstraintManager().canReasonAbout(InitVal)) &&
+ !VD->getType()->isReferenceType()) {
+ InitVal = svalBuilder.getConjuredSymbolVal(NULL, InitEx,
+ Builder->getCurrentBlockCount());
+ }
+
+ evalBind(Dst, DS, *I, state,
+ loc::MemRegionVal(state->getRegion(VD, LC)), InitVal, true);
+ }
+ else {
+ state = state->bindDeclWithNoInit(state->getRegion(VD, LC));
+ MakeNode(Dst, DS, *I, state);
+ }
+ }
+}
+
+namespace {
+ // This class is used by VisitInitListExpr as an item in a worklist
+ // for processing the values contained in an InitListExpr.
+class InitListWLItem {
+public:
+ llvm::ImmutableList<SVal> Vals;
+ ExplodedNode* N;
+ InitListExpr::const_reverse_iterator Itr;
+
+ InitListWLItem(ExplodedNode* n, llvm::ImmutableList<SVal> vals,
+ InitListExpr::const_reverse_iterator itr)
+ : Vals(vals), N(n), Itr(itr) {}
+};
+}
+
+
+void ExprEngine::VisitInitListExpr(const InitListExpr* E, ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+
+ const GRState* state = GetState(Pred);
+ QualType T = getContext().getCanonicalType(E->getType());
+ unsigned NumInitElements = E->getNumInits();
+
+ if (T->isArrayType() || T->isRecordType() || T->isVectorType()) {
+ llvm::ImmutableList<SVal> StartVals = getBasicVals().getEmptySValList();
+
+ // Handle base case where the initializer has no elements.
+ // e.g: static int* myArray[] = {};
+ if (NumInitElements == 0) {
+ SVal V = svalBuilder.makeCompoundVal(T, StartVals);
+ MakeNode(Dst, E, Pred, state->BindExpr(E, V));
+ return;
+ }
+
+ // Create a worklist to process the initializers.
+ llvm::SmallVector<InitListWLItem, 10> WorkList;
+ WorkList.reserve(NumInitElements);
+ WorkList.push_back(InitListWLItem(Pred, StartVals, E->rbegin()));
+ InitListExpr::const_reverse_iterator ItrEnd = E->rend();
+ assert(!(E->rbegin() == E->rend()));
+
+ // Process the worklist until it is empty.
+ while (!WorkList.empty()) {
+ InitListWLItem X = WorkList.back();
+ WorkList.pop_back();
+
+ ExplodedNodeSet Tmp;
+ Visit(*X.Itr, X.N, Tmp);
+
+ InitListExpr::const_reverse_iterator NewItr = X.Itr + 1;
+
+ for (ExplodedNodeSet::iterator NI=Tmp.begin(),NE=Tmp.end();NI!=NE;++NI) {
+ // Get the last initializer value.
+ state = GetState(*NI);
+ SVal InitV = state->getSVal(cast<Expr>(*X.Itr));
+
+ // Construct the new list of values by prepending the new value to
+ // the already constructed list.
+ llvm::ImmutableList<SVal> NewVals =
+ getBasicVals().consVals(InitV, X.Vals);
+
+ if (NewItr == ItrEnd) {
+ // Now we have a list holding all init values. Make CompoundValData.
+ SVal V = svalBuilder.makeCompoundVal(T, NewVals);
+
+ // Make final state and node.
+ MakeNode(Dst, E, *NI, state->BindExpr(E, V));
+ }
+ else {
+ // Still some initializer values to go. Push them onto the worklist.
+ WorkList.push_back(InitListWLItem(*NI, NewVals, NewItr));
+ }
+ }
+ }
+
+ return;
+ }
+
+ if (Loc::isLocType(T) || T->isIntegerType()) {
+ assert (E->getNumInits() == 1);
+ ExplodedNodeSet Tmp;
+ const Expr* Init = E->getInit(0);
+ Visit(Init, Pred, Tmp);
+ for (ExplodedNodeSet::iterator I=Tmp.begin(), EI=Tmp.end(); I != EI; ++I) {
+ state = GetState(*I);
+ MakeNode(Dst, E, *I, state->BindExpr(E, state->getSVal(Init)));
+ }
+ return;
+ }
+
+ assert(0 && "unprocessed InitListExpr type");
+}
+
+/// VisitUnaryExprOrTypeTraitExpr - Transfer function for sizeof/alignof
+/// expressions.
+void ExprEngine::VisitUnaryExprOrTypeTraitExpr(
+ const UnaryExprOrTypeTraitExpr* Ex,
+ ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+ QualType T = Ex->getTypeOfArgument();
+
+ if (Ex->getKind() == UETT_SizeOf) {
+ if (!T->isIncompleteType() && !T->isConstantSizeType()) {
+ assert(T->isVariableArrayType() && "Unknown non-constant-sized type.");
+
+ // FIXME: Add support for VLA type arguments, not just VLA expressions.
+ // When that happens, we should probably refactor VLASizeChecker's code.
+ if (Ex->isArgumentType()) {
+ Dst.Add(Pred);
+ return;
+ }
+
+ // Get the size by getting the extent of the sub-expression.
+ // First, visit the sub-expression to find its region.
+ const Expr *Arg = Ex->getArgumentExpr();
+ ExplodedNodeSet Tmp;
+ Visit(Arg, Pred, Tmp);
+
+ for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
+ const GRState* state = GetState(*I);
+ const MemRegion *MR = state->getSVal(Arg).getAsRegion();
+
+ // If the subexpression can't be resolved to a region, we don't know
+ // anything about its size. Just leave the state as is and continue.
+ if (!MR) {
+ Dst.Add(*I);
+ continue;
+ }
+
+ // The result is the extent of the VLA.
+ SVal Extent = cast<SubRegion>(MR)->getExtent(svalBuilder);
+ MakeNode(Dst, Ex, *I, state->BindExpr(Ex, Extent));
+ }
+
+ return;
+ }
+ else if (T->getAs<ObjCObjectType>()) {
+      // Some code tries to take the sizeof of an ObjCObjectType, relying on
+      // the compiler having laid out its representation. Just report Unknown
+ // for these.
+ Dst.Add(Pred);
+ return;
+ }
+ }
+
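+  // All remaining cases fold to an integer constant; evaluate the expression
+  // and bind the resulting value.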
+ Expr::EvalResult Result;
+ Ex->Evaluate(Result, getContext());
+ CharUnits amt = CharUnits::fromQuantity(Result.Val.getInt().getZExtValue());
+
+ MakeNode(Dst, Ex, Pred,
+ GetState(Pred)->BindExpr(Ex,
+ svalBuilder.makeIntVal(amt.getQuantity(), Ex->getType())));
+}
+
+void ExprEngine::VisitOffsetOfExpr(const OffsetOfExpr* OOE,
+ ExplodedNode* Pred, ExplodedNodeSet& Dst) {
+ Expr::EvalResult Res;
+ if (OOE->Evaluate(Res, getContext()) && Res.Val.isInt()) {
+ const APSInt &IV = Res.Val.getInt();
+ assert(IV.getBitWidth() == getContext().getTypeSize(OOE->getType()));
+ assert(OOE->getType()->isIntegerType());
+ assert(IV.isSigned() == OOE->getType()->isSignedIntegerType());
+ SVal X = svalBuilder.makeIntVal(IV);
+ MakeNode(Dst, OOE, Pred, GetState(Pred)->BindExpr(OOE, X));
+ return;
+ }
+ // FIXME: Handle the case where __builtin_offsetof is not a constant.
+ Dst.Add(Pred);
+}
+
+void ExprEngine::VisitUnaryOperator(const UnaryOperator* U,
+ ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+
+ switch (U->getOpcode()) {
+
+ default:
+ break;
+
+ case UO_Real: {
+ const Expr* Ex = U->getSubExpr()->IgnoreParens();
+ ExplodedNodeSet Tmp;
+ Visit(Ex, Pred, Tmp);
+
+ for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
+
+ // FIXME: We don't have complex SValues yet.
+ if (Ex->getType()->isAnyComplexType()) {
+ // Just report "Unknown."
+ Dst.Add(*I);
+ continue;
+ }
+
+ // For all other types, UO_Real is an identity operation.
+ assert (U->getType() == Ex->getType());
+ const GRState* state = GetState(*I);
+ MakeNode(Dst, U, *I, state->BindExpr(U, state->getSVal(Ex)));
+ }
+
+ return;
+ }
+
+ case UO_Imag: {
+
+ const Expr* Ex = U->getSubExpr()->IgnoreParens();
+ ExplodedNodeSet Tmp;
+ Visit(Ex, Pred, Tmp);
+
+ for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
+ // FIXME: We don't have complex SValues yet.
+ if (Ex->getType()->isAnyComplexType()) {
+ // Just report "Unknown."
+ Dst.Add(*I);
+ continue;
+ }
+
+ // For all other types, UO_Imag returns 0.
+ const GRState* state = GetState(*I);
+ SVal X = svalBuilder.makeZeroVal(Ex->getType());
+ MakeNode(Dst, U, *I, state->BindExpr(U, X));
+ }
+
+ return;
+ }
+
+ case UO_Plus:
+ assert(!U->isLValue());
+ // FALL-THROUGH.
+ case UO_Deref:
+ case UO_AddrOf:
+ case UO_Extension: {
+
+ // Unary "+" is a no-op, similar to a parentheses. We still have places
+ // where it may be a block-level expression, so we need to
+ // generate an extra node that just propagates the value of the
+ // subexpression.
+
+ const Expr* Ex = U->getSubExpr()->IgnoreParens();
+ ExplodedNodeSet Tmp;
+ Visit(Ex, Pred, Tmp);
+
+ for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
+ const GRState* state = GetState(*I);
+ MakeNode(Dst, U, *I, state->BindExpr(U, state->getSVal(Ex)));
+ }
+
+ return;
+ }
+
+ case UO_LNot:
+ case UO_Minus:
+ case UO_Not: {
+ assert (!U->isLValue());
+ const Expr* Ex = U->getSubExpr()->IgnoreParens();
+ ExplodedNodeSet Tmp;
+ Visit(Ex, Pred, Tmp);
+
+ for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
+ const GRState* state = GetState(*I);
+
+ // Get the value of the subexpression.
+ SVal V = state->getSVal(Ex);
+
+ if (V.isUnknownOrUndef()) {
+ MakeNode(Dst, U, *I, state->BindExpr(U, V));
+ continue;
+ }
+
+// QualType DstT = getContext().getCanonicalType(U->getType());
+// QualType SrcT = getContext().getCanonicalType(Ex->getType());
+//
+// if (DstT != SrcT) // Perform promotions.
+// V = evalCast(V, DstT);
+//
+// if (V.isUnknownOrUndef()) {
+// MakeNode(Dst, U, *I, BindExpr(St, U, V));
+// continue;
+// }
+
+ switch (U->getOpcode()) {
+ default:
+ assert(false && "Invalid Opcode.");
+ break;
+
+ case UO_Not:
+ // FIXME: Do we need to handle promotions?
+ state = state->BindExpr(U, evalComplement(cast<NonLoc>(V)));
+ break;
+
+ case UO_Minus:
+ // FIXME: Do we need to handle promotions?
+ state = state->BindExpr(U, evalMinus(cast<NonLoc>(V)));
+ break;
+
+ case UO_LNot:
+
+ // C99 6.5.3.3: "The expression !E is equivalent to (0==E)."
+ //
+ // Note: technically we do "E == 0", but this is the same in the
+ // transfer functions as "0 == E".
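+        //
+        // For example, "!p" for a pointer 'p' is evaluated below as the
+        // location comparison "p == null".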
+ SVal Result;
+
+ if (isa<Loc>(V)) {
+ Loc X = svalBuilder.makeNull();
+ Result = evalBinOp(state, BO_EQ, cast<Loc>(V), X,
+ U->getType());
+ }
+ else {
+ nonloc::ConcreteInt X(getBasicVals().getValue(0, Ex->getType()));
+ Result = evalBinOp(state, BO_EQ, cast<NonLoc>(V), X,
+ U->getType());
+ }
+
+ state = state->BindExpr(U, Result);
+
+ break;
+ }
+
+ MakeNode(Dst, U, *I, state);
+ }
+
+ return;
+ }
+ }
+
+ // Handle ++ and -- (both pre- and post-increment).
+ assert (U->isIncrementDecrementOp());
+ ExplodedNodeSet Tmp;
+ const Expr* Ex = U->getSubExpr()->IgnoreParens();
+ Visit(Ex, Pred, Tmp);
+
+ for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I!=E; ++I) {
+
+ const GRState* state = GetState(*I);
+ SVal loc = state->getSVal(Ex);
+
+ // Perform a load.
+ ExplodedNodeSet Tmp2;
+ evalLoad(Tmp2, Ex, *I, state, loc);
+
+ for (ExplodedNodeSet::iterator I2=Tmp2.begin(), E2=Tmp2.end();I2!=E2;++I2) {
+
+ state = GetState(*I2);
+ SVal V2_untested = state->getSVal(Ex);
+
+ // Propagate unknown and undefined values.
+ if (V2_untested.isUnknownOrUndef()) {
+ MakeNode(Dst, U, *I2, state->BindExpr(U, V2_untested));
+ continue;
+ }
+ DefinedSVal V2 = cast<DefinedSVal>(V2_untested);
+
+ // Handle all other values.
+ BinaryOperator::Opcode Op = U->isIncrementOp() ? BO_Add
+ : BO_Sub;
+
+ // If the UnaryOperator has non-location type, use its type to create the
+ // constant value. If the UnaryOperator has location type, create the
+ // constant with int type and pointer width.
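+      // For instance, "++i" on an 'int' adds makeIntVal(1, int), while "++p"
+      // on a pointer adds an array index of 1 so pointer arithmetic applies.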
+ SVal RHS;
+
+ if (U->getType()->isAnyPointerType())
+ RHS = svalBuilder.makeArrayIndex(1);
+ else
+ RHS = svalBuilder.makeIntVal(1, U->getType());
+
+ SVal Result = evalBinOp(state, Op, V2, RHS, U->getType());
+
+ // Conjure a new symbol if necessary to recover precision.
+ if (Result.isUnknown() || !getConstraintManager().canReasonAbout(Result)){
+ DefinedOrUnknownSVal SymVal =
+ svalBuilder.getConjuredSymbolVal(NULL, Ex,
+ Builder->getCurrentBlockCount());
+ Result = SymVal;
+
+ // If the value is a location, ++/-- should always preserve
+ // non-nullness. Check if the original value was non-null, and if so
+ // propagate that constraint.
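+        // For example, if 'p' was known to be non-null before "++p", the
+        // conjured result below is also constrained to be non-null.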
+ if (Loc::isLocType(U->getType())) {
+ DefinedOrUnknownSVal Constraint =
+ svalBuilder.evalEQ(state, V2,svalBuilder.makeZeroVal(U->getType()));
+
+ if (!state->assume(Constraint, true)) {
+ // It isn't feasible for the original value to be null.
+ // Propagate this constraint.
+ Constraint = svalBuilder.evalEQ(state, SymVal,
+ svalBuilder.makeZeroVal(U->getType()));
+
+ state = state->assume(Constraint, false);
+ assert(state);
+ }
+ }
+ }
+
+ // Since the lvalue-to-rvalue conversion is explicit in the AST,
+ // we bind an l-value if the operator is prefix and an lvalue (in C++).
+ if (U->isLValue())
+ state = state->BindExpr(U, loc);
+ else
+ state = state->BindExpr(U, V2);
+
+ // Perform the store.
+ evalStore(Dst, NULL, U, *I2, state, loc, Result);
+ }
+ }
+}
+
+void ExprEngine::VisitAsmStmt(const AsmStmt* A, ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+ VisitAsmStmtHelperOutputs(A, A->begin_outputs(), A->end_outputs(), Pred, Dst);
+}
+
+void ExprEngine::VisitAsmStmtHelperOutputs(const AsmStmt* A,
+ AsmStmt::const_outputs_iterator I,
+ AsmStmt::const_outputs_iterator E,
+ ExplodedNode* Pred, ExplodedNodeSet& Dst) {
+ if (I == E) {
+ VisitAsmStmtHelperInputs(A, A->begin_inputs(), A->end_inputs(), Pred, Dst);
+ return;
+ }
+
+ ExplodedNodeSet Tmp;
+ Visit(*I, Pred, Tmp);
+ ++I;
+
+ for (ExplodedNodeSet::iterator NI = Tmp.begin(), NE = Tmp.end();NI != NE;++NI)
+ VisitAsmStmtHelperOutputs(A, I, E, *NI, Dst);
+}
+
+void ExprEngine::VisitAsmStmtHelperInputs(const AsmStmt* A,
+ AsmStmt::const_inputs_iterator I,
+ AsmStmt::const_inputs_iterator E,
+ ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+ if (I == E) {
+
+ // We have processed both the inputs and the outputs. All of the outputs
+ // should evaluate to Locs. Nuke all of their values.
+
+ // FIXME: Some day in the future it would be nice to allow a "plug-in"
+ // which interprets the inline asm and stores proper results in the
+ // outputs.
+
+ const GRState* state = GetState(Pred);
+
+ for (AsmStmt::const_outputs_iterator OI = A->begin_outputs(),
+ OE = A->end_outputs(); OI != OE; ++OI) {
+
+ SVal X = state->getSVal(*OI);
+      assert (!isa<NonLoc>(X));  // Should be an Lval, unknown, or undef.
+
+ if (isa<Loc>(X))
+ state = state->bindLoc(cast<Loc>(X), UnknownVal());
+ }
+
+ MakeNode(Dst, A, Pred, state);
+ return;
+ }
+
+ ExplodedNodeSet Tmp;
+ Visit(*I, Pred, Tmp);
+
+ ++I;
+
+ for (ExplodedNodeSet::iterator NI = Tmp.begin(), NE = Tmp.end(); NI!=NE; ++NI)
+ VisitAsmStmtHelperInputs(A, I, E, *NI, Dst);
+}
+
+void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
+ ExplodedNodeSet &Dst) {
+ ExplodedNodeSet Src;
+ if (const Expr *RetE = RS->getRetValue()) {
+ // Record the returned expression in the state. It will be used in
+ // processCallExit to bind the return value to the call expr.
+ {
+ static int tag = 0;
+ const GRState *state = GetState(Pred);
+ state = state->set<ReturnExpr>(RetE);
+ Pred = Builder->generateNode(RetE, state, Pred, &tag);
+ }
+ // We may get a NULL Pred because we generated a cached node.
+ if (Pred)
+ Visit(RetE, Pred, Src);
+ }
+ else {
+ Src.Add(Pred);
+ }
+
+ ExplodedNodeSet CheckedSet;
+ getCheckerManager().runCheckersForPreStmt(CheckedSet, Src, RS, *this);
+
+ for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
+ I != E; ++I) {
+
+ assert(Builder && "StmtNodeBuilder must be defined.");
+
+ Pred = *I;
+ unsigned size = Dst.size();
+
+ SaveAndRestore<bool> OldSink(Builder->BuildSinks);
+ SaveOr OldHasGen(Builder->hasGeneratedNode);
+
+ getTF().evalReturn(Dst, *this, *Builder, RS, Pred);
+
+    // Handle the case where no nodes were generated.
+ if (!Builder->BuildSinks && Dst.size() == size &&
+ !Builder->hasGeneratedNode)
+ MakeNode(Dst, RS, Pred, GetState(Pred));
+ }
+}
+
+//===----------------------------------------------------------------------===//
+// Transfer functions: Binary operators.
+//===----------------------------------------------------------------------===//
+
+void ExprEngine::VisitBinaryOperator(const BinaryOperator* B,
+ ExplodedNode* Pred,
+ ExplodedNodeSet& Dst) {
+ ExplodedNodeSet Tmp1;
+ Expr* LHS = B->getLHS()->IgnoreParens();
+ Expr* RHS = B->getRHS()->IgnoreParens();
+
+ Visit(LHS, Pred, Tmp1);
+ ExplodedNodeSet Tmp3;
+
+ for (ExplodedNodeSet::iterator I1=Tmp1.begin(), E1=Tmp1.end(); I1!=E1; ++I1) {
+ SVal LeftV = GetState(*I1)->getSVal(LHS);
+ ExplodedNodeSet Tmp2;
+ Visit(RHS, *I1, Tmp2);
+
+ ExplodedNodeSet CheckedSet;
+ getCheckerManager().runCheckersForPreStmt(CheckedSet, Tmp2, B, *this);
+
+ // With both the LHS and RHS evaluated, process the operation itself.
+
+ for (ExplodedNodeSet::iterator I2=CheckedSet.begin(), E2=CheckedSet.end();
+ I2 != E2; ++I2) {
+
+ const GRState *state = GetState(*I2);
+ SVal RightV = state->getSVal(RHS);
+
+ BinaryOperator::Opcode Op = B->getOpcode();
+
+ if (Op == BO_Assign) {
+ // EXPERIMENTAL: "Conjured" symbols.
+ // FIXME: Handle structs.
+ if (RightV.isUnknown() ||!getConstraintManager().canReasonAbout(RightV))
+ {
+ unsigned Count = Builder->getCurrentBlockCount();
+ RightV = svalBuilder.getConjuredSymbolVal(NULL, B->getRHS(), Count);
+ }
+
+ SVal ExprVal = B->isLValue() ? LeftV : RightV;
+
+ // Simulate the effects of a "store": bind the value of the RHS
+ // to the L-Value represented by the LHS.
+ evalStore(Tmp3, B, LHS, *I2, state->BindExpr(B, ExprVal), LeftV,RightV);
+ continue;
+ }
+
+ if (!B->isAssignmentOp()) {
+ // Process non-assignments except commas or short-circuited
+ // logical expressions (LAnd and LOr).
+ SVal Result = evalBinOp(state, Op, LeftV, RightV, B->getType());
+
+ if (Result.isUnknown()) {
+ MakeNode(Tmp3, B, *I2, state);
+ continue;
+ }
+
+ state = state->BindExpr(B, Result);
+
+ MakeNode(Tmp3, B, *I2, state);
+ continue;
+ }
+
+ assert (B->isCompoundAssignmentOp());
+
+ switch (Op) {
+ default:
+ assert(0 && "Invalid opcode for compound assignment.");
+ case BO_MulAssign: Op = BO_Mul; break;
+ case BO_DivAssign: Op = BO_Div; break;
+ case BO_RemAssign: Op = BO_Rem; break;
+ case BO_AddAssign: Op = BO_Add; break;
+ case BO_SubAssign: Op = BO_Sub; break;
+ case BO_ShlAssign: Op = BO_Shl; break;
+ case BO_ShrAssign: Op = BO_Shr; break;
+ case BO_AndAssign: Op = BO_And; break;
+ case BO_XorAssign: Op = BO_Xor; break;
+ case BO_OrAssign: Op = BO_Or; break;
+ }
+
+ // Perform a load (the LHS). This performs the checks for
+ // null dereferences, and so on.
+ ExplodedNodeSet Tmp4;
+ SVal location = state->getSVal(LHS);
+ evalLoad(Tmp4, LHS, *I2, state, location);
+
+ for (ExplodedNodeSet::iterator I4=Tmp4.begin(), E4=Tmp4.end(); I4!=E4;
+ ++I4) {
+ state = GetState(*I4);
+ SVal V = state->getSVal(LHS);
+
+ // Get the computation type.
+ QualType CTy =
+ cast<CompoundAssignOperator>(B)->getComputationResultType();
+ CTy = getContext().getCanonicalType(CTy);
+
+ QualType CLHSTy =
+ cast<CompoundAssignOperator>(B)->getComputationLHSType();
+ CLHSTy = getContext().getCanonicalType(CLHSTy);
+
+ QualType LTy = getContext().getCanonicalType(LHS->getType());
+
+ // Promote LHS.
+ V = svalBuilder.evalCast(V, CLHSTy, LTy);
+
+ // Compute the result of the operation.
+ SVal Result = svalBuilder.evalCast(evalBinOp(state, Op, V, RightV, CTy),
+ B->getType(), CTy);
+
+ // EXPERIMENTAL: "Conjured" symbols.
+ // FIXME: Handle structs.
+
+ SVal LHSVal;
+
+ if (Result.isUnknown() ||
+ !getConstraintManager().canReasonAbout(Result)) {
+
+ unsigned Count = Builder->getCurrentBlockCount();
+
+ // The symbolic value is actually for the type of the left-hand side
+ // expression, not the computation type, as this is the value the
+ // LValue on the LHS will bind to.
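+            // For example, in "c += 1" with 'char c', the conjured symbol has
+            // type 'char' (the LHS type) and is then cast to 'int' (the
+            // computation type) to form the result of the operation.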
+ LHSVal = svalBuilder.getConjuredSymbolVal(NULL, B->getRHS(), LTy, Count);
+
+ // However, we need to convert the symbol to the computation type.
+ Result = svalBuilder.evalCast(LHSVal, CTy, LTy);
+ }
+ else {
+          // The left-hand side may bind to a different value than the
+ // computation type.
+ LHSVal = svalBuilder.evalCast(Result, LTy, CTy);
+ }
+
+ // In C++, assignment and compound assignment operators return an
+ // lvalue.
+ if (B->isLValue())
+ state = state->BindExpr(B, location);
+ else
+ state = state->BindExpr(B, Result);
+
+ evalStore(Tmp3, B, LHS, *I4, state, location, LHSVal);
+ }
+ }
+ }
+
+ getCheckerManager().runCheckersForPostStmt(Dst, Tmp3, B, *this);
+}
+
+//===----------------------------------------------------------------------===//
+// Visualization.
+//===----------------------------------------------------------------------===//
+
+#ifndef NDEBUG
+static ExprEngine* GraphPrintCheckerState;
+static SourceManager* GraphPrintSourceManager;
+
+namespace llvm {
+template<>
+struct DOTGraphTraits<ExplodedNode*> :
+ public DefaultDOTGraphTraits {
+
+ DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {}
+
+ // FIXME: Since we do not cache error nodes in ExprEngine now, this does not
+ // work.
+ static std::string getNodeAttributes(const ExplodedNode* N, void*) {
+
+#if 0
+ // FIXME: Replace with a general scheme to tell if the node is
+ // an error node.
+ if (GraphPrintCheckerState->isImplicitNullDeref(N) ||
+ GraphPrintCheckerState->isExplicitNullDeref(N) ||
+ GraphPrintCheckerState->isUndefDeref(N) ||
+ GraphPrintCheckerState->isUndefStore(N) ||
+ GraphPrintCheckerState->isUndefControlFlow(N) ||
+ GraphPrintCheckerState->isUndefResult(N) ||
+ GraphPrintCheckerState->isBadCall(N) ||
+ GraphPrintCheckerState->isUndefArg(N))
+ return "color=\"red\",style=\"filled\"";
+
+ if (GraphPrintCheckerState->isNoReturnCall(N))
+ return "color=\"blue\",style=\"filled\"";
+#endif
+ return "";
+ }
+
+ static std::string getNodeLabel(const ExplodedNode* N, void*){
+
+ std::string sbuf;
+ llvm::raw_string_ostream Out(sbuf);
+
+ // Program Location.
+ ProgramPoint Loc = N->getLocation();
+
+ switch (Loc.getKind()) {
+ case ProgramPoint::BlockEntranceKind:
+ Out << "Block Entrance: B"
+ << cast<BlockEntrance>(Loc).getBlock()->getBlockID();
+ break;
+
+ case ProgramPoint::BlockExitKind:
+ assert (false);
+ break;
+
+ case ProgramPoint::CallEnterKind:
+ Out << "CallEnter";
+ break;
+
+ case ProgramPoint::CallExitKind:
+ Out << "CallExit";
+ break;
+
+ default: {
+ if (StmtPoint *L = dyn_cast<StmtPoint>(&Loc)) {
+ const Stmt* S = L->getStmt();
+ SourceLocation SLoc = S->getLocStart();
+
+ Out << S->getStmtClassName() << ' ' << (void*) S << ' ';
+ LangOptions LO; // FIXME.
+ S->printPretty(Out, 0, PrintingPolicy(LO));
+
+ if (SLoc.isFileID()) {
+ Out << "\\lline="
+ << GraphPrintSourceManager->getInstantiationLineNumber(SLoc)
+ << " col="
+ << GraphPrintSourceManager->getInstantiationColumnNumber(SLoc)
+ << "\\l";
+ }
+
+ if (isa<PreStmt>(Loc))
+ Out << "\\lPreStmt\\l;";
+ else if (isa<PostLoad>(Loc))
+ Out << "\\lPostLoad\\l;";
+ else if (isa<PostStore>(Loc))
+ Out << "\\lPostStore\\l";
+ else if (isa<PostLValue>(Loc))
+ Out << "\\lPostLValue\\l";
+
+#if 0
+ // FIXME: Replace with a general scheme to determine
+ // the name of the check.
+ if (GraphPrintCheckerState->isImplicitNullDeref(N))
+ Out << "\\|Implicit-Null Dereference.\\l";
+ else if (GraphPrintCheckerState->isExplicitNullDeref(N))
+ Out << "\\|Explicit-Null Dereference.\\l";
+ else if (GraphPrintCheckerState->isUndefDeref(N))
+ Out << "\\|Dereference of undefialied value.\\l";
+ else if (GraphPrintCheckerState->isUndefStore(N))
+ Out << "\\|Store to Undefined Loc.";
+ else if (GraphPrintCheckerState->isUndefResult(N))
+ Out << "\\|Result of operation is undefined.";
+ else if (GraphPrintCheckerState->isNoReturnCall(N))
+ Out << "\\|Call to function marked \"noreturn\".";
+ else if (GraphPrintCheckerState->isBadCall(N))
+ Out << "\\|Call to NULL/Undefined.";
+ else if (GraphPrintCheckerState->isUndefArg(N))
+ Out << "\\|Argument in call is undefined";
+#endif
+
+ break;
+ }
+
+ const BlockEdge& E = cast<BlockEdge>(Loc);
+ Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B"
+ << E.getDst()->getBlockID() << ')';
+
+ if (const Stmt* T = E.getSrc()->getTerminator()) {
+
+ SourceLocation SLoc = T->getLocStart();
+
+ Out << "\\|Terminator: ";
+ LangOptions LO; // FIXME.
+ E.getSrc()->printTerminator(Out, LO);
+
+ if (SLoc.isFileID()) {
+ Out << "\\lline="
+ << GraphPrintSourceManager->getInstantiationLineNumber(SLoc)
+ << " col="
+ << GraphPrintSourceManager->getInstantiationColumnNumber(SLoc);
+ }
+
+ if (isa<SwitchStmt>(T)) {
+ const Stmt* Label = E.getDst()->getLabel();
+
+ if (Label) {
+ if (const CaseStmt* C = dyn_cast<CaseStmt>(Label)) {
+ Out << "\\lcase ";
+ LangOptions LO; // FIXME.
+ C->getLHS()->printPretty(Out, 0, PrintingPolicy(LO));
+
+ if (const Stmt* RHS = C->getRHS()) {
+ Out << " .. ";
+ RHS->printPretty(Out, 0, PrintingPolicy(LO));
+ }
+
+ Out << ":";
+ }
+ else {
+ assert (isa<DefaultStmt>(Label));
+ Out << "\\ldefault:";
+ }
+ }
+ else
+ Out << "\\l(implicit) default:";
+ }
+ else if (isa<IndirectGotoStmt>(T)) {
+ // FIXME
+ }
+ else {
+ Out << "\\lCondition: ";
+ if (*E.getSrc()->succ_begin() == E.getDst())
+ Out << "true";
+ else
+ Out << "false";
+ }
+
+ Out << "\\l";
+ }
+
+#if 0
+ // FIXME: Replace with a general scheme to determine
+ // the name of the check.
+ if (GraphPrintCheckerState->isUndefControlFlow(N)) {
+ Out << "\\|Control-flow based on\\lUndefined value.\\l";
+ }
+#endif
+ }
+ }
+
+ const GRState *state = N->getState();
+ Out << "\\|StateID: " << (void*) state
+ << " NodeID: " << (void*) N << "\\|";
+ state->printDOT(Out, *N->getLocationContext()->getCFG());
+ Out << "\\l";
+ return Out.str();
+ }
+};
+} // end llvm namespace
+#endif
+
+#ifndef NDEBUG
+template <typename ITERATOR>
+ExplodedNode* GetGraphNode(ITERATOR I) { return *I; }
+
+template <> ExplodedNode*
+GetGraphNode<llvm::DenseMap<ExplodedNode*, Expr*>::iterator>
+ (llvm::DenseMap<ExplodedNode*, Expr*>::iterator I) {
+ return I->first;
+}
+#endif
+
+void ExprEngine::ViewGraph(bool trim) {
+#ifndef NDEBUG
+ if (trim) {
+ std::vector<ExplodedNode*> Src;
+
+ // Flush any outstanding reports to make sure we cover all the nodes.
+ // This does not cause them to get displayed.
+ for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I)
+ const_cast<BugType*>(*I)->FlushReports(BR);
+
+ // Iterate through the reports and get their nodes.
+ for (BugReporter::EQClasses_iterator
+ EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) {
+ BugReportEquivClass& EQ = *EI;
+ const BugReport &R = **EQ.begin();
+ ExplodedNode *N = const_cast<ExplodedNode*>(R.getErrorNode());
+ if (N) Src.push_back(N);
+ }
+
+ ViewGraph(&Src[0], &Src[0]+Src.size());
+ }
+ else {
+ GraphPrintCheckerState = this;
+ GraphPrintSourceManager = &getContext().getSourceManager();
+
+ llvm::ViewGraph(*G.roots_begin(), "ExprEngine");
+
+ GraphPrintCheckerState = NULL;
+ GraphPrintSourceManager = NULL;
+ }
+#endif
+}
+
+void ExprEngine::ViewGraph(ExplodedNode** Beg, ExplodedNode** End) {
+#ifndef NDEBUG
+ GraphPrintCheckerState = this;
+ GraphPrintSourceManager = &getContext().getSourceManager();
+
+ std::auto_ptr<ExplodedGraph> TrimmedG(G.Trim(Beg, End).first);
+
+ if (!TrimmedG.get())
+ llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n";
+ else
+ llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine");
+
+ GraphPrintCheckerState = NULL;
+ GraphPrintSourceManager = NULL;
+#endif
+}
diff --git a/lib/StaticAnalyzer/Core/FlatStore.cpp b/lib/StaticAnalyzer/Core/FlatStore.cpp
index 99a5ead..7bdca6b 100644
--- a/lib/StaticAnalyzer/Core/FlatStore.cpp
+++ b/lib/StaticAnalyzer/Core/FlatStore.cpp
@@ -90,6 +90,19 @@ StoreManager *ento::CreateFlatStoreManager(GRStateManager &StMgr) {
}
SVal FlatStoreManager::Retrieve(Store store, Loc L, QualType T) {
+ // For access to concrete addresses, return UnknownVal. Checks
+ // for null dereferences (and similar errors) are done by checkers, not
+ // the Store.
+ // FIXME: We can consider lazily symbolicating such memory, but we really
+  // should defer this until we can reason easily about symbolicating arrays
+ // of bytes.
+ if (isa<loc::ConcreteInt>(L)) {
+ return UnknownVal();
+ }
+ if (!isa<loc::MemRegionVal>(L)) {
+ return UnknownVal();
+ }
+
const MemRegion *R = cast<loc::MemRegionVal>(L).getRegion();
RegionInterval RI = RegionToInterval(R);
// FIXME: FlatStore should handle regions with unknown intervals.
diff --git a/lib/StaticAnalyzer/Core/ObjCMessage.cpp b/lib/StaticAnalyzer/Core/ObjCMessage.cpp
index 2e370d6..c005819 100644
--- a/lib/StaticAnalyzer/Core/ObjCMessage.cpp
+++ b/lib/StaticAnalyzer/Core/ObjCMessage.cpp
@@ -37,6 +37,35 @@ Selector ObjCMessage::getSelector() const {
return propE->getGetterSelector();
}
+ObjCMethodFamily ObjCMessage::getMethodFamily() const {
+ assert(isValid() && "This ObjCMessage is uninitialized!");
+ // Case 1. Explicit message send.
+ if (const ObjCMessageExpr *msgE = dyn_cast<ObjCMessageExpr>(MsgOrPropE))
+ return msgE->getMethodFamily();
+
+ const ObjCPropertyRefExpr *propE = cast<ObjCPropertyRefExpr>(MsgOrPropE);
+
+ // Case 2. Reference to implicit property.
+ if (propE->isImplicitProperty()) {
+ if (isPropertySetter())
+ return propE->getImplicitPropertySetter()->getMethodFamily();
+ else
+ return propE->getImplicitPropertyGetter()->getMethodFamily();
+ }
+
+ // Case 3. Reference to explicit property.
+ const ObjCPropertyDecl *prop = propE->getExplicitProperty();
+ if (isPropertySetter()) {
+ if (prop->getSetterMethodDecl())
+ return prop->getSetterMethodDecl()->getMethodFamily();
+ return prop->getSetterName().getMethodFamily();
+ } else {
+ if (prop->getGetterMethodDecl())
+ return prop->getGetterMethodDecl()->getMethodFamily();
+ return prop->getGetterName().getMethodFamily();
+ }
+}
+
const ObjCMethodDecl *ObjCMessage::getMethodDecl() const {
assert(isValid() && "This ObjCMessage is uninitialized!");
if (const ObjCMessageExpr *msgE = dyn_cast<ObjCMessageExpr>(MsgOrPropE))
@@ -80,13 +109,27 @@ const Expr *ObjCMessage::getArgExpr(unsigned i) const {
}
QualType CallOrObjCMessage::getResultType(ASTContext &ctx) const {
+ QualType resultTy;
+ bool isLVal = false;
+
if (CallE) {
+ isLVal = CallE->isLValue();
const Expr *Callee = CallE->getCallee();
if (const FunctionDecl *FD = State->getSVal(Callee).getAsFunctionDecl())
- return FD->getResultType();
- return CallE->getType();
+ resultTy = FD->getResultType();
+ else
+ resultTy = CallE->getType();
+ }
+ else {
+ isLVal = isa<ObjCMessageExpr>(Msg.getOriginExpr()) &&
+ Msg.getOriginExpr()->isLValue();
+ resultTy = Msg.getResultType(ctx);
}
- return Msg.getResultType(ctx);
+
+ if (isLVal)
+ resultTy = ctx.getPointerType(resultTy);
+
+ return resultTy;
}
SVal CallOrObjCMessage::getArgSValAsScalarOrLoc(unsigned i) const {
@@ -97,3 +140,10 @@ SVal CallOrObjCMessage::getArgSValAsScalarOrLoc(unsigned i) const {
return Msg.getArgSVal(i, State);
return UnknownVal();
}
+
+SVal CallOrObjCMessage::getCXXCallee() const {
+ assert(isCXXCall());
+ const Expr *callee =
+ cast<CXXMemberCallExpr>(CallE)->getImplicitObjectArgument();
+ return State->getSVal(callee);
+}
diff --git a/lib/StaticAnalyzer/Core/RegionStore.cpp b/lib/StaticAnalyzer/Core/RegionStore.cpp
index 19e0e12..4522f97 100644
--- a/lib/StaticAnalyzer/Core/RegionStore.cpp
+++ b/lib/StaticAnalyzer/Core/RegionStore.cpp
@@ -337,6 +337,9 @@ public: // Part of public interface to class.
SVal RetrieveFieldOrElementCommon(Store store, const TypedRegion *R,
QualType Ty, const MemRegion *superR);
+
+ SVal RetrieveLazyBinding(const MemRegion *lazyBindingRegion,
+ Store lazyBindingStore);
/// Retrieve the values in a struct and return a CompoundVal, used when doing
/// struct copy:
@@ -355,7 +358,8 @@ public: // Part of public interface to class.
/// Get the state and region whose binding this region R corresponds to.
std::pair<Store, const MemRegion*>
- GetLazyBinding(RegionBindings B, const MemRegion *R);
+ GetLazyBinding(RegionBindings B, const MemRegion *R,
+ const MemRegion *originalRegion);
StoreRef CopyLazyBindings(nonloc::LazyCompoundVal V, Store store,
const TypedRegion *R);
@@ -684,11 +688,11 @@ void invalidateRegionsWorker::VisitBaseRegion(const MemRegion *baseR) {
QualType T = TR->getValueType();
// Invalidate the binding.
- if (T->isStructureType()) {
+ if (T->isStructureOrClassType()) {
// Invalidate the region by setting its default value to
   // conjured symbol. The type of the symbol is irrelevant.
- DefinedOrUnknownSVal V = svalBuilder.getConjuredSymbolVal(baseR, Ex, Ctx.IntTy,
- Count);
+ DefinedOrUnknownSVal V =
+ svalBuilder.getConjuredSymbolVal(baseR, Ex, Ctx.IntTy, Count);
B = RM.addBinding(B, baseR, BindingKey::Default, V);
return;
}
@@ -976,15 +980,20 @@ SVal RegionStoreManager::Retrieve(Store store, Loc L, QualType T) {
}
std::pair<Store, const MemRegion *>
-RegionStoreManager::GetLazyBinding(RegionBindings B, const MemRegion *R) {
- if (Optional<SVal> OV = getDirectBinding(B, R))
- if (const nonloc::LazyCompoundVal *V =
- dyn_cast<nonloc::LazyCompoundVal>(OV.getPointer()))
- return std::make_pair(V->getStore(), V->getRegion());
-
+RegionStoreManager::GetLazyBinding(RegionBindings B, const MemRegion *R,
+ const MemRegion *originalRegion) {
+
+ if (originalRegion != R) {
+ if (Optional<SVal> OV = getDefaultBinding(B, R)) {
+ if (const nonloc::LazyCompoundVal *V =
+ dyn_cast<nonloc::LazyCompoundVal>(OV.getPointer()))
+ return std::make_pair(V->getStore(), V->getRegion());
+ }
+ }
+
if (const ElementRegion *ER = dyn_cast<ElementRegion>(R)) {
const std::pair<Store, const MemRegion *> &X =
- GetLazyBinding(B, ER->getSuperRegion());
+ GetLazyBinding(B, ER->getSuperRegion(), originalRegion);
if (X.second)
return std::make_pair(X.first,
@@ -992,7 +1001,7 @@ RegionStoreManager::GetLazyBinding(RegionBindings B, const MemRegion *R) {
}
else if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) {
const std::pair<Store, const MemRegion *> &X =
- GetLazyBinding(B, FR->getSuperRegion());
+ GetLazyBinding(B, FR->getSuperRegion(), originalRegion);
if (X.second)
return std::make_pair(X.first,
@@ -1003,12 +1012,13 @@ RegionStoreManager::GetLazyBinding(RegionBindings B, const MemRegion *R) {
else if (const CXXBaseObjectRegion *baseReg =
dyn_cast<CXXBaseObjectRegion>(R)) {
const std::pair<Store, const MemRegion *> &X =
- GetLazyBinding(B, baseReg->getSuperRegion());
+ GetLazyBinding(B, baseReg->getSuperRegion(), originalRegion);
if (X.second)
return std::make_pair(X.first,
MRMgr.getCXXBaseObjectRegionWithSuper(baseReg, X.second));
}
+
   // The NULL MemRegion indicates a non-existent lazy binding. A NULL Store is
// possible for a valid lazy binding.
return std::make_pair((Store) 0, (const MemRegion *) 0);
@@ -1098,14 +1108,19 @@ RegionStoreManager::RetrieveDerivedDefaultValue(RegionBindings B,
QualType Ty) {
if (const Optional<SVal> &D = getDefaultBinding(B, superR)) {
- if (SymbolRef parentSym = D->getAsSymbol())
+ const SVal &val = D.getValue();
+ if (SymbolRef parentSym = val.getAsSymbol())
return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R);
- if (D->isZeroConstant())
+ if (val.isZeroConstant())
return svalBuilder.makeZeroVal(Ty);
- if (D->isUnknownOrUndef())
- return *D;
+ if (val.isUnknownOrUndef())
+ return val;
+
+ // Lazy bindings are handled later.
+ if (isa<nonloc::LazyCompoundVal>(val))
+ return Optional<SVal>();
assert(0 && "Unknown default value");
}
@@ -1113,6 +1128,15 @@ RegionStoreManager::RetrieveDerivedDefaultValue(RegionBindings B,
return Optional<SVal>();
}
+SVal RegionStoreManager::RetrieveLazyBinding(const MemRegion *lazyBindingRegion,
+ Store lazyBindingStore) {
+ if (const ElementRegion *ER = dyn_cast<ElementRegion>(lazyBindingRegion))
+ return RetrieveElement(lazyBindingStore, ER);
+
+ return RetrieveField(lazyBindingStore,
+ cast<FieldRegion>(lazyBindingRegion));
+}
+
SVal RegionStoreManager::RetrieveFieldOrElementCommon(Store store,
const TypedRegion *R,
QualType Ty,
@@ -1140,14 +1164,10 @@ SVal RegionStoreManager::RetrieveFieldOrElementCommon(Store store,
// Lazy binding?
Store lazyBindingStore = NULL;
const MemRegion *lazyBindingRegion = NULL;
- llvm::tie(lazyBindingStore, lazyBindingRegion) = GetLazyBinding(B, R);
+ llvm::tie(lazyBindingStore, lazyBindingRegion) = GetLazyBinding(B, R, R);
- if (lazyBindingRegion) {
- if (const ElementRegion *ER = dyn_cast<ElementRegion>(lazyBindingRegion))
- return RetrieveElement(lazyBindingStore, ER);
- return RetrieveField(lazyBindingStore,
- cast<FieldRegion>(lazyBindingRegion));
- }
+ if (lazyBindingRegion)
+ return RetrieveLazyBinding(lazyBindingRegion, lazyBindingStore);
if (R->hasStackNonParametersStorage()) {
if (const ElementRegion *ER = dyn_cast<ElementRegion>(R)) {
@@ -1250,12 +1270,12 @@ SVal RegionStoreManager::RetrieveLazySymbol(const TypedRegion *R) {
SVal RegionStoreManager::RetrieveStruct(Store store, const TypedRegion* R) {
QualType T = R->getValueType();
assert(T->isStructureOrClassType());
- return svalBuilder.makeLazyCompoundVal(store, R);
+ return svalBuilder.makeLazyCompoundVal(StoreRef(store, *this), R);
}
SVal RegionStoreManager::RetrieveArray(Store store, const TypedRegion * R) {
assert(Ctx.getAsConstantArrayType(R->getValueType()));
- return svalBuilder.makeLazyCompoundVal(store, R);
+ return svalBuilder.makeLazyCompoundVal(StoreRef(store, *this), R);
}
//===----------------------------------------------------------------------===//
@@ -1378,7 +1398,8 @@ StoreRef RegionStoreManager::BindArray(Store store, const TypedRegion* R,
// Treat the string as a lazy compound value.
nonloc::LazyCompoundVal LCV =
- cast<nonloc::LazyCompoundVal>(svalBuilder.makeLazyCompoundVal(store, S));
+ cast<nonloc::LazyCompoundVal>(svalBuilder.
+ makeLazyCompoundVal(StoreRef(store, *this), S));
return CopyLazyBindings(LCV, store, R);
}
@@ -1529,7 +1550,7 @@ StoreRef RegionStoreManager::CopyLazyBindings(nonloc::LazyCompoundVal V,
// Now copy the bindings. This amounts to just binding 'V' to 'R'. This
// results in a zero-copy algorithm.
- return StoreRef(addBinding(B, R, BindingKey::Direct,
+ return StoreRef(addBinding(B, R, BindingKey::Default,
V).getRootWithoutRetain(), *this);
}
diff --git a/lib/StaticAnalyzer/Core/SValBuilder.cpp b/lib/StaticAnalyzer/Core/SValBuilder.cpp
index b0fd497..71f2b4a 100644
--- a/lib/StaticAnalyzer/Core/SValBuilder.cpp
+++ b/lib/StaticAnalyzer/Core/SValBuilder.cpp
@@ -25,12 +25,12 @@ using namespace ento;
// Basic SVal creation.
//===----------------------------------------------------------------------===//
-DefinedOrUnknownSVal SValBuilder::makeZeroVal(QualType T) {
- if (Loc::isLocType(T))
+DefinedOrUnknownSVal SValBuilder::makeZeroVal(QualType type) {
+ if (Loc::isLocType(type))
return makeNull();
- if (T->isIntegerType())
- return makeIntVal(0, T);
+ if (type->isIntegerType())
+ return makeIntVal(0, type);
// FIXME: Handle floats.
// FIXME: Handle structs.
@@ -39,44 +39,44 @@ DefinedOrUnknownSVal SValBuilder::makeZeroVal(QualType T) {
NonLoc SValBuilder::makeNonLoc(const SymExpr *lhs, BinaryOperator::Opcode op,
- const llvm::APSInt& v, QualType T) {
+ const llvm::APSInt& rhs, QualType type) {
// The Environment ensures we always get a persistent APSInt in
// BasicValueFactory, so we don't need to get the APSInt from
// BasicValueFactory again.
- assert(!Loc::isLocType(T));
- return nonloc::SymExprVal(SymMgr.getSymIntExpr(lhs, op, v, T));
+ assert(!Loc::isLocType(type));
+ return nonloc::SymExprVal(SymMgr.getSymIntExpr(lhs, op, rhs, type));
}
NonLoc SValBuilder::makeNonLoc(const SymExpr *lhs, BinaryOperator::Opcode op,
- const SymExpr *rhs, QualType T) {
+ const SymExpr *rhs, QualType type) {
assert(SymMgr.getType(lhs) == SymMgr.getType(rhs));
- assert(!Loc::isLocType(T));
- return nonloc::SymExprVal(SymMgr.getSymSymExpr(lhs, op, rhs, T));
+ assert(!Loc::isLocType(type));
+ return nonloc::SymExprVal(SymMgr.getSymSymExpr(lhs, op, rhs, type));
}
-SVal SValBuilder::convertToArrayIndex(SVal V) {
- if (V.isUnknownOrUndef())
- return V;
+SVal SValBuilder::convertToArrayIndex(SVal val) {
+ if (val.isUnknownOrUndef())
+ return val;
// Common case: we have an appropriately sized integer.
- if (nonloc::ConcreteInt* CI = dyn_cast<nonloc::ConcreteInt>(&V)) {
+ if (nonloc::ConcreteInt* CI = dyn_cast<nonloc::ConcreteInt>(&val)) {
const llvm::APSInt& I = CI->getValue();
if (I.getBitWidth() == ArrayIndexWidth && I.isSigned())
- return V;
+ return val;
}
- return evalCastNL(cast<NonLoc>(V), ArrayIndexTy);
+ return evalCastFromNonLoc(cast<NonLoc>(val), ArrayIndexTy);
}
DefinedOrUnknownSVal
-SValBuilder::getRegionValueSymbolVal(const TypedRegion* R) {
- QualType T = R->getValueType();
+SValBuilder::getRegionValueSymbolVal(const TypedRegion* region) {
+ QualType T = region->getValueType();
if (!SymbolManager::canSymbolicate(T))
return UnknownVal();
- SymbolRef sym = SymMgr.getRegionValueSymbol(R);
+ SymbolRef sym = SymMgr.getRegionValueSymbol(region);
if (Loc::isLocType(T))
return loc::MemRegionVal(MemMgr.getSymbolicRegion(sym));
@@ -84,15 +84,15 @@ SValBuilder::getRegionValueSymbolVal(const TypedRegion* R) {
return nonloc::SymbolVal(sym);
}
-DefinedOrUnknownSVal SValBuilder::getConjuredSymbolVal(const void *SymbolTag,
- const Expr *E,
- unsigned Count) {
- QualType T = E->getType();
+DefinedOrUnknownSVal SValBuilder::getConjuredSymbolVal(const void *symbolTag,
+ const Expr *expr,
+ unsigned count) {
+ QualType T = expr->getType();
if (!SymbolManager::canSymbolicate(T))
return UnknownVal();
- SymbolRef sym = SymMgr.getConjuredSymbol(E, Count, SymbolTag);
+ SymbolRef sym = SymMgr.getConjuredSymbol(expr, count, symbolTag);
if (Loc::isLocType(T))
return loc::MemRegionVal(MemMgr.getSymbolicRegion(sym));
@@ -100,31 +100,32 @@ DefinedOrUnknownSVal SValBuilder::getConjuredSymbolVal(const void *SymbolTag,
return nonloc::SymbolVal(sym);
}
-DefinedOrUnknownSVal SValBuilder::getConjuredSymbolVal(const void *SymbolTag,
- const Expr *E,
- QualType T,
- unsigned Count) {
+DefinedOrUnknownSVal SValBuilder::getConjuredSymbolVal(const void *symbolTag,
+ const Expr *expr,
+ QualType type,
+ unsigned count) {
- if (!SymbolManager::canSymbolicate(T))
+ if (!SymbolManager::canSymbolicate(type))
return UnknownVal();
- SymbolRef sym = SymMgr.getConjuredSymbol(E, T, Count, SymbolTag);
+ SymbolRef sym = SymMgr.getConjuredSymbol(expr, type, count, symbolTag);
- if (Loc::isLocType(T))
+ if (Loc::isLocType(type))
return loc::MemRegionVal(MemMgr.getSymbolicRegion(sym));
return nonloc::SymbolVal(sym);
}
-DefinedSVal SValBuilder::getMetadataSymbolVal(const void *SymbolTag,
- const MemRegion *MR,
- const Expr *E, QualType T,
- unsigned Count) {
- assert(SymbolManager::canSymbolicate(T) && "Invalid metadata symbol type");
+DefinedSVal SValBuilder::getMetadataSymbolVal(const void *symbolTag,
+ const MemRegion *region,
+ const Expr *expr, QualType type,
+ unsigned count) {
+ assert(SymbolManager::canSymbolicate(type) && "Invalid metadata symbol type");
- SymbolRef sym = SymMgr.getMetadataSymbol(MR, E, T, Count, SymbolTag);
+ SymbolRef sym =
+ SymMgr.getMetadataSymbol(region, expr, type, count, symbolTag);
- if (Loc::isLocType(T))
+ if (Loc::isLocType(type))
return loc::MemRegionVal(MemMgr.getSymbolicRegion(sym));
return nonloc::SymbolVal(sym);
@@ -132,13 +133,13 @@ DefinedSVal SValBuilder::getMetadataSymbolVal(const void *SymbolTag,
DefinedOrUnknownSVal
SValBuilder::getDerivedRegionValueSymbolVal(SymbolRef parentSymbol,
- const TypedRegion *R) {
- QualType T = R->getValueType();
+ const TypedRegion *region) {
+ QualType T = region->getValueType();
if (!SymbolManager::canSymbolicate(T))
return UnknownVal();
- SymbolRef sym = SymMgr.getDerivedSymbol(parentSymbol, R);
+ SymbolRef sym = SymMgr.getDerivedSymbol(parentSymbol, region);
if (Loc::isLocType(T))
return loc::MemRegionVal(MemMgr.getSymbolicRegion(sym));
@@ -146,53 +147,53 @@ SValBuilder::getDerivedRegionValueSymbolVal(SymbolRef parentSymbol,
return nonloc::SymbolVal(sym);
}
-DefinedSVal SValBuilder::getFunctionPointer(const FunctionDecl* FD) {
- return loc::MemRegionVal(MemMgr.getFunctionTextRegion(FD));
+DefinedSVal SValBuilder::getFunctionPointer(const FunctionDecl* func) {
+ return loc::MemRegionVal(MemMgr.getFunctionTextRegion(func));
}
-DefinedSVal SValBuilder::getBlockPointer(const BlockDecl *D,
- CanQualType locTy,
- const LocationContext *LC) {
+DefinedSVal SValBuilder::getBlockPointer(const BlockDecl *block,
+ CanQualType locTy,
+ const LocationContext *locContext) {
const BlockTextRegion *BC =
- MemMgr.getBlockTextRegion(D, locTy, LC->getAnalysisContext());
- const BlockDataRegion *BD = MemMgr.getBlockDataRegion(BC, LC);
+ MemMgr.getBlockTextRegion(block, locTy, locContext->getAnalysisContext());
+ const BlockDataRegion *BD = MemMgr.getBlockDataRegion(BC, locContext);
return loc::MemRegionVal(BD);
}
//===----------------------------------------------------------------------===//
-SVal SValBuilder::evalBinOp(const GRState *ST, BinaryOperator::Opcode Op,
- SVal L, SVal R, QualType T) {
+SVal SValBuilder::evalBinOp(const GRState *state, BinaryOperator::Opcode op,
+ SVal lhs, SVal rhs, QualType type) {
- if (L.isUndef() || R.isUndef())
+ if (lhs.isUndef() || rhs.isUndef())
return UndefinedVal();
- if (L.isUnknown() || R.isUnknown())
+ if (lhs.isUnknown() || rhs.isUnknown())
return UnknownVal();
- if (isa<Loc>(L)) {
- if (isa<Loc>(R))
- return evalBinOpLL(ST, Op, cast<Loc>(L), cast<Loc>(R), T);
+ if (isa<Loc>(lhs)) {
+ if (isa<Loc>(rhs))
+ return evalBinOpLL(state, op, cast<Loc>(lhs), cast<Loc>(rhs), type);
- return evalBinOpLN(ST, Op, cast<Loc>(L), cast<NonLoc>(R), T);
+ return evalBinOpLN(state, op, cast<Loc>(lhs), cast<NonLoc>(rhs), type);
}
- if (isa<Loc>(R)) {
+ if (isa<Loc>(rhs)) {
// Support pointer arithmetic where the addend is on the left
// and the pointer on the right.
- assert(Op == BO_Add);
+ assert(op == BO_Add);
// Commute the operands.
- return evalBinOpLN(ST, Op, cast<Loc>(R), cast<NonLoc>(L), T);
+ return evalBinOpLN(state, op, cast<Loc>(rhs), cast<NonLoc>(lhs), type);
}
- return evalBinOpNN(ST, Op, cast<NonLoc>(L), cast<NonLoc>(R), T);
+ return evalBinOpNN(state, op, cast<NonLoc>(lhs), cast<NonLoc>(rhs), type);
}
-DefinedOrUnknownSVal SValBuilder::evalEQ(const GRState *ST,
- DefinedOrUnknownSVal L,
- DefinedOrUnknownSVal R) {
- return cast<DefinedOrUnknownSVal>(evalBinOp(ST, BO_EQ, L, R,
+DefinedOrUnknownSVal SValBuilder::evalEQ(const GRState *state,
+ DefinedOrUnknownSVal lhs,
+ DefinedOrUnknownSVal rhs) {
+ return cast<DefinedOrUnknownSVal>(evalBinOp(state, BO_EQ, lhs, rhs,
Context.IntTy));
}
@@ -213,11 +214,11 @@ SVal SValBuilder::evalCast(SVal val, QualType castTy, QualType originalTy) {
// Check for casts from integers to integers.
if (castTy->isIntegerType() && originalTy->isIntegerType())
- return evalCastNL(cast<NonLoc>(val), castTy);
+ return evalCastFromNonLoc(cast<NonLoc>(val), castTy);
// Check for casts from pointers to integers.
if (castTy->isIntegerType() && Loc::isLocType(originalTy))
- return evalCastL(cast<Loc>(val), castTy);
+ return evalCastFromLoc(cast<Loc>(val), castTy);
// Check for casts from integers to pointers.
if (Loc::isLocType(castTy) && originalTy->isIntegerType()) {
@@ -256,7 +257,7 @@ SVal SValBuilder::evalCast(SVal val, QualType castTy, QualType originalTy) {
// need the original decayed type.
// QualType elemTy = cast<ArrayType>(originalTy)->getElementType();
// QualType pointerTy = C.getPointerType(elemTy);
- return evalCastL(cast<Loc>(val), castTy);
+ return evalCastFromLoc(cast<Loc>(val), castTy);
}
// Check for casts from a region to a specific type.
@@ -305,6 +306,6 @@ SVal SValBuilder::evalCast(SVal val, QualType castTy, QualType originalTy) {
DispatchCast:
// All other cases.
- return isa<Loc>(val) ? evalCastL(cast<Loc>(val), castTy)
- : evalCastNL(cast<NonLoc>(val), castTy);
+ return isa<Loc>(val) ? evalCastFromLoc(cast<Loc>(val), castTy)
+ : evalCastFromNonLoc(cast<NonLoc>(val), castTy);
}
diff --git a/lib/StaticAnalyzer/Core/SimpleConstraintManager.cpp b/lib/StaticAnalyzer/Core/SimpleConstraintManager.cpp
index e0b61ab..1ee694e 100644
--- a/lib/StaticAnalyzer/Core/SimpleConstraintManager.cpp
+++ b/lib/StaticAnalyzer/Core/SimpleConstraintManager.cpp
@@ -15,7 +15,6 @@
#include "SimpleConstraintManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/GRState.h"
-#include "clang/StaticAnalyzer/Core/PathSensitive/Checker.h"
namespace clang {
diff --git a/lib/StaticAnalyzer/Core/SimpleSValBuilder.cpp b/lib/StaticAnalyzer/Core/SimpleSValBuilder.cpp
index 9a46bd6..5d80251 100644
--- a/lib/StaticAnalyzer/Core/SimpleSValBuilder.cpp
+++ b/lib/StaticAnalyzer/Core/SimpleSValBuilder.cpp
@@ -20,8 +20,8 @@ using namespace ento;
namespace {
class SimpleSValBuilder : public SValBuilder {
protected:
- virtual SVal evalCastNL(NonLoc val, QualType castTy);
- virtual SVal evalCastL(Loc val, QualType castTy);
+ virtual SVal evalCastFromNonLoc(NonLoc val, QualType castTy);
+ virtual SVal evalCastFromLoc(Loc val, QualType castTy);
public:
SimpleSValBuilder(llvm::BumpPtrAllocator &alloc, ASTContext &context,
@@ -57,7 +57,7 @@ SValBuilder *ento::createSimpleSValBuilder(llvm::BumpPtrAllocator &alloc,
// Transfer function for Casts.
//===----------------------------------------------------------------------===//
-SVal SimpleSValBuilder::evalCastNL(NonLoc val, QualType castTy) {
+SVal SimpleSValBuilder::evalCastFromNonLoc(NonLoc val, QualType castTy) {
bool isLocType = Loc::isLocType(castTy);
@@ -106,7 +106,7 @@ SVal SimpleSValBuilder::evalCastNL(NonLoc val, QualType castTy) {
return makeIntVal(i);
}
-SVal SimpleSValBuilder::evalCastL(Loc val, QualType castTy) {
+SVal SimpleSValBuilder::evalCastFromLoc(Loc val, QualType castTy) {
// Casts from pointers -> pointers, just return the lval.
//
@@ -255,11 +255,12 @@ SVal SimpleSValBuilder::MakeSymIntVal(const SymExpr *LHS,
}
// Idempotent ops (like a*1) can still change the type of an expression.
- // Wrap the LHS up in a NonLoc again and let evalCastNL do the dirty work.
+ // Wrap the LHS up in a NonLoc again and let evalCastFromNonLoc do the
+ // dirty work.
if (isIdempotent) {
if (SymbolRef LHSSym = dyn_cast<SymbolData>(LHS))
- return evalCastNL(nonloc::SymbolVal(LHSSym), resultTy);
- return evalCastNL(nonloc::SymExprVal(LHS), resultTy);
+ return evalCastFromNonLoc(nonloc::SymbolVal(LHSSym), resultTy);
+ return evalCastFromNonLoc(nonloc::SymExprVal(LHS), resultTy);
}
// If we reach this point, the expression cannot be simplified.
@@ -289,7 +290,7 @@ SVal SimpleSValBuilder::evalBinOpNN(const GRState *state,
return makeIntVal(0, resultTy);
case BO_Or:
case BO_And:
- return evalCastNL(lhs, resultTy);
+ return evalCastFromNonLoc(lhs, resultTy);
}
while (1) {
@@ -552,7 +553,7 @@ SVal SimpleSValBuilder::evalBinOpLL(const GRState *state,
default:
break;
case BO_Sub:
- return evalCastL(lhs, resultTy);
+ return evalCastFromLoc(lhs, resultTy);
case BO_EQ:
case BO_LE:
case BO_LT:
@@ -588,7 +589,7 @@ SVal SimpleSValBuilder::evalBinOpLL(const GRState *state,
SVal ResultVal = cast<loc::ConcreteInt>(lhs).evalBinOp(BasicVals, op,
*rInt);
if (Loc *Result = dyn_cast<Loc>(&ResultVal))
- return evalCastL(*Result, resultTy);
+ return evalCastFromLoc(*Result, resultTy);
else
return UnknownVal();
}
@@ -633,7 +634,7 @@ SVal SimpleSValBuilder::evalBinOpLL(const GRState *state,
default:
break;
case BO_Sub:
- return evalCastL(lhs, resultTy);
+ return evalCastFromLoc(lhs, resultTy);
case BO_EQ:
case BO_LT:
case BO_LE:
@@ -698,7 +699,7 @@ SVal SimpleSValBuilder::evalBinOpLL(const GRState *state,
NonLoc *LeftIndex = dyn_cast<NonLoc>(&LeftIndexVal);
if (!LeftIndex)
return UnknownVal();
- LeftIndexVal = evalCastNL(*LeftIndex, resultTy);
+ LeftIndexVal = evalCastFromNonLoc(*LeftIndex, resultTy);
LeftIndex = dyn_cast<NonLoc>(&LeftIndexVal);
if (!LeftIndex)
return UnknownVal();
@@ -708,7 +709,7 @@ SVal SimpleSValBuilder::evalBinOpLL(const GRState *state,
NonLoc *RightIndex = dyn_cast<NonLoc>(&RightIndexVal);
if (!RightIndex)
return UnknownVal();
- RightIndexVal = evalCastNL(*RightIndex, resultTy);
+ RightIndexVal = evalCastFromNonLoc(*RightIndex, resultTy);
RightIndex = dyn_cast<NonLoc>(&RightIndexVal);
if (!RightIndex)
return UnknownVal();
@@ -872,7 +873,8 @@ SVal SimpleSValBuilder::evalBinOpLN(const GRState *state,
QualType elementType;
if (const ElementRegion *elemReg = dyn_cast<ElementRegion>(region)) {
- index = evalBinOpNN(state, BO_Add, elemReg->getIndex(), rhs,
+ assert(op == BO_Add || op == BO_Sub);
+ index = evalBinOpNN(state, op, elemReg->getIndex(), rhs,
getArrayIndexType());
superR = elemReg->getSuperRegion();
elementType = elemReg->getElementType();
diff --git a/lib/StaticAnalyzer/Core/Store.cpp b/lib/StaticAnalyzer/Core/Store.cpp
index 7225170..b936738 100644
--- a/lib/StaticAnalyzer/Core/Store.cpp
+++ b/lib/StaticAnalyzer/Core/Store.cpp
@@ -230,9 +230,9 @@ SVal StoreManager::CastRetrievedVal(SVal V, const TypedRegion *R,
}
if (const Loc *L = dyn_cast<Loc>(&V))
- return svalBuilder.evalCastL(*L, castTy);
+ return svalBuilder.evalCastFromLoc(*L, castTy);
else if (const NonLoc *NL = dyn_cast<NonLoc>(&V))
- return svalBuilder.evalCastNL(*NL, castTy);
+ return svalBuilder.evalCastFromNonLoc(*NL, castTy);
return V;
}