Deleted Added
full compact
UninitializedValues.cpp (218893) UninitializedValues.cpp (221345)
1//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
1//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file implements Uninitialized Values analysis for source-level CFGs.
10// This file implements uninitialized values analysis for source-level CFGs.
11//
12//===----------------------------------------------------------------------===//
13
11//
12//===----------------------------------------------------------------------===//
13
14#include "clang/Analysis/Analyses/UninitializedValues.h"
14#include <utility>
15#include "llvm/ADT/Optional.h"
16#include "llvm/ADT/SmallVector.h"
17#include "llvm/ADT/BitVector.h"
18#include "llvm/ADT/DenseMap.h"
19#include "clang/AST/Decl.h"
20#include "clang/Analysis/CFG.h"
21#include "clang/Analysis/AnalysisContext.h"
15#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
22#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
16#include "clang/Analysis/AnalysisDiagnostic.h"
17#include "clang/AST/ASTContext.h"
18#include "clang/Analysis/FlowSensitive/DataflowSolver.h"
23#include "clang/Analysis/Analyses/UninitializedValues.h"
24#include "clang/Analysis/Support/SaveAndRestore.h"
19
25
20#include "llvm/ADT/SmallPtrSet.h"
21
22using namespace clang;
23
26using namespace clang;
27
24//===----------------------------------------------------------------------===//
25// Dataflow initialization logic.
26//===----------------------------------------------------------------------===//
28static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
29 if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
30 !vd->isExceptionVariable() &&
31 vd->getDeclContext() == dc) {
32 QualType ty = vd->getType();
33 return ty->isScalarType() || ty->isVectorType();
34 }
35 return false;
36}
27
37
28namespace {
38//------------------------------------------------------------------------====//
39// DeclToIndex: a mapping from Decls we track to value indices.
40//====------------------------------------------------------------------------//
29
41
30class RegisterDecls
31 : public CFGRecStmtDeclVisitor<RegisterDecls> {
32
33 UninitializedValues::AnalysisDataTy& AD;
42namespace {
43class DeclToIndex {
44 llvm::DenseMap<const VarDecl *, unsigned> map;
34public:
45public:
35 RegisterDecls(UninitializedValues::AnalysisDataTy& ad) : AD(ad) {}
36
37 void VisitVarDecl(VarDecl* VD) { AD.Register(VD); }
38 CFG& getCFG() { return AD.getCFG(); }
46 DeclToIndex() {}
47
48 /// Compute the actual mapping from declarations to bits.
49 void computeMap(const DeclContext &dc);
50
51 /// Return the number of declarations in the map.
52 unsigned size() const { return map.size(); }
53
54 /// Returns the bit vector index for a given declaration.
55 llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
39};
56};
57}
40
58
41} // end anonymous namespace
59void DeclToIndex::computeMap(const DeclContext &dc) {
60 unsigned count = 0;
61 DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
62 E(dc.decls_end());
63 for ( ; I != E; ++I) {
64 const VarDecl *vd = *I;
65 if (isTrackedVar(vd, &dc))
66 map[vd] = count++;
67 }
68}
42
69
43void UninitializedValues::InitializeValues(const CFG& cfg) {
44 RegisterDecls R(getAnalysisData());
45 cfg.VisitBlockStmts(R);
70llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
71 llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
72 if (I == map.end())
73 return llvm::Optional<unsigned>();
74 return I->second;
46}
47
75}
76
48//===----------------------------------------------------------------------===//
49// Transfer functions.
50//===----------------------------------------------------------------------===//
77//------------------------------------------------------------------------====//
78// CFGBlockValues: dataflow values for CFG blocks.
79//====------------------------------------------------------------------------//
51
80
81// These values are defined in such a way that a merge can be done using
82// a bitwise OR.
83enum Value { Unknown = 0x0, /* 00 */
84 Initialized = 0x1, /* 01 */
85 Uninitialized = 0x2, /* 10 */
86 MayUninitialized = 0x3 /* 11 */ };
87
88static bool isUninitialized(const Value v) {
89 return v >= Uninitialized;
90}
91static bool isAlwaysUninit(const Value v) {
92 return v == Uninitialized;
93}
94
52namespace {
95namespace {
53class TransferFuncs
54 : public CFGStmtVisitor<TransferFuncs,bool> {
96class ValueVector {
97 llvm::BitVector vec;
98public:
99 ValueVector() {}
100 ValueVector(unsigned size) : vec(size << 1) {}
101 void resize(unsigned n) { vec.resize(n << 1); }
102 void merge(const ValueVector &rhs) { vec |= rhs.vec; }
103 bool operator!=(const ValueVector &rhs) const { return vec != rhs.vec; }
104 void reset() { vec.reset(); }
105
106 class reference {
107 ValueVector &vv;
108 const unsigned idx;
55
109
56 UninitializedValues::ValTy V;
57 UninitializedValues::AnalysisDataTy& AD;
110 reference(); // Undefined
111 public:
112 reference(ValueVector &vv, unsigned idx) : vv(vv), idx(idx) {}
113 ~reference() {}
114
115 reference &operator=(Value v) {
116 vv.vec[idx << 1] = (((unsigned) v) & 0x1) ? true : false;
117 vv.vec[(idx << 1) | 1] = (((unsigned) v) & 0x2) ? true : false;
118 return *this;
119 }
120 operator Value() {
121 unsigned x = (vv.vec[idx << 1] ? 1 : 0) | (vv.vec[(idx << 1) | 1] ? 2 :0);
122 return (Value) x;
123 }
124 };
125
126 reference operator[](unsigned idx) { return reference(*this, idx); }
127};
128
129typedef std::pair<ValueVector *, ValueVector *> BVPair;
130
131class CFGBlockValues {
132 const CFG &cfg;
133 BVPair *vals;
134 ValueVector scratch;
135 DeclToIndex declToIndex;
136
137 ValueVector &lazyCreate(ValueVector *&bv);
58public:
138public:
59 TransferFuncs(UninitializedValues::AnalysisDataTy& ad) : AD(ad) {}
139 CFGBlockValues(const CFG &cfg);
140 ~CFGBlockValues();
141
142 unsigned getNumEntries() const { return declToIndex.size(); }
143
144 void computeSetOfDeclarations(const DeclContext &dc);
145 ValueVector &getValueVector(const CFGBlock *block,
146 const CFGBlock *dstBlock);
60
147
61 UninitializedValues::ValTy& getVal() { return V; }
62 CFG& getCFG() { return AD.getCFG(); }
148 BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);
63
149
64 void SetTopValue(UninitializedValues::ValTy& X) {
65 X.setDeclValues(AD);
66 X.resetBlkExprValues(AD);
150 void mergeIntoScratch(ValueVector const &source, bool isFirst);
151 bool updateValueVectorWithScratch(const CFGBlock *block);
152 bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);
153
154 bool hasNoDeclarations() const {
155 return declToIndex.size() == 0;
67 }
156 }
157
158 bool hasEntry(const VarDecl *vd) const {
159 return declToIndex.getValueIndex(vd).hasValue();
160 }
161
162 bool hasValues(const CFGBlock *block);
163
164 void resetScratch();
165 ValueVector &getScratch() { return scratch; }
166
167 ValueVector::reference operator[](const VarDecl *vd);
168};
169} // end anonymous namespace
68
170
69 bool VisitDeclRefExpr(DeclRefExpr* DR);
70 bool VisitBinaryOperator(BinaryOperator* B);
71 bool VisitUnaryOperator(UnaryOperator* U);
72 bool VisitStmt(Stmt* S);
73 bool VisitCallExpr(CallExpr* C);
74 bool VisitDeclStmt(DeclStmt* D);
75 bool VisitAbstractConditionalOperator(AbstractConditionalOperator* C);
76 bool BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt* S);
171CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
172 unsigned n = cfg.getNumBlockIDs();
173 if (!n)
174 return;
175 vals = new std::pair<ValueVector*, ValueVector*>[n];
176 memset((void*)vals, 0, sizeof(*vals) * n);
177}
77
178
78 bool Visit(Stmt *S);
79 bool BlockStmt_VisitExpr(Expr* E);
179CFGBlockValues::~CFGBlockValues() {
180 unsigned n = cfg.getNumBlockIDs();
181 if (n == 0)
182 return;
183 for (unsigned i = 0; i < n; ++i) {
184 delete vals[i].first;
185 delete vals[i].second;
186 }
187 delete [] vals;
188}
80
189
81 void VisitTerminator(CFGBlock* B) { }
82
83 void setCurrentBlock(const CFGBlock *block) {}
84};
190void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
191 declToIndex.computeMap(dc);
192 scratch.resize(declToIndex.size());
193}
85
194
86static const bool Initialized = false;
87static const bool Uninitialized = true;
195ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
196 if (!bv)
197 bv = new ValueVector(declToIndex.size());
198 return *bv;
199}
88
200
89bool TransferFuncs::VisitDeclRefExpr(DeclRefExpr* DR) {
201/// This function pattern matches for a '&&' or '||' that appears at
202/// the beginning of a CFGBlock that also (1) has a terminator and
203/// (2) has no other elements. If such an expression is found, it is returned.
204static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
205 if (block->empty())
206 return 0;
90
207
91 if (VarDecl* VD = dyn_cast<VarDecl>(DR->getDecl()))
92 if (VD->isLocalVarDecl()) {
208 const CFGStmt *cstmt = block->front().getAs<CFGStmt>();
209 if (!cstmt)
210 return 0;
93
211
94 if (AD.Observer)
95 AD.Observer->ObserveDeclRefExpr(V, AD, DR, VD);
212 BinaryOperator *b = llvm::dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());
213
214 if (!b || !b->isLogicalOp())
215 return 0;
216
217 if (block->pred_size() == 2 &&
218 ((block->succ_size() == 2 && block->getTerminatorCondition() == b) ||
219 block->size() == 1))
220 return b;
221
222 return 0;
223}
96
224
97 // Pseudo-hack to prevent cascade of warnings. If an accessed variable
98 // is uninitialized, then we are already going to flag a warning for
99 // this variable, which a "source" of uninitialized values.
100 // We can otherwise do a full "taint" of uninitialized values. The
101 // client has both options by toggling AD.FullUninitTaint.
225ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
226 const CFGBlock *dstBlock) {
227 unsigned idx = block->getBlockID();
228 if (dstBlock && getLogicalOperatorInChain(block)) {
229 if (*block->succ_begin() == dstBlock)
230 return lazyCreate(vals[idx].first);
231 assert(*(block->succ_begin()+1) == dstBlock);
232 return lazyCreate(vals[idx].second);
233 }
102
234
103 if (AD.FullUninitTaint)
104 return V(VD,AD);
105 }
235 assert(vals[idx].second == 0);
236 return lazyCreate(vals[idx].first);
237}
106
238
107 return Initialized;
239bool CFGBlockValues::hasValues(const CFGBlock *block) {
240 unsigned idx = block->getBlockID();
241 return vals[idx].second != 0;
108}
109
242}
243
110static VarDecl* FindBlockVarDecl(Expr* E) {
244BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
245 bool shouldLazyCreate) {
246 unsigned idx = block->getBlockID();
247 lazyCreate(vals[idx].first);
248 if (shouldLazyCreate)
249 lazyCreate(vals[idx].second);
250 return vals[idx];
251}
111
252
112 // Blast through casts and parentheses to find any DeclRefExprs that
113 // refer to a block VarDecl.
253void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
254 bool isFirst) {
255 if (isFirst)
256 scratch = source;
257 else
258 scratch.merge(source);
259}
260#if 0
261static void printVector(const CFGBlock *block, ValueVector &bv,
262 unsigned num) {
263
264 llvm::errs() << block->getBlockID() << " :";
265 for (unsigned i = 0; i < bv.size(); ++i) {
266 llvm::errs() << ' ' << bv[i];
267 }
268 llvm::errs() << " : " << num << '\n';
269}
270#endif
114
271
115 if (DeclRefExpr* DR = dyn_cast<DeclRefExpr>(E->IgnoreParenCasts()))
116 if (VarDecl* VD = dyn_cast<VarDecl>(DR->getDecl()))
117 if (VD->isLocalVarDecl()) return VD;
272bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
273 ValueVector &dst = getValueVector(block, 0);
274 bool changed = (dst != scratch);
275 if (changed)
276 dst = scratch;
277#if 0
278 printVector(block, scratch, 0);
279#endif
280 return changed;
281}
118
282
119 return NULL;
283bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
284 const BVPair &newVals) {
285 BVPair &vals = getValueVectors(block, true);
286 bool changed = *newVals.first != *vals.first ||
287 *newVals.second != *vals.second;
288 *vals.first = *newVals.first;
289 *vals.second = *newVals.second;
290#if 0
291 printVector(block, *vals.first, 1);
292 printVector(block, *vals.second, 2);
293#endif
294 return changed;
120}
121
295}
296
122bool TransferFuncs::VisitBinaryOperator(BinaryOperator* B) {
297void CFGBlockValues::resetScratch() {
298 scratch.reset();
299}
123
300
124 if (VarDecl* VD = FindBlockVarDecl(B->getLHS()))
125 if (B->isAssignmentOp()) {
126 if (B->getOpcode() == BO_Assign)
127 return V(VD,AD) = Visit(B->getRHS());
128 else // Handle +=, -=, *=, etc. We do want '&', not '&&'.
129 return V(VD,AD) = Visit(B->getLHS()) & Visit(B->getRHS());
130 }
301ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
302 const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
303 assert(idx.hasValue());
304 return scratch[idx.getValue()];
305}
131
306
132 return VisitStmt(B);
307//------------------------------------------------------------------------====//
308// Worklist: worklist for dataflow analysis.
309//====------------------------------------------------------------------------//
310
311namespace {
312class DataflowWorklist {
313 llvm::SmallVector<const CFGBlock *, 20> worklist;
314 llvm::BitVector enqueuedBlocks;
315public:
316 DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}
317
318 void enqueue(const CFGBlock *block);
319 void enqueueSuccessors(const CFGBlock *block);
320 const CFGBlock *dequeue();
321
322};
133}
134
323}
324
135bool TransferFuncs::VisitDeclStmt(DeclStmt* S) {
136 for (DeclStmt::decl_iterator I=S->decl_begin(), E=S->decl_end(); I!=E; ++I) {
137 VarDecl *VD = dyn_cast<VarDecl>(*I);
138 if (VD && VD->isLocalVarDecl()) {
139 if (Stmt* I = VD->getInit()) {
140 // Visit the subexpression to check for uses of uninitialized values,
141 // even if we don't propagate that value.
142 bool isSubExprUninit = Visit(I);
143 V(VD,AD) = AD.FullUninitTaint ? isSubExprUninit : Initialized;
144 }
145 else {
146 // Special case for declarations of array types. For things like:
147 //
148 // char x[10];
149 //
150 // we should treat "x" as being initialized, because the variable
151 // "x" really refers to the memory block. Clearly x[1] is
152 // uninitialized, but expressions like "(char *) x" really do refer to
153 // an initialized value. This simple dataflow analysis does not reason
154 // about the contents of arrays, although it could be potentially
155 // extended to do so if the array were of constant size.
156 if (VD->getType()->isArrayType())
157 V(VD,AD) = Initialized;
158 else
159 V(VD,AD) = Uninitialized;
160 }
161 }
325void DataflowWorklist::enqueue(const CFGBlock *block) {
326 if (!block)
327 return;
328 unsigned idx = block->getBlockID();
329 if (enqueuedBlocks[idx])
330 return;
331 worklist.push_back(block);
332 enqueuedBlocks[idx] = true;
333}
334
335void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
336 for (CFGBlock::const_succ_iterator I = block->succ_begin(),
337 E = block->succ_end(); I != E; ++I) {
338 enqueue(*I);
162 }
339 }
163 return Uninitialized; // Value is never consumed.
164}
165
340}
341
166bool TransferFuncs::VisitCallExpr(CallExpr* C) {
167 VisitChildren(C);
168 return Initialized;
342const CFGBlock *DataflowWorklist::dequeue() {
343 if (worklist.empty())
344 return 0;
345 const CFGBlock *b = worklist.back();
346 worklist.pop_back();
347 enqueuedBlocks[b->getBlockID()] = false;
348 return b;
169}
170
349}
350
171bool TransferFuncs::VisitUnaryOperator(UnaryOperator* U) {
172 switch (U->getOpcode()) {
173 case UO_AddrOf: {
174 VarDecl* VD = FindBlockVarDecl(U->getSubExpr());
175 if (VD && VD->isLocalVarDecl())
176 return V(VD,AD) = Initialized;
177 break;
178 }
351//------------------------------------------------------------------------====//
352// Transfer function for uninitialized values analysis.
353//====------------------------------------------------------------------------//
179
354
180 default:
181 break;
355namespace {
356class FindVarResult {
357 const VarDecl *vd;
358 const DeclRefExpr *dr;
359public:
360 FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {}
361
362 const DeclRefExpr *getDeclRefExpr() const { return dr; }
363 const VarDecl *getDecl() const { return vd; }
364};
365
366class TransferFunctions : public CFGRecStmtVisitor<TransferFunctions> {
367 CFGBlockValues &vals;
368 const CFG &cfg;
369 AnalysisContext &ac;
370 UninitVariablesHandler *handler;
371 const DeclRefExpr *currentDR;
372 const Expr *currentVoidCast;
373 const bool flagBlockUses;
374public:
375 TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
376 AnalysisContext &ac,
377 UninitVariablesHandler *handler,
378 bool flagBlockUses)
379 : vals(vals), cfg(cfg), ac(ac), handler(handler), currentDR(0),
380 currentVoidCast(0), flagBlockUses(flagBlockUses) {}
381
382 const CFG &getCFG() { return cfg; }
383 void reportUninit(const DeclRefExpr *ex, const VarDecl *vd,
384 bool isAlwaysUninit);
385
386 void VisitBlockExpr(BlockExpr *be);
387 void VisitDeclStmt(DeclStmt *ds);
388 void VisitDeclRefExpr(DeclRefExpr *dr);
389 void VisitUnaryOperator(UnaryOperator *uo);
390 void VisitBinaryOperator(BinaryOperator *bo);
391 void VisitCastExpr(CastExpr *ce);
392 void VisitUnaryExprOrTypeTraitExpr(UnaryExprOrTypeTraitExpr *se);
393 void VisitCXXTypeidExpr(CXXTypeidExpr *E);
394 void BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs);
395
396 bool isTrackedVar(const VarDecl *vd) {
397 return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
182 }
398 }
399
400 FindVarResult findBlockVarDecl(Expr *ex);
401};
402}
183
403
184 return Visit(U->getSubExpr());
404void TransferFunctions::reportUninit(const DeclRefExpr *ex,
405 const VarDecl *vd, bool isAlwaysUnit) {
406 if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUnit);
185}
186
407}
408
187bool
188TransferFuncs::BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt* S) {
189 // This represents a use of the 'collection'
190 bool x = Visit(S->getCollection());
409FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) {
410 if (DeclRefExpr* dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts()))
411 if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
412 if (isTrackedVar(vd))
413 return FindVarResult(vd, dr);
414 return FindVarResult(0, 0);
415}
191
416
192 if (x == Uninitialized)
193 return Uninitialized;
194
417void TransferFunctions::BlockStmt_VisitObjCForCollectionStmt(
418 ObjCForCollectionStmt *fs) {
419
420 Visit(fs->getCollection());
421
195 // This represents an initialization of the 'element' value.
422 // This represents an initialization of the 'element' value.
196 Stmt* Element = S->getElement();
197 VarDecl* VD = 0;
198
199 if (DeclStmt* DS = dyn_cast<DeclStmt>(Element))
200 VD = cast<VarDecl>(DS->getSingleDecl());
423 Stmt *element = fs->getElement();
424 const VarDecl* vd = 0;
425
426 if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) {
427 vd = cast<VarDecl>(ds->getSingleDecl());
428 if (!isTrackedVar(vd))
429 vd = 0;
430 }
201 else {
431 else {
202 Expr* ElemExpr = cast<Expr>(Element)->IgnoreParens();
203
204 // Initialize the value of the reference variable.
432 // Initialize the value of the reference variable.
205 if (DeclRefExpr* DR = dyn_cast<DeclRefExpr>(ElemExpr))
206 VD = cast<VarDecl>(DR->getDecl());
207 else
208 return Visit(ElemExpr);
433 const FindVarResult &res = findBlockVarDecl(cast<Expr>(element));
434 vd = res.getDecl();
435 if (!vd) {
436 Visit(element);
437 return;
438 }
209 }
439 }
440
441 if (vd)
442 vals[vd] = Initialized;
443}
210
444
211 V(VD,AD) = Initialized;
212 return Initialized;
445void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
446 if (!flagBlockUses || !handler)
447 return;
448 const BlockDecl *bd = be->getBlockDecl();
449 for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
450 e = bd->capture_end() ; i != e; ++i) {
451 const VarDecl *vd = i->getVariable();
452 if (!vd->hasLocalStorage())
453 continue;
454 if (!isTrackedVar(vd))
455 continue;
456 if (i->isByRef()) {
457 vals[vd] = Initialized;
458 continue;
459 }
460 Value v = vals[vd];
461 if (isUninitialized(v))
462 handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v));
463 }
213}
214
464}
465
466void TransferFunctions::VisitDeclStmt(DeclStmt *ds) {
467 for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end();
468 DI != DE; ++DI) {
469 if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) {
470 if (isTrackedVar(vd)) {
471 if (Expr *init = vd->getInit()) {
472 Visit(init);
215
473
216bool TransferFuncs::
217VisitAbstractConditionalOperator(AbstractConditionalOperator* C) {
218 Visit(C->getCond());
474 // If the initializer consists solely of a reference to itself, we
475 // explicitly mark the variable as uninitialized. This allows code
476 // like the following:
477 //
478 // int x = x;
479 //
480 // to deliberately leave a variable uninitialized. Different analysis
481 // clients can detect this pattern and adjust their reporting
482 // appropriately, but we need to continue to analyze subsequent uses
483 // of the variable.
484 DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(init->IgnoreParenImpCasts());
485 vals[vd] = (DRE && DRE->getDecl() == vd) ? Uninitialized
486 : Initialized;
487 }
488 } else if (Stmt *init = vd->getInit()) {
489 Visit(init);
490 }
491 }
492 }
493}
219
494
220 bool rhsResult = Visit(C->getFalseExpr());
221 // Handle the GNU extension for missing LHS.
222 if (isa<ConditionalOperator>(C))
223 return Visit(C->getTrueExpr()) & rhsResult; // Yes: we want &, not &&.
224 else
225 return rhsResult;
495void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
496 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast
497 // cannot be block-level expressions. Therefore, we determine if
498 // a DeclRefExpr is involved in a "load" by comparing it to the current
499 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
500 // If a DeclRefExpr is not involved in a load, we are essentially computing
501 // its address, either for assignment to a reference or via the '&' operator.
502 // In such cases, treat the variable as being initialized, since this
503 // analysis isn't powerful enough to do alias tracking.
504 if (dr != currentDR)
505 if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
506 if (isTrackedVar(vd))
507 vals[vd] = Initialized;
226}
227
508}
509
228bool TransferFuncs::VisitStmt(Stmt* S) {
229 bool x = Initialized;
510void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) {
511 if (bo->isAssignmentOp()) {
512 const FindVarResult &res = findBlockVarDecl(bo->getLHS());
513 if (const VarDecl* vd = res.getDecl()) {
514 // We assume that DeclRefExprs wrapped in a BinaryOperator "assignment"
515 // cannot be block-level expressions. Therefore, we determine if
516 // a DeclRefExpr is involved in a "load" by comparing it to the current
517 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
518 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
519 res.getDeclRefExpr());
520 Visit(bo->getRHS());
521 Visit(bo->getLHS());
230
522
231 // We don't stop at the first subexpression that is Uninitialized because
232 // evaluating some subexpressions may result in propogating "Uninitialized"
233 // or "Initialized" to variables referenced in the other subexpressions.
234 for (Stmt::child_range I = S->children(); I; ++I)
235 if (*I && Visit(*I) == Uninitialized) x = Uninitialized;
523 ValueVector::reference val = vals[vd];
524 if (isUninitialized(val)) {
525 if (bo->getOpcode() != BO_Assign)
526 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
527 val = Initialized;
528 }
529 return;
530 }
531 }
532 Visit(bo->getRHS());
533 Visit(bo->getLHS());
534}
236
535
237 return x;
536void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) {
537 switch (uo->getOpcode()) {
538 case clang::UO_PostDec:
539 case clang::UO_PostInc:
540 case clang::UO_PreDec:
541 case clang::UO_PreInc: {
542 const FindVarResult &res = findBlockVarDecl(uo->getSubExpr());
543 if (const VarDecl *vd = res.getDecl()) {
544 // We assume that DeclRefExprs wrapped in a unary operator ++/--
545 // cannot be block-level expressions. Therefore, we determine if
546 // a DeclRefExpr is involved in a "load" by comparing it to the current
547 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
548 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
549 res.getDeclRefExpr());
550 Visit(uo->getSubExpr());
551
552 ValueVector::reference val = vals[vd];
553 if (isUninitialized(val)) {
554 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
555 // Don't cascade warnings.
556 val = Initialized;
557 }
558 return;
559 }
560 break;
561 }
562 default:
563 break;
564 }
565 Visit(uo->getSubExpr());
238}
239
566}
567
240bool TransferFuncs::Visit(Stmt *S) {
241 if (AD.isTracked(static_cast<Expr*>(S))) return V(static_cast<Expr*>(S),AD);
242 else return static_cast<CFGStmtVisitor<TransferFuncs,bool>*>(this)->Visit(S);
568void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) {
569 if (ce->getCastKind() == CK_LValueToRValue) {
570 const FindVarResult &res = findBlockVarDecl(ce->getSubExpr());
571 if (const VarDecl *vd = res.getDecl()) {
572 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast
573 // cannot be block-level expressions. Therefore, we determine if
574 // a DeclRefExpr is involved in a "load" by comparing it to the current
575 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
576 // Here we update 'currentDR' to be the one associated with this
577 // lvalue-to-rvalue cast. Then, when we analyze the DeclRefExpr, we
578 // will know that we are not computing its lvalue for other purposes
579 // than to perform a load.
580 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
581 res.getDeclRefExpr());
582 Visit(ce->getSubExpr());
583 if (currentVoidCast != ce) {
584 Value val = vals[vd];
585 if (isUninitialized(val)) {
586 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
587 // Don't cascade warnings.
588 vals[vd] = Initialized;
589 }
590 }
591 return;
592 }
593 }
594 else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) {
595 if (cse->getType()->isVoidType()) {
596 // e.g. (void) x;
597 SaveAndRestore<const Expr *>
598 lastVoidCast(currentVoidCast, cse->getSubExpr()->IgnoreParens());
599 Visit(cse->getSubExpr());
600 return;
601 }
602 }
603 Visit(ce->getSubExpr());
243}
244
604}
605
245bool TransferFuncs::BlockStmt_VisitExpr(Expr* E) {
246 bool x = static_cast<CFGStmtVisitor<TransferFuncs,bool>*>(this)->Visit(E);
247 if (AD.isTracked(E)) V(E,AD) = x;
248 return x;
606void TransferFunctions::VisitUnaryExprOrTypeTraitExpr(
607 UnaryExprOrTypeTraitExpr *se) {
608 if (se->getKind() == UETT_SizeOf) {
609 if (se->getType()->isConstantSizeType())
610 return;
611 // Handle VLAs.
612 Visit(se->getArgumentExpr());
613 }
249}
250
614}
615
251} // end anonymous namespace
252
253//===----------------------------------------------------------------------===//
254// Merge operator.
255//
256// In our transfer functions we take the approach that any
257// combination of uninitialized values, e.g.
258// Uninitialized + ___ = Uninitialized.
259//
260// Merges take the same approach, preferring soundness. At a confluence point,
261// if any predecessor has a variable marked uninitialized, the value is
262// uninitialized at the confluence point.
263//===----------------------------------------------------------------------===//
264
265namespace {
266 typedef StmtDeclBitVector_Types::Union Merge;
267 typedef DataflowSolver<UninitializedValues,TransferFuncs,Merge> Solver;
616void TransferFunctions::VisitCXXTypeidExpr(CXXTypeidExpr *E) {
617 // typeid(expression) is potentially evaluated when the argument is
618 // a glvalue of polymorphic type. (C++ 5.2.8p2-3)
619 if (!E->isTypeOperand() && E->Classify(ac.getASTContext()).isGLValue()) {
620 QualType SubExprTy = E->getExprOperand()->getType();
621 if (const RecordType *Record = SubExprTy->getAs<RecordType>())
622 if (cast<CXXRecordDecl>(Record->getDecl())->isPolymorphic())
623 Visit(E->getExprOperand());
624 }
268}
269
625}
626
270//===----------------------------------------------------------------------===//
271// Uninitialized values checker. Scan an AST and flag variable uses
272//===----------------------------------------------------------------------===//
627//------------------------------------------------------------------------====//
628// High-level "driver" logic for uninitialized values analysis.
629//====------------------------------------------------------------------------//
273
630
274UninitializedValues_ValueTypes::ObserverTy::~ObserverTy() {}
631static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
632 AnalysisContext &ac, CFGBlockValues &vals,
633 llvm::BitVector &wasAnalyzed,
634 UninitVariablesHandler *handler = 0,
635 bool flagBlockUses = false) {
636
637 wasAnalyzed[block->getBlockID()] = true;
638
639 if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
640 CFGBlock::const_pred_iterator itr = block->pred_begin();
641 BVPair vA = vals.getValueVectors(*itr, false);
642 ++itr;
643 BVPair vB = vals.getValueVectors(*itr, false);
275
644
276namespace {
277class UninitializedValuesChecker
278 : public UninitializedValues::ObserverTy {
645 BVPair valsAB;
646
647 if (b->getOpcode() == BO_LAnd) {
648 // Merge the 'F' bits from the first and second.
649 vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
650 vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
651 valsAB.first = vA.first;
652 valsAB.second = &vals.getScratch();
653 }
654 else {
655 // Merge the 'T' bits from the first and second.
656 assert(b->getOpcode() == BO_LOr);
657 vals.mergeIntoScratch(*vA.first, true);
658 vals.mergeIntoScratch(*vB.first, false);
659 valsAB.first = &vals.getScratch();
660 valsAB.second = vA.second ? vA.second : vA.first;
661 }
662 return vals.updateValueVectors(block, valsAB);
663 }
279
664
280 ASTContext &Ctx;
281 Diagnostic &Diags;
282 llvm::SmallPtrSet<VarDecl*,10> AlreadyWarned;
665 // Default behavior: merge in values of predecessor blocks.
666 vals.resetScratch();
667 bool isFirst = true;
668 for (CFGBlock::const_pred_iterator I = block->pred_begin(),
669 E = block->pred_end(); I != E; ++I) {
670 vals.mergeIntoScratch(vals.getValueVector(*I, block), isFirst);
671 isFirst = false;
672 }
673 // Apply the transfer function.
674 TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses);
675 for (CFGBlock::const_iterator I = block->begin(), E = block->end();
676 I != E; ++I) {
677 if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
678 tf.BlockStmt_Visit(cs->getStmt());
679 }
680 }
681 return vals.updateValueVectorWithScratch(block);
682}
283
683
284public:
285 UninitializedValuesChecker(ASTContext &ctx, Diagnostic &diags)
286 : Ctx(ctx), Diags(diags) {}
684void clang::runUninitializedVariablesAnalysis(const DeclContext &dc,
685 const CFG &cfg,
686 AnalysisContext &ac,
687 UninitVariablesHandler &handler) {
688 CFGBlockValues vals(cfg);
689 vals.computeSetOfDeclarations(dc);
690 if (vals.hasNoDeclarations())
691 return;
287
692
288 virtual void ObserveDeclRefExpr(UninitializedValues::ValTy& V,
289 UninitializedValues::AnalysisDataTy& AD,
290 DeclRefExpr* DR, VarDecl* VD) {
693 // Mark all variables uninitialized at the entry.
694 const CFGBlock &entry = cfg.getEntry();
695 for (CFGBlock::const_succ_iterator i = entry.succ_begin(),
696 e = entry.succ_end(); i != e; ++i) {
697 if (const CFGBlock *succ = *i) {
698 ValueVector &vec = vals.getValueVector(&entry, succ);
699 const unsigned n = vals.getNumEntries();
700 for (unsigned j = 0; j < n ; ++j) {
701 vec[j] = Uninitialized;
702 }
703 }
704 }
291
705
292 assert ( AD.isTracked(VD) && "Unknown VarDecl.");
706 // Proceed with the worklist.
707 DataflowWorklist worklist(cfg);
708 llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
709 worklist.enqueueSuccessors(&cfg.getEntry());
710 llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
293
711
294 if (V(VD,AD) == Uninitialized)
295 if (AlreadyWarned.insert(VD))
296 Diags.Report(Ctx.getFullLoc(DR->getSourceRange().getBegin()),
297 diag::warn_uninit_val);
712 while (const CFGBlock *block = worklist.dequeue()) {
713 // Did the block change?
714 bool changed = runOnBlock(block, cfg, ac, vals, wasAnalyzed);
715 if (changed || !previouslyVisited[block->getBlockID()])
716 worklist.enqueueSuccessors(block);
717 previouslyVisited[block->getBlockID()] = true;
298 }
718 }
299};
300} // end anonymous namespace
719
720 // Run through the blocks one more time, and report uninitialized variables.
721 for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
722 if (wasAnalyzed[(*BI)->getBlockID()])
723 runOnBlock(*BI, cfg, ac, vals, wasAnalyzed, &handler,
724 /* flagBlockUses */ true);
725 }
726}
301
727
302namespace clang {
303void CheckUninitializedValues(CFG& cfg, ASTContext &Ctx, Diagnostic &Diags,
304 bool FullUninitTaint) {
728UninitVariablesHandler::~UninitVariablesHandler() {}
305
729
306 // Compute the uninitialized values information.
307 UninitializedValues U(cfg);
308 U.getAnalysisData().FullUninitTaint = FullUninitTaint;
309 Solver S(U);
310 S.runOnCFG(cfg);
311
312 // Scan for DeclRefExprs that use uninitialized values.
313 UninitializedValuesChecker Observer(Ctx,Diags);
314 U.getAnalysisData().Observer = &Observer;
315 S.runOnAllBlocks(cfg);
316}
317} // end namespace clang