39#include "llvm/ADT/DenseMap.h"
40#include "llvm/ADT/ImmutableMap.h"
41#include "llvm/ADT/STLExtras.h"
42#include "llvm/ADT/ScopeExit.h"
43#include "llvm/ADT/SmallVector.h"
44#include "llvm/ADT/StringRef.h"
45#include "llvm/Support/Allocator.h"
46#include "llvm/Support/ErrorHandling.h"
47#include "llvm/Support/TrailingObjects.h"
48#include "llvm/Support/raw_ostream.h"
67 const Expr *DeclExp, StringRef Kind) {
81class CapExprSet :
public SmallVector<CapabilityExpr, 4> {
84 void push_back_nodup(
const CapabilityExpr &CapE) {
85 if (llvm::none_of(*
this, [=](
const CapabilityExpr &CapE2) {
101 enum FactEntryKind { Lockable, ScopedLockable };
112 const FactEntryKind Kind : 8;
118 SourceKind Source : 8;
121 SourceLocation AcquireLoc;
124 ~FactEntry() =
default;
127 FactEntry(FactEntryKind FK,
const CapabilityExpr &CE,
LockKind LK,
128 SourceLocation Loc, SourceKind Src)
129 : CapabilityExpr(CE), Kind(FK), LKind(LK), Source(Src), AcquireLoc(Loc) {}
132 SourceLocation loc()
const {
return AcquireLoc; }
133 FactEntryKind getFactEntryKind()
const {
return Kind; }
135 bool asserted()
const {
return Source == Asserted; }
136 bool declared()
const {
return Source == Declared; }
137 bool managed()
const {
return Source == Managed; }
140 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
142 ThreadSafetyHandler &Handler)
const = 0;
143 virtual void handleLock(FactSet &FSet, FactManager &FactMan,
144 const FactEntry &entry,
145 ThreadSafetyHandler &Handler)
const = 0;
146 virtual void handleUnlock(FactSet &FSet, FactManager &FactMan,
147 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
149 ThreadSafetyHandler &Handler)
const = 0;
// Compact index of a fact inside the FactManager; FactSet stores these small
// IDs rather than pointers. newFact() asserts the 16-bit ID space holds.
157using FactID =
unsigned short;
163 llvm::BumpPtrAllocator &Alloc;
164 std::vector<const FactEntry *> Facts;
167 FactManager(llvm::BumpPtrAllocator &Alloc) : Alloc(Alloc) {}
169 template <
typename T,
typename... ArgTypes>
170 T *createFact(ArgTypes &&...Args) {
171 static_assert(std::is_trivially_destructible_v<T>);
172 return T::create(Alloc, std::forward<ArgTypes>(Args)...);
175 FactID newFact(
const FactEntry *Entry) {
176 Facts.push_back(Entry);
177 assert(Facts.size() - 1 <= std::numeric_limits<FactID>::max() &&
178 "FactID space exhausted");
179 return static_cast<unsigned short>(Facts.size() - 1);
182 const FactEntry &operator[](FactID F)
const {
return *Facts[F]; }
194 using FactVec = SmallVector<FactID, 4>;
199 using iterator = FactVec::iterator;
200 using const_iterator = FactVec::const_iterator;
202 iterator begin() {
return FactIDs.begin(); }
203 const_iterator begin()
const {
return FactIDs.begin(); }
205 iterator end() {
return FactIDs.end(); }
206 const_iterator end()
const {
return FactIDs.end(); }
208 bool isEmpty()
const {
return FactIDs.size() == 0; }
211 bool isEmpty(FactManager &FactMan)
const {
212 for (
const auto FID : *
this) {
213 if (!FactMan[FID].negative())
219 void addLockByID(FactID ID) { FactIDs.push_back(ID); }
221 FactID addLock(FactManager &FM,
const FactEntry *Entry) {
222 FactID F = FM.newFact(Entry);
223 FactIDs.push_back(F);
227 bool removeLock(FactManager& FM,
const CapabilityExpr &CapE) {
228 unsigned n = FactIDs.size();
232 for (
unsigned i = 0; i < n-1; ++i) {
233 if (FM[FactIDs[i]].
matches(CapE)) {
234 FactIDs[i] = FactIDs[n-1];
239 if (FM[FactIDs[n-1]].
matches(CapE)) {
246 std::optional<FactID> replaceLock(FactManager &FM, iterator It,
247 const FactEntry *Entry) {
250 FactID F = FM.newFact(Entry);
255 std::optional<FactID> replaceLock(FactManager &FM,
const CapabilityExpr &CapE,
256 const FactEntry *Entry) {
257 return replaceLock(FM, findLockIter(FM, CapE), Entry);
260 iterator findLockIter(FactManager &FM,
const CapabilityExpr &CapE) {
261 return llvm::find_if(*
this,
262 [&](FactID ID) {
return FM[
ID].matches(CapE); });
265 const FactEntry *findLock(FactManager &FM,
const CapabilityExpr &CapE)
const {
267 llvm::find_if(*
this, [&](FactID ID) {
return FM[
ID].matches(CapE); });
268 return I != end() ? &FM[*I] :
nullptr;
271 const FactEntry *findLockUniv(FactManager &FM,
272 const CapabilityExpr &CapE)
const {
273 auto I = llvm::find_if(
274 *
this, [&](FactID ID) ->
bool {
return FM[
ID].matchesUniv(CapE); });
275 return I != end() ? &FM[*I] :
nullptr;
278 const FactEntry *findPartialMatch(FactManager &FM,
279 const CapabilityExpr &CapE)
const {
280 auto I = llvm::find_if(*
this, [&](FactID ID) ->
bool {
281 return FM[
ID].partiallyMatches(CapE);
283 return I != end() ? &FM[*I] :
nullptr;
286 bool containsMutexDecl(FactManager &FM,
const ValueDecl* Vd)
const {
287 auto I = llvm::find_if(
288 *
this, [&](FactID ID) ->
bool {
return FM[
ID].valueDecl() == Vd; });
293class ThreadSafetyAnalyzer;
308 BeforeInfo() =
default;
309 BeforeInfo(BeforeInfo &&) =
default;
313 llvm::DenseMap<const ValueDecl *, std::unique_ptr<BeforeInfo>>;
314 using CycleMap = llvm::DenseMap<const ValueDecl *, bool>;
320 ThreadSafetyAnalyzer& Analyzer);
323 ThreadSafetyAnalyzer &Analyzer);
327 ThreadSafetyAnalyzer& Analyzer,
340class LocalVariableMap;
342using LocalVarContext = llvm::ImmutableMap<const NamedDecl *, unsigned>;
345enum CFGBlockSide { CBS_Entry, CBS_Exit };
358 LocalVarContext EntryContext;
361 LocalVarContext ExitContext;
364 SourceLocation EntryLoc;
367 SourceLocation ExitLoc;
373 bool Reachable =
false;
375 const FactSet &getSet(CFGBlockSide Side)
const {
376 return Side == CBS_Entry ? EntrySet : ExitSet;
379 SourceLocation getLocation(CFGBlockSide Side)
const {
380 return Side == CBS_Entry ? EntryLoc : ExitLoc;
384 CFGBlockInfo(LocalVarContext EmptyCtx)
385 : EntryContext(EmptyCtx), ExitContext(EmptyCtx) {}
388 static CFGBlockInfo getEmptyBlockInfo(LocalVariableMap &M);
404class LocalVariableMap {
406 using Context = LocalVarContext;
412 struct VarDefinition {
414 friend class LocalVariableMap;
417 const NamedDecl *Dec;
420 const Expr *Exp =
nullptr;
428 bool isReference()
const {
return !Exp; }
432 VarDefinition(
const NamedDecl *D,
const Expr *E, Context
C)
433 : Dec(D), Exp(E), Ctx(
C) {}
436 VarDefinition(
const NamedDecl *D,
unsigned R, Context
C)
437 : Dec(D), Ref(R), Ctx(
C) {}
441 Context::Factory ContextFactory;
442 std::vector<VarDefinition> VarDefinitions;
443 std::vector<std::pair<const Stmt *, Context>> SavedContexts;
448 VarDefinitions.push_back(VarDefinition(
nullptr, 0u, getEmptyContext()));
452 const VarDefinition* lookup(
const NamedDecl *D, Context Ctx) {
453 const unsigned *i = Ctx.lookup(D);
456 assert(*i < VarDefinitions.size());
457 return &VarDefinitions[*i];
463 const Expr* lookupExpr(
const NamedDecl *D, Context &Ctx) {
464 const unsigned *P = Ctx.lookup(D);
470 if (VarDefinitions[i].Exp) {
471 Ctx = VarDefinitions[i].Ctx;
472 return VarDefinitions[i].Exp;
474 i = VarDefinitions[i].Ref;
479 Context getEmptyContext() {
return ContextFactory.getEmptyMap(); }
484 Context getNextContext(
unsigned &CtxIndex,
const Stmt *S, Context
C) {
485 if (SavedContexts[CtxIndex+1].first == S) {
487 Context
Result = SavedContexts[CtxIndex].second;
493 void dumpVarDefinitionName(
unsigned i) {
495 llvm::errs() <<
"Undefined";
498 const NamedDecl *
Dec = VarDefinitions[i].Dec;
500 llvm::errs() <<
"<<NULL>>";
503 Dec->printName(llvm::errs());
504 llvm::errs() <<
"." << i <<
" " << ((
const void*) Dec);
509 for (
unsigned i = 1, e = VarDefinitions.size(); i < e; ++i) {
510 const Expr *Exp = VarDefinitions[i].Exp;
511 unsigned Ref = VarDefinitions[i].Ref;
513 dumpVarDefinitionName(i);
514 llvm::errs() <<
" = ";
515 if (Exp) Exp->
dump();
517 dumpVarDefinitionName(Ref);
518 llvm::errs() <<
"\n";
524 void dumpContext(Context
C) {
525 for (Context::iterator I =
C.begin(), E =
C.end(); I != E; ++I) {
526 const NamedDecl *D = I.getKey();
528 llvm::errs() <<
" -> ";
529 dumpVarDefinitionName(I.getData());
530 llvm::errs() <<
"\n";
535 void traverseCFG(CFG *CFGraph,
const PostOrderCFGView *SortedGraph,
536 std::vector<CFGBlockInfo> &BlockInfo);
539 friend class VarMapBuilder;
542 unsigned getCanonicalDefinitionID(
unsigned ID) {
543 while (ID > 0 && VarDefinitions[ID].isReference())
544 ID = VarDefinitions[
ID].Ref;
549 unsigned getContextIndex() {
return SavedContexts.size()-1; }
552 void saveContext(
const Stmt *S, Context
C) {
553 SavedContexts.push_back(std::make_pair(S,
C));
558 Context addDefinition(
const NamedDecl *D,
const Expr *Exp, Context Ctx) {
559 assert(!Ctx.contains(D));
560 unsigned newID = VarDefinitions.size();
561 Context NewCtx = ContextFactory.add(Ctx, D, newID);
562 VarDefinitions.push_back(VarDefinition(D, Exp, Ctx));
567 Context addReference(
const NamedDecl *D,
unsigned i, Context Ctx) {
568 unsigned newID = VarDefinitions.size();
569 Context NewCtx = ContextFactory.add(Ctx, D, newID);
570 VarDefinitions.push_back(VarDefinition(D, i, Ctx));
576 Context updateDefinition(
const NamedDecl *D, Expr *Exp, Context Ctx) {
577 if (Ctx.contains(D)) {
578 unsigned newID = VarDefinitions.size();
579 Context NewCtx = ContextFactory.remove(Ctx, D);
580 NewCtx = ContextFactory.add(NewCtx, D, newID);
581 VarDefinitions.push_back(VarDefinition(D, Exp, Ctx));
589 Context clearDefinition(
const NamedDecl *D, Context Ctx) {
590 Context NewCtx = Ctx;
591 if (NewCtx.contains(D)) {
592 NewCtx = ContextFactory.remove(NewCtx, D);
593 NewCtx = ContextFactory.add(NewCtx, D, 0);
599 Context removeDefinition(
const NamedDecl *D, Context Ctx) {
600 Context NewCtx = Ctx;
601 if (NewCtx.contains(D)) {
602 NewCtx = ContextFactory.remove(NewCtx, D);
607 Context intersectContexts(Context C1, Context C2);
608 Context createReferenceContext(Context
C);
609 void intersectBackEdge(Context C1, Context C2);
615CFGBlockInfo CFGBlockInfo::getEmptyBlockInfo(LocalVariableMap &M) {
616 return CFGBlockInfo(M.getEmptyContext());
622class VarMapBuilder :
public ConstStmtVisitor<VarMapBuilder> {
624 LocalVariableMap* VMap;
625 LocalVariableMap::Context Ctx;
627 VarMapBuilder(LocalVariableMap *VM, LocalVariableMap::Context
C)
628 : VMap(VM), Ctx(
C) {}
630 void VisitDeclStmt(
const DeclStmt *S);
631 void VisitBinaryOperator(
const BinaryOperator *BO);
632 void VisitCallExpr(
const CallExpr *CE);
638void VarMapBuilder::VisitDeclStmt(
const DeclStmt *S) {
639 bool modifiedCtx =
false;
641 for (
const auto *D : DGrp) {
642 if (
const auto *VD = dyn_cast_or_null<VarDecl>(D)) {
643 const Expr *E = VD->getInit();
646 QualType
T = VD->getType();
647 if (
T.isTrivialType(VD->getASTContext())) {
648 Ctx = VMap->addDefinition(VD, E, Ctx);
654 VMap->saveContext(S, Ctx);
658void VarMapBuilder::VisitBinaryOperator(
const BinaryOperator *BO) {
665 if (
const auto *DRE = dyn_cast<DeclRefExpr>(LHSExp)) {
666 const ValueDecl *VDec = DRE->getDecl();
667 if (Ctx.lookup(VDec)) {
669 Ctx = VMap->updateDefinition(VDec, BO->
getRHS(), Ctx);
672 Ctx = VMap->clearDefinition(VDec, Ctx);
673 VMap->saveContext(BO, Ctx);
679void VarMapBuilder::VisitCallExpr(
const CallExpr *CE) {
689 if (II->isStr(
"bind") || II->isStr(
"bind_front"))
695 for (
unsigned Idx = 0; Idx < CE->
getNumArgs(); ++Idx) {
701 QualType ParamType = PVD->
getType();
704 const ValueDecl *VDec =
nullptr;
707 if (
const auto *DRE = dyn_cast<DeclRefExpr>(Arg))
708 VDec = DRE->getDecl();
712 if (
const auto *UO = dyn_cast<UnaryOperator>(Arg)) {
713 if (UO->getOpcode() == UO_AddrOf) {
714 const Expr *SubE = UO->getSubExpr()->IgnoreParenCasts();
715 if (
const auto *DRE = dyn_cast<DeclRefExpr>(SubE))
716 VDec = DRE->getDecl();
721 if (VDec && Ctx.lookup(VDec)) {
722 Ctx = VMap->clearDefinition(VDec, Ctx);
723 VMap->saveContext(CE, Ctx);
731LocalVariableMap::Context
732LocalVariableMap::intersectContexts(Context C1, Context C2) {
734 for (
const auto &P : C1) {
735 const NamedDecl *
Dec = P.first;
736 const unsigned *I2 = C2.lookup(Dec);
740 }
else if (getCanonicalDefinitionID(P.second) !=
741 getCanonicalDefinitionID(*I2)) {
753LocalVariableMap::Context LocalVariableMap::createReferenceContext(Context
C) {
754 Context
Result = getEmptyContext();
755 for (
const auto &P :
C)
763void LocalVariableMap::intersectBackEdge(Context C1, Context C2) {
764 for (
const auto &P : C1) {
765 const unsigned I1 = P.second;
766 VarDefinition *VDef = &VarDefinitions[I1];
767 assert(VDef->isReference());
769 const unsigned *I2 = C2.lookup(P.first);
778 if (getCanonicalDefinitionID(VDef->Ref) != getCanonicalDefinitionID(*I2)) {
821void LocalVariableMap::traverseCFG(CFG *CFGraph,
822 const PostOrderCFGView *SortedGraph,
823 std::vector<CFGBlockInfo> &BlockInfo) {
824 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
826 for (
const auto *CurrBlock : *SortedGraph) {
827 unsigned CurrBlockID = CurrBlock->getBlockID();
828 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
830 VisitedBlocks.insert(CurrBlock);
833 bool HasBackEdges =
false;
836 PE = CurrBlock->pred_end(); PI != PE; ++PI) {
838 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI)) {
843 unsigned PrevBlockID = (*PI)->getBlockID();
844 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
847 CurrBlockInfo->EntryContext = PrevBlockInfo->ExitContext;
851 CurrBlockInfo->EntryContext =
852 intersectContexts(CurrBlockInfo->EntryContext,
853 PrevBlockInfo->ExitContext);
860 CurrBlockInfo->EntryContext =
861 createReferenceContext(CurrBlockInfo->EntryContext);
864 saveContext(
nullptr, CurrBlockInfo->EntryContext);
865 CurrBlockInfo->EntryIndex = getContextIndex();
868 VarMapBuilder VMapBuilder(
this, CurrBlockInfo->EntryContext);
869 for (
const auto &BI : *CurrBlock) {
870 switch (BI.getKind()) {
872 CFGStmt CS = BI.castAs<CFGStmt>();
873 VMapBuilder.Visit(CS.
getStmt());
880 CurrBlockInfo->ExitContext = VMapBuilder.Ctx;
884 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
886 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
889 CFGBlock *FirstLoopBlock = *SI;
890 Context LoopBegin = BlockInfo[FirstLoopBlock->
getBlockID()].EntryContext;
891 Context LoopEnd = CurrBlockInfo->ExitContext;
892 intersectBackEdge(LoopBegin, LoopEnd);
898 saveContext(
nullptr, BlockInfo[exitID].ExitContext);
905 std::vector<CFGBlockInfo> &BlockInfo) {
906 for (
const auto *CurrBlock : *SortedGraph) {
907 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlock->getBlockID()];
911 if (
const Stmt *S = CurrBlock->getTerminatorStmt()) {
912 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc = S->
getBeginLoc();
915 BE = CurrBlock->rend(); BI != BE; ++BI) {
917 if (std::optional<CFGStmt> CS = BI->getAs<
CFGStmt>()) {
918 CurrBlockInfo->ExitLoc = CS->getStmt()->getBeginLoc();
924 if (CurrBlockInfo->ExitLoc.
isValid()) {
927 for (
const auto &BI : *CurrBlock) {
929 if (std::optional<CFGStmt> CS = BI.getAs<
CFGStmt>()) {
930 CurrBlockInfo->EntryLoc = CS->getStmt()->getBeginLoc();
934 }
else if (CurrBlock->pred_size() == 1 && *CurrBlock->pred_begin() &&
935 CurrBlock != &CFGraph->
getExit()) {
938 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
939 BlockInfo[(*CurrBlock->pred_begin())->getBlockID()].ExitLoc;
940 }
else if (CurrBlock->succ_size() == 1 && *CurrBlock->succ_begin()) {
943 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
944 BlockInfo[(*CurrBlock->succ_begin())->getBlockID()].EntryLoc;
951class LockableFactEntry final :
public FactEntry {
956 unsigned int ReentrancyDepth = 0;
958 LockableFactEntry(
const CapabilityExpr &CE,
LockKind LK, SourceLocation Loc,
960 : FactEntry(Lockable, CE, LK, Loc, Src) {}
963 static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
964 const LockableFactEntry &
Other) {
968 static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
969 const CapabilityExpr &CE,
LockKind LK,
971 SourceKind Src = Acquired) {
972 return new (
Alloc) LockableFactEntry(CE, LK, Loc, Src);
975 unsigned int getReentrancyDepth()
const {
return ReentrancyDepth; }
978 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
980 ThreadSafetyHandler &Handler)
const override {
981 if (!asserted() && !negative() && !isUniversal()) {
987 void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
988 ThreadSafetyHandler &Handler)
const override {
989 if (
const FactEntry *RFact = tryReenter(FactMan, entry.kind())) {
991 FSet.replaceLock(FactMan, entry, RFact);
998 void handleUnlock(FactSet &FSet, FactManager &FactMan,
999 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1001 ThreadSafetyHandler &Handler)
const override {
1002 FSet.removeLock(FactMan, Cp);
1004 if (
const FactEntry *RFact = leaveReentrant(FactMan)) {
1006 FSet.addLock(FactMan, RFact);
1008 FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(
1015 const FactEntry *tryReenter(FactManager &FactMan,
1019 if (
kind() != ReenterKind)
1021 auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
1022 NewFact->ReentrancyDepth++;
1028 const FactEntry *leaveReentrant(FactManager &FactMan)
const {
1029 if (!ReentrancyDepth)
1031 assert(reentrant());
1032 auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
1033 NewFact->ReentrancyDepth--;
1037 static bool classof(
const FactEntry *A) {
1038 return A->getFactEntryKind() == Lockable;
1042enum UnderlyingCapabilityKind {
1045 UCK_ReleasedExclusive,
1048struct UnderlyingCapability {
1050 UnderlyingCapabilityKind Kind;
1053class ScopedLockableFactEntry final
1055 private llvm::TrailingObjects<ScopedLockableFactEntry,
1056 UnderlyingCapability> {
1057 friend TrailingObjects;
1060 const unsigned ManagedCapacity;
1061 unsigned ManagedSize = 0;
1063 ScopedLockableFactEntry(
const CapabilityExpr &CE, SourceLocation Loc,
1064 SourceKind Src,
unsigned ManagedCapacity)
1065 : FactEntry(ScopedLockable, CE,
LK_Exclusive, Loc, Src),
1066 ManagedCapacity(ManagedCapacity) {}
1068 void addManaged(
const CapabilityExpr &M, UnderlyingCapabilityKind UCK) {
1069 assert(ManagedSize < ManagedCapacity);
1070 new (getTrailingObjects() + ManagedSize) UnderlyingCapability{M, UCK};
1074 ArrayRef<UnderlyingCapability> getManaged()
const {
1075 return getTrailingObjects(ManagedSize);
1079 static ScopedLockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
1080 const CapabilityExpr &CE,
1081 SourceLocation Loc, SourceKind Src,
1082 unsigned ManagedCapacity) {
1084 Alloc.Allocate(totalSizeToAlloc<UnderlyingCapability>(ManagedCapacity),
1085 alignof(ScopedLockableFactEntry));
1086 return new (
Storage) ScopedLockableFactEntry(CE, Loc, Src, ManagedCapacity);
1089 CapExprSet getUnderlyingMutexes()
const {
1090 CapExprSet UnderlyingMutexesSet;
1091 for (
const UnderlyingCapability &UnderlyingMutex : getManaged())
1092 UnderlyingMutexesSet.push_back(UnderlyingMutex.Cap);
1093 return UnderlyingMutexesSet;
1100 void addLock(
const CapabilityExpr &M) { addManaged(M, UCK_Acquired); }
1102 void addExclusiveUnlock(
const CapabilityExpr &M) {
1103 addManaged(M, UCK_ReleasedExclusive);
1106 void addSharedUnlock(
const CapabilityExpr &M) {
1107 addManaged(M, UCK_ReleasedShared);
1112 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
1114 ThreadSafetyHandler &Handler)
const override {
1118 for (
const auto &UnderlyingMutex : getManaged()) {
1119 const auto *Entry = FSet.findLock(FactMan, UnderlyingMutex.Cap);
1120 if ((UnderlyingMutex.Kind == UCK_Acquired && Entry) ||
1121 (UnderlyingMutex.Kind != UCK_Acquired && !Entry)) {
1125 UnderlyingMutex.Cap.toString(), loc(),
1131 void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
1132 ThreadSafetyHandler &Handler)
const override {
1133 for (
const auto &UnderlyingMutex : getManaged()) {
1134 if (UnderlyingMutex.Kind == UCK_Acquired)
1135 lock(FSet, FactMan, UnderlyingMutex.Cap, entry.kind(), entry.loc(),
1138 unlock(FSet, FactMan, UnderlyingMutex.Cap, entry.loc(), &Handler);
1142 void handleUnlock(FactSet &FSet, FactManager &FactMan,
1143 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1145 ThreadSafetyHandler &Handler)
const override {
1146 assert(!Cp.
negative() &&
"Managing object cannot be negative.");
1147 for (
const auto &UnderlyingMutex : getManaged()) {
1150 ThreadSafetyHandler *TSHandler = FullyRemove ?
nullptr : &Handler;
1151 if (UnderlyingMutex.Kind == UCK_Acquired) {
1152 unlock(FSet, FactMan, UnderlyingMutex.Cap, UnlockLoc, TSHandler);
1154 LockKind kind = UnderlyingMutex.Kind == UCK_ReleasedShared
1157 lock(FSet, FactMan, UnderlyingMutex.Cap, kind, UnlockLoc, TSHandler);
1161 FSet.removeLock(FactMan, Cp);
1164 static bool classof(
const FactEntry *A) {
1165 return A->getFactEntryKind() == ScopedLockable;
1169 void lock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1171 ThreadSafetyHandler *Handler)
const {
1172 if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1174 if (
const FactEntry *RFact = Fact.tryReenter(FactMan, kind)) {
1176 FSet.replaceLock(FactMan, It, RFact);
1177 }
else if (Handler) {
1181 FSet.removeLock(FactMan, !Cp);
1182 FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(Cp, kind, loc,
1187 void unlock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1188 SourceLocation loc, ThreadSafetyHandler *Handler)
const {
1189 if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1191 if (
const FactEntry *RFact = Fact.leaveReentrant(FactMan)) {
1193 FSet.replaceLock(FactMan, It, RFact);
1199 FactMan.createFact<LockableFactEntry>(!Cp,
LK_Exclusive, loc));
1200 }
else if (Handler) {
1201 SourceLocation PrevLoc;
1202 if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1203 PrevLoc =
Neg->loc();
1210class ThreadSafetyAnalyzer {
1211 friend class BuildLockset;
1212 friend class threadSafety::BeforeSet;
1214 llvm::BumpPtrAllocator Bpa;
1215 threadSafety::til::MemRegionRef Arena;
1216 threadSafety::SExprBuilder SxBuilder;
1218 ThreadSafetyHandler &Handler;
1219 const FunctionDecl *CurrentFunction;
1220 LocalVariableMap LocalVarMap;
1222 llvm::SmallDenseMap<const Expr *, til::LiteralPtr *> ConstructedObjects;
1223 FactManager FactMan;
1224 std::vector<CFGBlockInfo> BlockInfo;
1226 BeforeSet *GlobalBeforeSet;
1229 ThreadSafetyAnalyzer(ThreadSafetyHandler &H, BeforeSet *Bset)
1230 : Arena(&Bpa), SxBuilder(Arena), Handler(H), FactMan(Bpa),
1231 GlobalBeforeSet(Bset) {}
1233 bool inCurrentScope(
const CapabilityExpr &CapE);
1235 void addLock(FactSet &FSet,
const FactEntry *Entry,
bool ReqAttr =
false);
1236 void removeLock(FactSet &FSet,
const CapabilityExpr &CapE,
1237 SourceLocation UnlockLoc,
bool FullyRemove,
LockKind Kind);
1239 template <
typename AttrType>
1240 void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1241 const NamedDecl *D, til::SExpr *
Self =
nullptr);
1243 template <
class AttrType>
1244 void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1246 const CFGBlock *PredBlock,
const CFGBlock *CurrBlock,
1247 Expr *BrE,
bool Neg);
1249 const CallExpr* getTrylockCallExpr(
const Stmt *
Cond, LocalVarContext
C,
1252 void getEdgeLockset(FactSet &
Result,
const FactSet &ExitSet,
1253 const CFGBlock* PredBlock,
1254 const CFGBlock *CurrBlock);
1256 bool join(
const FactEntry &A,
const FactEntry &B, SourceLocation JoinLoc,
1259 void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1263 void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1265 intersectAndWarn(EntrySet, ExitSet, JoinLoc, LEK, LEK);
1268 void runAnalysis(AnalysisDeclContext &AC);
1270 void warnIfMutexNotHeld(
const FactSet &FSet,
const NamedDecl *D,
1271 const Expr *Exp,
AccessKind AK, Expr *MutexExp,
1273 SourceLocation Loc);
1274 void warnIfMutexHeld(
const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
1275 Expr *MutexExp, til::SExpr *
Self, SourceLocation Loc);
1277 void checkAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
1279 void checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
1287 ThreadSafetyAnalyzer& Analyzer) {
1289 BeforeInfo *Info =
nullptr;
1293 std::unique_ptr<BeforeInfo> &InfoPtr = BMap[Vd];
1295 InfoPtr.reset(
new BeforeInfo());
1296 Info = InfoPtr.get();
1299 for (
const auto *At : Vd->
attrs()) {
1300 switch (At->getKind()) {
1301 case attr::AcquiredBefore: {
1305 for (
const auto *Arg : A->args()) {
1307 Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1309 Info->Vect.push_back(Cpvd);
1310 const auto It = BMap.find(Cpvd);
1311 if (It == BMap.end())
1317 case attr::AcquiredAfter: {
1321 for (
const auto *Arg : A->args()) {
1323 Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1327 ArgInfo->Vect.push_back(Vd);
1340BeforeSet::BeforeInfo *
1342 ThreadSafetyAnalyzer &Analyzer) {
1343 auto It = BMap.find(Vd);
1344 BeforeInfo *Info =
nullptr;
1345 if (It == BMap.end())
1348 Info = It->second.get();
1349 assert(Info &&
"BMap contained nullptr?");
1355 const FactSet& FSet,
1356 ThreadSafetyAnalyzer& Analyzer,
1368 if (Info->Visited == 1)
1371 if (Info->Visited == 2)
1374 if (Info->Vect.empty())
1377 InfoVect.push_back(Info);
1379 for (
const auto *Vdb : Info->Vect) {
1381 if (FSet.containsMutexDecl(Analyzer.FactMan, Vdb)) {
1382 StringRef L1 = StartVd->
getName();
1383 StringRef L2 = Vdb->getName();
1384 Analyzer.Handler.handleLockAcquiredBefore(CapKind, L1, L2, Loc);
1388 if (CycMap.try_emplace(Vd,
true).second) {
1390 Analyzer.Handler.handleBeforeAfterCycle(L1, Vd->
getLocation());
1400 for (
auto *Info : InfoVect)
1406 if (
const auto *CE = dyn_cast<ImplicitCastExpr>(Exp))
1409 if (
const auto *DR = dyn_cast<DeclRefExpr>(Exp))
1410 return DR->getDecl();
1412 if (
const auto *ME = dyn_cast<MemberExpr>(Exp))
1413 return ME->getMemberDecl();
1418bool ThreadSafetyAnalyzer::inCurrentScope(
const CapabilityExpr &CapE) {
1419 const threadSafety::til::SExpr *SExp = CapE.
sexpr();
1420 assert(SExp &&
"Null expressions should be ignored");
1422 if (
const auto *LP = dyn_cast<til::LiteralPtr>(SExp)) {
1423 const ValueDecl *VD = LP->clangDecl();
1435 if (
const auto *P = dyn_cast<til::Project>(SExp)) {
1436 if (!isa_and_nonnull<CXXMethodDecl>(CurrentFunction))
1438 const ValueDecl *VD = P->clangDecl();
1447void ThreadSafetyAnalyzer::addLock(FactSet &FSet,
const FactEntry *Entry,
1449 if (Entry->shouldIgnore())
1452 if (!ReqAttr && !Entry->negative()) {
1454 CapabilityExpr NegC = !*Entry;
1455 const FactEntry *Nen = FSet.findLock(FactMan, NegC);
1457 FSet.removeLock(FactMan, NegC);
1460 if (inCurrentScope(*Entry) && !Entry->asserted() && !Entry->reentrant())
1467 if (!Entry->asserted() && !Entry->declared()) {
1469 Entry->loc(), Entry->getKind());
1472 if (
const FactEntry *Cp = FSet.findLock(FactMan, *Entry)) {
1473 if (!Entry->asserted())
1474 Cp->handleLock(FSet, FactMan, *Entry, Handler);
1476 FSet.addLock(FactMan, Entry);
1482void ThreadSafetyAnalyzer::removeLock(FactSet &FSet,
const CapabilityExpr &Cp,
1483 SourceLocation UnlockLoc,
1484 bool FullyRemove,
LockKind ReceivedKind) {
1488 const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1490 SourceLocation PrevLoc;
1491 if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1492 PrevLoc =
Neg->loc();
1500 if (ReceivedKind !=
LK_Generic && LDat->kind() != ReceivedKind) {
1502 ReceivedKind, LDat->loc(), UnlockLoc);
1505 LDat->handleUnlock(FSet, FactMan, Cp, UnlockLoc, FullyRemove, Handler);
1510template <
typename AttrType>
1511void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1512 const Expr *Exp,
const NamedDecl *D,
1514 if (Attr->args_size() == 0) {
1523 Mtxs.push_back_nodup(Cp);
1527 for (
const auto *Arg : Attr->args()) {
1535 Mtxs.push_back_nodup(Cp);
1542template <
class AttrType>
1543void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1544 const Expr *Exp,
const NamedDecl *D,
1545 const CFGBlock *PredBlock,
1546 const CFGBlock *CurrBlock,
1547 Expr *BrE,
bool Neg) {
1549 bool branch =
false;
1550 if (
const auto *BLE = dyn_cast_or_null<CXXBoolLiteralExpr>(BrE))
1551 branch = BLE->getValue();
1552 else if (
const auto *ILE = dyn_cast_or_null<IntegerLiteral>(BrE))
1553 branch = ILE->getValue().getBoolValue();
1555 int branchnum = branch ? 0 : 1;
1557 branchnum = !branchnum;
1562 SE = PredBlock->
succ_end(); SI != SE && i < 2; ++SI, ++i) {
1563 if (*SI == CurrBlock && i == branchnum)
1564 getMutexIDs(Mtxs, Attr, Exp, D);
1572 }
else if (
const auto *BLE = dyn_cast<CXXBoolLiteralExpr>(E)) {
1573 TCond = BLE->getValue();
1575 }
else if (
const auto *ILE = dyn_cast<IntegerLiteral>(E)) {
1576 TCond = ILE->getValue().getBoolValue();
1578 }
else if (
auto *CE = dyn_cast<ImplicitCastExpr>(E))
1586const CallExpr* ThreadSafetyAnalyzer::getTrylockCallExpr(
const Stmt *
Cond,
1592 if (
const auto *CallExp = dyn_cast<CallExpr>(
Cond)) {
1593 if (CallExp->getBuiltinCallee() == Builtin::BI__builtin_expect)
1594 return getTrylockCallExpr(CallExp->getArg(0),
C, Negate);
1597 else if (
const auto *PE = dyn_cast<ParenExpr>(
Cond))
1598 return getTrylockCallExpr(PE->getSubExpr(),
C, Negate);
1599 else if (
const auto *CE = dyn_cast<ImplicitCastExpr>(
Cond))
1600 return getTrylockCallExpr(CE->getSubExpr(),
C, Negate);
1601 else if (
const auto *FE = dyn_cast<FullExpr>(
Cond))
1602 return getTrylockCallExpr(FE->getSubExpr(),
C, Negate);
1603 else if (
const auto *DRE = dyn_cast<DeclRefExpr>(
Cond)) {
1604 const Expr *E = LocalVarMap.lookupExpr(DRE->getDecl(),
C);
1605 return getTrylockCallExpr(E,
C, Negate);
1607 else if (
const auto *UOP = dyn_cast<UnaryOperator>(
Cond)) {
1608 if (UOP->getOpcode() == UO_LNot) {
1610 return getTrylockCallExpr(UOP->getSubExpr(),
C, Negate);
1614 else if (
const auto *BOP = dyn_cast<BinaryOperator>(
Cond)) {
1615 if (BOP->getOpcode() == BO_EQ || BOP->getOpcode() == BO_NE) {
1616 if (BOP->getOpcode() == BO_NE)
1621 if (!TCond) Negate = !Negate;
1622 return getTrylockCallExpr(BOP->getLHS(),
C, Negate);
1626 if (!TCond) Negate = !Negate;
1627 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1631 if (BOP->getOpcode() == BO_LAnd) {
1633 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1635 if (BOP->getOpcode() == BO_LOr)
1636 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1638 }
else if (
const auto *COP = dyn_cast<ConditionalOperator>(
Cond)) {
1642 if (TCond && !FCond)
1643 return getTrylockCallExpr(COP->getCond(),
C, Negate);
1644 if (!TCond && FCond) {
1646 return getTrylockCallExpr(COP->getCond(),
C, Negate);
1656void ThreadSafetyAnalyzer::getEdgeLockset(FactSet&
Result,
1657 const FactSet &ExitSet,
1658 const CFGBlock *PredBlock,
1659 const CFGBlock *CurrBlock) {
1667 bool Negate =
false;
1668 const CFGBlockInfo *PredBlockInfo = &BlockInfo[PredBlock->
getBlockID()];
1669 const LocalVarContext &LVarCtx = PredBlockInfo->ExitContext;
1674 [
this, Ctx = LVarCtx](
const NamedDecl *D)
mutable ->
const Expr * {
1675 return LocalVarMap.lookupExpr(D, Ctx);
1678 auto Cleanup = llvm::make_scope_exit(
1681 const auto *Exp = getTrylockCallExpr(
Cond, LVarCtx, Negate);
1685 auto *FunDecl = dyn_cast_or_null<NamedDecl>(Exp->getCalleeDecl());
1686 if (!FunDecl || !FunDecl->hasAttr<TryAcquireCapabilityAttr>())
1689 CapExprSet ExclusiveLocksToAdd;
1690 CapExprSet SharedLocksToAdd;
1693 for (
const auto *Attr : FunDecl->specific_attrs<TryAcquireCapabilityAttr>())
1694 getMutexIDs(Attr->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, Attr,
1695 Exp, FunDecl, PredBlock, CurrBlock, Attr->getSuccessValue(),
1699 SourceLocation Loc = Exp->getExprLoc();
1700 for (
const auto &ExclusiveLockToAdd : ExclusiveLocksToAdd)
1701 addLock(
Result, FactMan.createFact<LockableFactEntry>(ExclusiveLockToAdd,
1703 for (
const auto &SharedLockToAdd : SharedLocksToAdd)
1704 addLock(
Result, FactMan.createFact<LockableFactEntry>(SharedLockToAdd,
1715class BuildLockset :
public ConstStmtVisitor<BuildLockset> {
1716 friend class ThreadSafetyAnalyzer;
1718 ThreadSafetyAnalyzer *Analyzer;
1721 const FactSet &FunctionExitFSet;
1722 LocalVariableMap::Context LVarCtx;
1726 void updateLocalVarMapCtx(
const Stmt *S) {
1728 LVarCtx = Analyzer->LocalVarMap.getNextContext(CtxIndex, S, LVarCtx);
1733 [
this, Ctx = LVarCtx](
const NamedDecl *D)
mutable ->
const Expr * {
1734 return Analyzer->LocalVarMap.lookupExpr(D, Ctx);
1740 void checkAccess(
const Expr *Exp,
AccessKind AK,
1742 Analyzer->checkAccess(FSet, Exp, AK, POK);
1744 void checkPtAccess(
const Expr *Exp,
AccessKind AK,
1746 Analyzer->checkPtAccess(FSet, Exp, AK, POK);
1749 void handleCall(
const Expr *Exp,
const NamedDecl *D,
1750 til::SExpr *
Self =
nullptr,
1751 SourceLocation Loc = SourceLocation());
1752 void examineArguments(
const FunctionDecl *FD,
1755 bool SkipFirstParam =
false);
1758 BuildLockset(ThreadSafetyAnalyzer *Anlzr, CFGBlockInfo &Info,
1759 const FactSet &FunctionExitFSet)
1760 : ConstStmtVisitor<BuildLockset>(), Analyzer(Anlzr), FSet(Info.EntrySet),
1761 FunctionExitFSet(FunctionExitFSet), LVarCtx(Info.EntryContext),
1762 CtxIndex(Info.EntryIndex) {
1763 updateLocalVarMapCtx(
nullptr);
1768 void VisitUnaryOperator(
const UnaryOperator *UO);
1769 void VisitBinaryOperator(
const BinaryOperator *BO);
1770 void VisitCastExpr(
const CastExpr *CE);
1771 void VisitCallExpr(
const CallExpr *Exp);
1772 void VisitCXXConstructExpr(
const CXXConstructExpr *Exp);
1773 void VisitDeclStmt(
const DeclStmt *S);
1774 void VisitMaterializeTemporaryExpr(
const MaterializeTemporaryExpr *Exp);
1775 void VisitReturnStmt(
const ReturnStmt *S);
1782void ThreadSafetyAnalyzer::warnIfMutexNotHeld(
1783 const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
AccessKind AK,
1785 SourceLocation Loc) {
1797 const FactEntry *LDat = FSet.findLock(FactMan, !Cp);
1800 (!Cp).toString(), Loc);
1806 if (!inCurrentScope(Cp))
1810 LDat = FSet.findLock(FactMan, Cp);
1817 const FactEntry *LDat = FSet.findLockUniv(FactMan, Cp);
1818 bool NoError =
true;
1821 LDat = FSet.findPartialMatch(FactMan, Cp);
1824 std::string PartMatchStr = LDat->toString();
1825 StringRef PartMatchName(PartMatchStr);
1835 if (NoError && LDat && !LDat->isAtLeast(LK)) {
1841void ThreadSafetyAnalyzer::warnIfMutexHeld(
const FactSet &FSet,
1842 const NamedDecl *D,
const Expr *Exp,
1843 Expr *MutexExp, til::SExpr *
Self,
1844 SourceLocation Loc) {
1853 const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1865void ThreadSafetyAnalyzer::checkAccess(
const FactSet &FSet,
const Expr *Exp,
1874 while (
const auto *DRE = dyn_cast<DeclRefExpr>(Exp)) {
1875 const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()->getCanonicalDecl());
1877 if (
const auto *E = VD->getInit()) {
1888 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1890 if (UO->getOpcode() == UO_Deref)
1891 checkPtAccess(FSet, UO->getSubExpr(), AK, POK);
1895 if (
const auto *BO = dyn_cast<BinaryOperator>(Exp)) {
1898 return checkAccess(FSet, BO->
getLHS(), AK, POK);
1900 return checkPtAccess(FSet, BO->
getLHS(), AK, POK);
1906 if (
const auto *AE = dyn_cast<ArraySubscriptExpr>(Exp)) {
1907 checkPtAccess(FSet, AE->getLHS(), AK, POK);
1911 if (
const auto *ME = dyn_cast<MemberExpr>(Exp)) {
1913 checkPtAccess(FSet, ME->getBase(), AK, POK);
1915 checkAccess(FSet, ME->getBase(), AK, POK);
1922 if (D->
hasAttr<GuardedVarAttr>() && FSet.isEmpty(FactMan)) {
1927 warnIfMutexNotHeld(FSet, D, Exp, AK, I->getArg(), POK,
nullptr, Loc);
1932void ThreadSafetyAnalyzer::checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
1938 if (
const auto *PE = dyn_cast<ParenExpr>(Exp)) {
1939 Exp = PE->getSubExpr();
1942 if (
const auto *CE = dyn_cast<CastExpr>(Exp)) {
1943 if (CE->getCastKind() == CK_ArrayToPointerDecay) {
1946 checkAccess(FSet, CE->getSubExpr(), AK, POK);
1949 Exp = CE->getSubExpr();
1955 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1956 if (UO->getOpcode() == UO_AddrOf) {
1959 checkAccess(FSet, UO->getSubExpr(), AK, POK);
1987 if (D->
hasAttr<PtGuardedVarAttr>() && FSet.isEmpty(FactMan))
1991 warnIfMutexNotHeld(FSet, D, Exp, AK, I->getArg(), PtPOK,
nullptr,
2010void BuildLockset::handleCall(
const Expr *Exp,
const NamedDecl *D,
2011 til::SExpr *
Self, SourceLocation Loc) {
2012 CapExprSet ExclusiveLocksToAdd, SharedLocksToAdd;
2013 CapExprSet ExclusiveLocksToRemove, SharedLocksToRemove, GenericLocksToRemove;
2014 CapExprSet ScopedReqsAndExcludes;
2022 til::LiteralPtr *Placeholder =
2024 [[maybe_unused]]
auto inserted =
2025 Analyzer->ConstructedObjects.insert({Exp, Placeholder});
2026 assert(inserted.second &&
"Are we visiting the same expression again?");
2029 if (TagT->getOriginalDecl()
2030 ->getMostRecentDecl()
2031 ->hasAttr<ScopedLockableAttr>())
2032 Scp = CapabilityExpr(Placeholder, Exp->
getType(),
false);
2039 for(
const Attr *At : D->
attrs()) {
2040 switch (At->getKind()) {
2043 case attr::AcquireCapability: {
2045 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2046 : ExclusiveLocksToAdd,
2054 case attr::AssertCapability: {
2056 CapExprSet AssertLocks;
2057 Analyzer->getMutexIDs(AssertLocks, A, Exp, D,
Self);
2058 for (
const auto &AssertLock : AssertLocks)
2060 FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2062 Loc, FactEntry::Asserted));
2068 case attr::ReleaseCapability: {
2071 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2072 else if (A->isShared())
2073 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2075 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2079 case attr::RequiresCapability: {
2081 for (
auto *Arg : A->args()) {
2082 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2087 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2092 case attr::LocksExcluded: {
2094 for (
auto *Arg : A->args()) {
2095 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2098 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2109 std::optional<CallExpr::const_arg_range> Args;
2111 if (
const auto *CE = dyn_cast<CallExpr>(Exp))
2112 Args = CE->arguments();
2113 else if (
const auto *CE = dyn_cast<CXXConstructExpr>(Exp))
2114 Args = CE->arguments();
2116 llvm_unreachable(
"Unknown call kind");
2118 const auto *CalledFunction = dyn_cast<FunctionDecl>(D);
2119 if (CalledFunction && Args.has_value()) {
2120 for (
auto [Param, Arg] : zip(CalledFunction->parameters(), *Args)) {
2121 CapExprSet DeclaredLocks;
2122 for (
const Attr *At : Param->attrs()) {
2123 switch (At->getKind()) {
2124 case attr::AcquireCapability: {
2126 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2127 : ExclusiveLocksToAdd,
2129 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2133 case attr::ReleaseCapability: {
2136 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2137 else if (A->isShared())
2138 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2140 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2141 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2145 case attr::RequiresCapability: {
2147 for (
auto *Arg : A->args())
2148 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2151 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2155 case attr::LocksExcluded: {
2157 for (
auto *Arg : A->args())
2158 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2159 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2167 if (DeclaredLocks.empty())
2169 CapabilityExpr Cp(Analyzer->SxBuilder.
translate(Arg,
nullptr),
2170 StringRef(
"mutex"),
false,
false);
2171 if (
const auto *CBTE = dyn_cast<CXXBindTemporaryExpr>(Arg->
IgnoreCasts());
2173 if (
auto Object = Analyzer->ConstructedObjects.find(CBTE->getSubExpr());
2174 Object != Analyzer->ConstructedObjects.end())
2175 Cp = CapabilityExpr(
Object->second, StringRef(
"mutex"),
false,
2178 const FactEntry *Fact = FSet.findLock(Analyzer->FactMan, Cp);
2186 for (
const auto &[a,
b] :
2187 zip_longest(DeclaredLocks, Scope->getUnderlyingMutexes())) {
2188 if (!a.has_value()) {
2191 b.value().getKind(),
b.value().toString());
2192 }
else if (!
b.has_value()) {
2195 a.value().getKind(), a.value().toString());
2196 }
else if (!a.value().equals(
b.value())) {
2199 a.value().getKind(), a.value().toString(),
b.value().toString());
2208 for (
const auto &M : ExclusiveLocksToRemove)
2209 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Exclusive);
2210 for (
const auto &M : SharedLocksToRemove)
2211 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Shared);
2212 for (
const auto &M : GenericLocksToRemove)
2213 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Generic);
2216 FactEntry::SourceKind Source =
2217 !Scp.
shouldIgnore() ? FactEntry::Managed : FactEntry::Acquired;
2218 for (
const auto &M : ExclusiveLocksToAdd)
2219 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2221 for (
const auto &M : SharedLocksToAdd)
2222 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2227 auto *ScopedEntry = Analyzer->FactMan.createFact<ScopedLockableFactEntry>(
2228 Scp, Loc, FactEntry::Acquired,
2229 ExclusiveLocksToAdd.size() + SharedLocksToAdd.size() +
2230 ScopedReqsAndExcludes.size() + ExclusiveLocksToRemove.size() +
2231 SharedLocksToRemove.size());
2232 for (
const auto &M : ExclusiveLocksToAdd)
2233 ScopedEntry->addLock(M);
2234 for (
const auto &M : SharedLocksToAdd)
2235 ScopedEntry->addLock(M);
2236 for (
const auto &M : ScopedReqsAndExcludes)
2237 ScopedEntry->addLock(M);
2238 for (
const auto &M : ExclusiveLocksToRemove)
2239 ScopedEntry->addExclusiveUnlock(M);
2240 for (
const auto &M : SharedLocksToRemove)
2241 ScopedEntry->addSharedUnlock(M);
2242 Analyzer->addLock(FSet, ScopedEntry);
2249void BuildLockset::VisitUnaryOperator(
const UnaryOperator *UO) {
2265void BuildLockset::VisitBinaryOperator(
const BinaryOperator *BO) {
2269 updateLocalVarMapCtx(BO);
2276void BuildLockset::VisitCastExpr(
const CastExpr *CE) {
2282void BuildLockset::examineArguments(
const FunctionDecl *FD,
2285 bool SkipFirstParam) {
2295 if (FD->
hasAttr<NoThreadSafetyAnalysisAttr>())
2298 const ArrayRef<ParmVarDecl *> Params = FD->
parameters();
2299 auto Param = Params.begin();
2304 for (
auto Arg = ArgBegin; Param != Params.end() && Arg != ArgEnd;
2306 QualType Qt = (*Param)->getType();
2314void BuildLockset::VisitCallExpr(
const CallExpr *Exp) {
2315 updateLocalVarMapCtx(Exp);
2317 if (
const auto *CE = dyn_cast<CXXMemberCallExpr>(Exp)) {
2318 const auto *ME = dyn_cast<MemberExpr>(CE->getCallee());
2320 const CXXMethodDecl *MD = CE->getMethodDecl();
2323 if (ME->isArrow()) {
2325 checkPtAccess(CE->getImplicitObjectArgument(),
AK_Read);
2328 checkAccess(CE->getImplicitObjectArgument(),
AK_Read);
2332 examineArguments(CE->getDirectCallee(), CE->arg_begin(), CE->arg_end());
2333 }
else if (
const auto *OE = dyn_cast<CXXOperatorCallExpr>(Exp)) {
2341 case OO_PercentEqual:
2345 case OO_LessLessEqual:
2346 case OO_GreaterGreaterEqual:
2347 checkAccess(OE->getArg(1),
AK_Read);
2357 if (!(OEop == OO_Star && OE->getNumArgs() > 1)) {
2359 checkPtAccess(OE->getArg(0),
AK_Read);
2364 const Expr *Obj = OE->getArg(0);
2369 const FunctionDecl *FD = OE->getDirectCallee();
2370 examineArguments(FD, std::next(OE->arg_begin()), OE->arg_end(),
2379 auto *D = dyn_cast_or_null<NamedDecl>(Exp->
getCalleeDecl());
2385void BuildLockset::VisitCXXConstructExpr(
const CXXConstructExpr *Exp) {
2388 const Expr* Source = Exp->
getArg(0);
2398 if (
auto *CE = dyn_cast<CastExpr>(E))
2401 if (
auto *CE = dyn_cast<CastExpr>(E))
2402 if (CE->
getCastKind() == CK_ConstructorConversion ||
2405 if (
auto *BTE = dyn_cast<CXXBindTemporaryExpr>(E))
2406 E = BTE->getSubExpr();
2410void BuildLockset::VisitDeclStmt(
const DeclStmt *S) {
2411 updateLocalVarMapCtx(S);
2414 if (
auto *VD = dyn_cast_or_null<VarDecl>(D)) {
2415 const Expr *E = VD->getInit();
2421 if (
auto *EWC = dyn_cast<ExprWithCleanups>(E))
2425 if (
auto Object = Analyzer->ConstructedObjects.find(E);
2426 Object != Analyzer->ConstructedObjects.end()) {
2427 Object->second->setClangDecl(VD);
2428 Analyzer->ConstructedObjects.erase(Object);
2434void BuildLockset::VisitMaterializeTemporaryExpr(
2435 const MaterializeTemporaryExpr *Exp) {
2437 if (
auto Object = Analyzer->ConstructedObjects.find(
2439 Object != Analyzer->ConstructedObjects.end()) {
2440 Object->second->setClangDecl(ExtD);
2441 Analyzer->ConstructedObjects.erase(Object);
2446void BuildLockset::VisitReturnStmt(
const ReturnStmt *S) {
2447 if (Analyzer->CurrentFunction ==
nullptr)
2455 const QualType ReturnType =
2458 Analyzer->checkAccess(
2459 FunctionExitFSet, RetVal,
2463 Analyzer->checkPtAccess(
2464 FunctionExitFSet, RetVal,
2474bool ThreadSafetyAnalyzer::join(
const FactEntry &A,
const FactEntry &B,
2475 SourceLocation JoinLoc,
2479 unsigned int ReentrancyDepthA = 0;
2480 unsigned int ReentrancyDepthB = 0;
2482 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&A))
2483 ReentrancyDepthA = LFE->getReentrancyDepth();
2484 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&B))
2485 ReentrancyDepthB = LFE->getReentrancyDepth();
2487 if (ReentrancyDepthA != ReentrancyDepthB) {
2493 return CanModify && ReentrancyDepthA < ReentrancyDepthB;
2494 }
else if (A.kind() != B.kind()) {
2497 if ((A.managed() || A.asserted()) && (B.managed() || B.asserted())) {
2499 bool ShouldTakeB = B.kind() ==
LK_Shared;
2500 if (CanModify || !ShouldTakeB)
2509 return CanModify && A.asserted() && !B.asserted();
2527void ThreadSafetyAnalyzer::intersectAndWarn(FactSet &EntrySet,
2528 const FactSet &ExitSet,
2529 SourceLocation JoinLoc,
2532 FactSet EntrySetOrig = EntrySet;
2535 for (
const auto &Fact : ExitSet) {
2536 const FactEntry &ExitFact = FactMan[Fact];
2538 FactSet::iterator EntryIt = EntrySet.findLockIter(FactMan, ExitFact);
2539 if (EntryIt != EntrySet.end()) {
2540 if (join(FactMan[*EntryIt], ExitFact, JoinLoc, EntryLEK))
2543 ExitFact.handleRemovalFromIntersection(ExitSet, FactMan, JoinLoc,
2549 for (
const auto &Fact : EntrySetOrig) {
2550 const FactEntry *EntryFact = &FactMan[Fact];
2551 const FactEntry *ExitFact = ExitSet.findLock(FactMan, *EntryFact);
2556 EntryFact->handleRemovalFromIntersection(EntrySetOrig, FactMan, JoinLoc,
2559 EntrySet.removeLock(FactMan, *EntryFact);
2572 if (std::optional<CFGStmt> S =
Last.getAs<
CFGStmt>()) {
2584void ThreadSafetyAnalyzer::runAnalysis(AnalysisDeclContext &AC) {
2587 threadSafety::CFGWalker walker;
2588 if (!walker.
init(AC))
2595 const NamedDecl *D = walker.
getDecl();
2596 CurrentFunction = dyn_cast<FunctionDecl>(D);
2598 if (D->
hasAttr<NoThreadSafetyAnalysisAttr>())
2613 CFGBlockInfo::getEmptyBlockInfo(LocalVarMap));
2619 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
2625 Initial.Reachable =
true;
2628 LocalVarMap.traverseCFG(CFGraph, SortedGraph, BlockInfo);
2633 CapExprSet ExclusiveLocksAcquired;
2634 CapExprSet SharedLocksAcquired;
2635 CapExprSet LocksReleased;
2640 if (!SortedGraph->
empty()) {
2642 FactSet &InitialLockset = Initial.EntrySet;
2644 CapExprSet ExclusiveLocksToAdd;
2645 CapExprSet SharedLocksToAdd;
2648 for (
const auto *Attr : D->
attrs()) {
2649 Loc = Attr->getLocation();
2650 if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2651 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2653 }
else if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2656 if (A->args_size() == 0)
2658 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2660 getMutexIDs(LocksReleased, A,
nullptr, D);
2661 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2662 if (A->args_size() == 0)
2664 getMutexIDs(A->isShared() ? SharedLocksAcquired
2665 : ExclusiveLocksAcquired,
2672 ArrayRef<ParmVarDecl *> Params;
2673 if (CurrentFunction)
2675 else if (
auto CurrentMethod = dyn_cast<ObjCMethodDecl>(D))
2676 Params = CurrentMethod->getCanonicalDecl()->parameters();
2678 llvm_unreachable(
"Unknown function kind");
2679 for (
const ParmVarDecl *Param : Params) {
2680 CapExprSet UnderlyingLocks;
2681 for (
const auto *Attr : Param->attrs()) {
2682 Loc = Attr->getLocation();
2683 if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2684 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2686 getMutexIDs(LocksReleased, A,
nullptr, Param);
2687 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2688 }
else if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2689 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2691 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2692 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2693 getMutexIDs(A->isShared() ? SharedLocksAcquired
2694 : ExclusiveLocksAcquired,
2696 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2697 }
else if (
const auto *A = dyn_cast<LocksExcludedAttr>(Attr)) {
2698 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2701 if (UnderlyingLocks.empty())
2706 auto *ScopedEntry = FactMan.createFact<ScopedLockableFactEntry>(
2707 Cp, Param->getLocation(), FactEntry::Declared,
2708 UnderlyingLocks.size());
2709 for (
const CapabilityExpr &M : UnderlyingLocks)
2710 ScopedEntry->addLock(M);
2711 addLock(InitialLockset, ScopedEntry,
true);
2715 for (
const auto &Mu : ExclusiveLocksToAdd) {
2716 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2718 addLock(InitialLockset, Entry,
true);
2720 for (
const auto &Mu : SharedLocksToAdd) {
2721 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2722 Mu,
LK_Shared, Loc, FactEntry::Declared);
2723 addLock(InitialLockset, Entry,
true);
2729 FactSet ExpectedFunctionExitSet = Initial.EntrySet;
2735 for (
const auto &Lock : ExclusiveLocksAcquired)
2736 ExpectedFunctionExitSet.addLock(
2737 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Exclusive,
2739 for (
const auto &Lock : SharedLocksAcquired)
2740 ExpectedFunctionExitSet.addLock(
2741 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Shared,
2743 for (
const auto &Lock : LocksReleased)
2744 ExpectedFunctionExitSet.removeLock(FactMan, Lock);
2746 for (
const auto *CurrBlock : *SortedGraph) {
2747 unsigned CurrBlockID = CurrBlock->
getBlockID();
2748 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
2751 VisitedBlocks.insert(CurrBlock);
2766 bool LocksetInitialized =
false;
2768 PE = CurrBlock->
pred_end(); PI != PE; ++PI) {
2770 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI))
2773 unsigned PrevBlockID = (*PI)->getBlockID();
2774 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
2781 CurrBlockInfo->Reachable =
true;
2783 FactSet PrevLockset;
2784 getEdgeLockset(PrevLockset, PrevBlockInfo->ExitSet, *PI, CurrBlock);
2786 if (!LocksetInitialized) {
2787 CurrBlockInfo->EntrySet = PrevLockset;
2788 LocksetInitialized =
true;
2794 CurrBlockInfo->EntrySet, PrevLockset, CurrBlockInfo->EntryLoc,
2795 isa_and_nonnull<ContinueStmt>((*PI)->getTerminatorStmt())
2802 if (!CurrBlockInfo->Reachable)
2805 BuildLockset LocksetBuilder(
this, *CurrBlockInfo, ExpectedFunctionExitSet);
2808 for (
const auto &BI : *CurrBlock) {
2809 switch (BI.getKind()) {
2811 CFGStmt CS = BI.castAs<CFGStmt>();
2812 LocksetBuilder.Visit(CS.
getStmt());
2817 CFGAutomaticObjDtor AD = BI.castAs<CFGAutomaticObjDtor>();
2819 if (!DD->hasAttrs())
2822 LocksetBuilder.handleCall(
2830 const CFGCleanupFunction &
CF = BI.castAs<CFGCleanupFunction>();
2831 LocksetBuilder.handleCall(
2832 nullptr,
CF.getFunctionDecl(),
2834 CF.getVarDecl()->getLocation());
2839 auto TD = BI.castAs<CFGTemporaryDtor>();
2843 if (
auto Object = ConstructedObjects.find(
2844 TD.getBindTemporaryExpr()->getSubExpr());
2845 Object != ConstructedObjects.end()) {
2849 LocksetBuilder.handleCall(
nullptr, DD,
Object->second,
2850 TD.getBindTemporaryExpr()->getEndLoc());
2851 ConstructedObjects.erase(Object);
2859 CurrBlockInfo->ExitSet = LocksetBuilder.FSet;
2866 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
2868 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
2871 CFGBlock *FirstLoopBlock = *SI;
2872 CFGBlockInfo *PreLoop = &BlockInfo[FirstLoopBlock->
getBlockID()];
2873 CFGBlockInfo *LoopEnd = &BlockInfo[CurrBlockID];
2874 intersectAndWarn(PreLoop->EntrySet, LoopEnd->ExitSet, PreLoop->EntryLoc,
2880 if (!Final.Reachable)
2884 intersectAndWarn(ExpectedFunctionExitSet, Final.ExitSet, Final.ExitLoc,
2900 ThreadSafetyAnalyzer Analyzer(Handler, *BSet);
2901 Analyzer.runAnalysis(AC);
2915 llvm_unreachable(
"Unknown AccessKind");
This file defines AnalysisDeclContext, a class that manages the analysis context data for context sen...
Defines enum values for all the target-independent builtin functions.
static void dump(llvm::raw_ostream &OS, StringRef FunctionName, ArrayRef< CounterExpression > Expressions, ArrayRef< CounterMappingRegion > Regions)
static Decl::Kind getKind(const Decl *D)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
Defines the clang::Expr interface and subclasses for C++ expressions.
Forward-declares and imports various common LLVM datatypes that clang wants to use unqualified.
Defines an enumeration for C++ overloaded operators.
static std::string toString(const clang::SanitizerSet &Sanitizers)
Produce a string containing comma-separated names of sanitizers in Sanitizers set.
Defines the clang::SourceLocation class and associated facilities.
Defines various enumerations that describe declaration and type specifiers.
static void warnInvalidLock(ThreadSafetyHandler &Handler, const Expr *MutexExp, const NamedDecl *D, const Expr *DeclExp, StringRef Kind)
Issue a warning about an invalid lock expression.
static bool getStaticBooleanValue(Expr *E, bool &TCond)
static bool neverReturns(const CFGBlock *B)
static void findBlockLocations(CFG *CFGraph, const PostOrderCFGView *SortedGraph, std::vector< CFGBlockInfo > &BlockInfo)
Find the appropriate source locations to use when producing diagnostics for each block in the CFG.
static const ValueDecl * getValueDecl(const Expr *Exp)
Gets the value decl pointer from DeclRefExprs or MemberExprs.
static const Expr * UnpackConstruction(const Expr *E)
C Language Family Type Representation.
AnalysisDeclContext contains the context data for the function, method or block under analysis.
ASTContext & getASTContext() const
static bool isAssignmentOp(Opcode Opc)
const VarDecl * getVarDecl() const
const Stmt * getTriggerStmt() const
Represents a single basic block in a source-level CFG.
bool hasNoReturnElement() const
ElementList::const_reverse_iterator const_reverse_iterator
succ_iterator succ_begin()
Stmt * getTerminatorStmt()
AdjacentBlocks::const_iterator const_pred_iterator
pred_iterator pred_begin()
unsigned getBlockID() const
Stmt * getTerminatorCondition(bool StripParens=true)
AdjacentBlocks::const_iterator const_succ_iterator
Represents a top-level expression in a basic block.
const CXXDestructorDecl * getDestructorDecl(ASTContext &astContext) const
const Stmt * getStmt() const
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Expr * getArg(unsigned Arg)
Return the specified argument.
CXXConstructorDecl * getConstructor() const
Get the constructor that this expression will (ultimately) call.
bool isCopyConstructor(unsigned &TypeQuals) const
Whether this constructor is a copy constructor (C++ [class.copy]p2, which can be used to copy the cla...
Expr * getArg(unsigned Arg)
getArg - Return the specified argument.
ConstExprIterator const_arg_iterator
FunctionDecl * getDirectCallee()
If the callee is a FunctionDecl, return it. Otherwise return null.
unsigned getNumArgs() const
getNumArgs - Return the number of actual arguments to this call.
CastKind getCastKind() const
const DeclGroupRef getDeclGroup() const
SourceLocation getBeginLoc() const LLVM_READONLY
llvm::iterator_range< specific_attr_iterator< T > > specific_attrs() const
SourceLocation getLocation() const
bool isDefinedOutsideFunctionOrMethod() const
isDefinedOutsideFunctionOrMethod - This predicate returns true if this scoped decl is defined outside...
DeclContext * getDeclContext()
This represents one expression.
Expr * IgnoreParenCasts() LLVM_READONLY
Skip past any parentheses and casts which might surround this expression until reaching a fixed point...
Expr * IgnoreParenImpCasts() LLVM_READONLY
Skip past any parentheses and implicit casts which might surround this expression until reaching a fi...
Expr * IgnoreImplicit() LLVM_READONLY
Skip past any implicit AST nodes which might surround this expression until reaching a fixed point.
Expr * IgnoreParens() LLVM_READONLY
Skip past any parentheses which might surround this expression until reaching a fixed point.
Expr * IgnoreCasts() LLVM_READONLY
Skip past any casts which might surround this expression until reaching a fixed point.
SourceLocation getExprLoc() const LLVM_READONLY
getExprLoc - Return the preferred location for the arrow when diagnosing a problem with a generic exp...
const ParmVarDecl * getParamDecl(unsigned i) const
QualType getReturnType() const
ArrayRef< ParmVarDecl * > parameters() const
FunctionDecl * getCanonicalDecl() override
Retrieves the "canonical" declaration of the given declaration.
unsigned getNumParams() const
Return the number of parameters this function must have based on its FunctionType.
Expr * getSubExpr() const
Retrieve the temporary-generating subexpression whose value will be materialized into a glvalue.
ValueDecl * getExtendingDecl()
Get the declaration which triggered the lifetime-extension of this temporary, if any.
This represents a decl that may have a name.
IdentifierInfo * getIdentifier() const
Get the identifier that names this declaration, if there is one.
StringRef getName() const
Get the name of identifier for this declaration as a StringRef.
std::string getNameAsString() const
Get a human-readable name for the declaration, even if it is one of the special kinds of names (C++ c...
virtual void printName(raw_ostream &OS, const PrintingPolicy &Policy) const
Pretty-print the unqualified name of this declaration.
QualType getCanonicalType() const
bool isConstQualified() const
Determine whether this type is const-qualified.
Encodes a location in the source.
bool isValid() const
Return true if this is a valid SourceLocation object.
Stmt - This represents one statement.
SourceLocation getEndLoc() const LLVM_READONLY
void dump() const
Dumps the specified AST fragment and all subtrees to llvm::errs().
bool isPointerType() const
bool isReferenceType() const
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
bool isLValueReferenceType() const
const T * getAs() const
Member-template getAs<specific type>'.
Expr * getSubExpr() const
Represent the declaration of a variable (in which case it is an lvalue) a function (in which case it ...
void checkBeforeAfter(const ValueDecl *Vd, const FactSet &FSet, ThreadSafetyAnalyzer &Analyzer, SourceLocation Loc, StringRef CapKind)
Return true if any mutexes in FSet are in the acquired_before set of Vd.
BeforeInfo * insertAttrExprs(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
Process acquired_before and acquired_after attributes on Vd.
BeforeInfo * getBeforeInfoForDecl(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
const PostOrderCFGView * getSortedGraph() const
const NamedDecl * getDecl() const
bool init(AnalysisDeclContext &AC)
const CFG * getGraph() const
bool shouldIgnore() const
bool equals(const CapabilityExpr &other) const
const til::SExpr * sexpr() const
std::string toString() const
const ValueDecl * valueDecl() const
StringRef getKind() const
CapabilityExpr translateAttrExpr(const Expr *AttrExp, const NamedDecl *D, const Expr *DeclExp, til::SExpr *Self=nullptr)
Translate a clang expression in an attribute to a til::SExpr.
void setLookupLocalVarExpr(std::function< const Expr *(const NamedDecl *)> F)
til::SExpr * translate(const Stmt *S, CallingContext *Ctx)
til::LiteralPtr * createThisPlaceholder()
til::SExpr * translateVariable(const VarDecl *VD, CallingContext *Ctx)
Handler class for thread safety warnings.
virtual ~ThreadSafetyHandler()
virtual void handleExpectMoreUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected)
Warn when we get fewer underlying mutexes than expected.
virtual void handleInvalidLockExp(SourceLocation Loc)
Warn about lock expressions which fail to resolve to lockable objects.
virtual void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected, Name Actual)
Warn when an actual underlying mutex of a scoped lockable does not match the expected.
virtual void handleExpectFewerUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Actual)
Warn when we get more underlying mutexes than expected.
virtual void enterFunction(const FunctionDecl *FD)
Called by the analysis when starting analysis of a function.
virtual void handleIncorrectUnlockKind(StringRef Kind, Name LockName, LockKind Expected, LockKind Received, SourceLocation LocLocked, SourceLocation LocUnlock)
Warn about an unlock function call that attempts to unlock a lock with the incorrect lock kind.
virtual void handleMutexHeldEndOfScope(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocEndOfScope, LockErrorKind LEK, bool ReentrancyMismatch=false)
Warn about situations where a mutex is sometimes held and sometimes not.
virtual void leaveFunction(const FunctionDecl *FD)
Called by the analysis when finishing analysis of a function.
virtual void handleExclusiveAndShared(StringRef Kind, Name LockName, SourceLocation Loc1, SourceLocation Loc2)
Warn when a mutex is held exclusively and shared at the same point.
virtual void handleMutexNotHeld(StringRef Kind, const NamedDecl *D, ProtectedOperationKind POK, Name LockName, LockKind LK, SourceLocation Loc, Name *PossibleMatch=nullptr)
Warn when a protected operation occurs while the specific mutex protecting the operation is not locke...
virtual void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName, SourceLocation Loc)
Warn when a function is called while an excluded mutex is locked.
virtual void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK, AccessKind AK, SourceLocation Loc)
Warn when a protected operation occurs while no locks are held.
virtual void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc, SourceLocation LocPreviousUnlock)
Warn about unlock function calls that do not have a prior matching lock expression.
virtual void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg, SourceLocation Loc)
Warn when acquiring a lock that the negative capability is not held.
virtual void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocDoubleLock)
Warn about lock function calls for locks which are already held.
internal::Matcher< T > traverse(TraversalKind TK, const internal::Matcher< T > &InnerMatcher)
Causes all nested matchers to be matched with the specified traversal kind.
unsigned kind
All of the diagnostics that can be emitted by the frontend.
@ CF
Indicates that the tracked object is a CF object.
bool Alloc(InterpState &S, CodePtr OpPC, const Descriptor *Desc)
bool Dec(InterpState &S, CodePtr OpPC, bool CanOverflow)
1) Pops a pointer from the stack 2) Load the value from the pointer 3) Writes the value decreased by ...
bool Neg(InterpState &S, CodePtr OpPC)
std::unique_ptr< DiagnosticConsumer > create(StringRef OutputFile, DiagnosticOptions &DiagOpts, bool MergeChildRecords=false)
Returns a DiagnosticConsumer that serializes diagnostics to a bitcode file.
bool matches(const til::SExpr *E1, const til::SExpr *E2)
LockKind getLockKindFromAccessKind(AccessKind AK)
Helper function that returns a LockKind required for the given level of access.
LockErrorKind
This enum distinguishes between different situations where we warn due to inconsistent locking.
@ LEK_NotLockedAtEndOfFunction
Expecting a capability to be held at the end of function.
@ LEK_LockedSomePredecessors
A capability is locked in some but not all predecessors of a CFGBlock.
@ LEK_LockedAtEndOfFunction
A capability is still locked at the end of a function.
@ LEK_LockedSomeLoopIterations
A capability is locked for some but not all loop iterations.
void threadSafetyCleanup(BeforeSet *Cache)
AccessKind
This enum distinguishes between different ways to access (read or write) a variable.
@ AK_Written
Writing a variable.
@ AK_Read
Reading a variable.
LockKind
This enum distinguishes between different kinds of lock actions.
@ LK_Shared
Shared/reader lock of a mutex.
@ LK_Exclusive
Exclusive/writer lock of a mutex.
@ LK_Generic
Can be either Shared or Exclusive.
void runThreadSafetyAnalysis(AnalysisDeclContext &AC, ThreadSafetyHandler &Handler, BeforeSet **Bset)
Check a function's CFG for thread-safety violations.
ProtectedOperationKind
This enum distinguishes between different kinds of operations that may need to be protected by locks.
@ POK_PtPassByRef
Passing a pt-guarded variable by reference.
@ POK_PassPointer
Passing pointer to a guarded variable.
@ POK_VarDereference
Dereferencing a variable (e.g. p in *p = 5;)
@ POK_PassByRef
Passing a guarded variable by reference.
@ POK_ReturnByRef
Returning a guarded variable by reference.
@ POK_PtPassPointer
Passing a pt-guarded pointer.
@ POK_PtReturnPointer
Returning a pt-guarded pointer.
@ POK_VarAccess
Reading or writing a variable (e.g. x in x = 5;)
@ POK_FunctionCall
Making a function call (e.g. fool())
@ POK_ReturnPointer
Returning pointer to a guarded variable.
@ POK_PtReturnByRef
Returning a pt-guarded variable by reference.
The JSON file list parser is used to communicate input to InstallAPI.
OverloadedOperatorKind
Enumeration specifying the different kinds of C++ overloaded operators.
bool isa(CodeGen::Address addr)
@ Self
'self' clause, allowed on Compute and Combined Constructs, plus 'update'.
nullptr
This class represents a compute construct, representing a 'Kind' of ‘parallel’, 'serial',...
static bool classof(const Stmt *T)
@ Result
The result type of a method or function.
const FunctionProtoType * T
U cast(CodeGen::Address addr)
@ Other
Other implicit parameter.
int const char * function