//===- GlobalsModRef.cpp - Simple Mod/Ref Analysis for Globals ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This simple pass provides alias and mod/ref information for global values
// that do not have their address taken, and keeps track of whether functions
// read or write memory (are "pure").  For this simple (but very common) case,
// we can provide pretty accurate and useful information.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
using namespace llvm;

#define DEBUG_TYPE "globalsmodref-aa"

STATISTIC(NumNonAddrTakenGlobalVars,
          "Number of global vars without address taken");
STATISTIC(NumNonAddrTakenFunctions,"Number of functions without address taken");
STATISTIC(NumNoMemFunctions, "Number of functions that do not access memory");
STATISTIC(NumReadMemFunctions, "Number of functions that only read memory");
STATISTIC(NumIndirectGlobalVars, "Number of indirect global objects");

// An option to enable unsafe alias results from the GlobalsModRef analysis.
// When enabled, GlobalsModRef will provide no-alias results which in extremely
// rare cases may not be conservatively correct. In particular, in the face of
// transforms which cause asymmetry between how effective GetUnderlyingObject
// is for two pointers, it may produce incorrect results.
//
// These unsafe results have been returned by GMR for many years without
// causing significant issues in the wild and so we provide a mechanism to
// re-enable them for users of LLVM that have a particular performance
// sensitivity and no known issues. The option also makes it easy to evaluate
// the performance impact of these results.
static cl::opt<bool> EnableUnsafeGlobalsModRefAliasResults(
    "enable-unsafe-globalsmodref-alias-results", cl::init(false), cl::Hidden);
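
// Usage sketch (editorial note, not from the original source): this hidden
// flag is an ordinary cl::opt, so with the legacy pass manager it can be
// flipped on the opt command line, for instance something like
//   opt -globals-aa -gvn -enable-unsafe-globalsmodref-alias-results input.ll
// The surrounding pipeline here is illustrative only.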

/// The mod/ref information collected for a particular function.
///
/// We collect information about mod/ref behavior of a function here, both in
/// general and as pertains to specific globals. We only have this detailed
/// information when we know *something* useful about the behavior. If we
/// saturate to fully general mod/ref, we remove the info for the function.
class GlobalsAAResult::FunctionInfo {
  typedef SmallDenseMap<const GlobalValue *, ModRefInfo, 16> GlobalInfoMapType;

  /// Build a wrapper struct that has 8-byte alignment. All heap allocations
  /// should provide this much alignment at least, but this makes it clear we
  /// specifically rely on this amount of alignment.
  struct alignas(8) AlignedMap {
    AlignedMap() {}
    AlignedMap(const AlignedMap &Arg) : Map(Arg.Map) {}
    GlobalInfoMapType Map;
  };

  /// Pointer traits for our aligned map.
  struct AlignedMapPointerTraits {
    static inline void *getAsVoidPointer(AlignedMap *P) { return P; }
    static inline AlignedMap *getFromVoidPointer(void *P) {
      return (AlignedMap *)P;
    }
    enum { NumLowBitsAvailable = 3 };
    static_assert(alignof(AlignedMap) >= (1 << NumLowBitsAvailable),
                  "AlignedMap insufficiently aligned to have enough low bits.");
  };

  /// The bit that flags that this function may read any global. This is
  /// chosen to mix together with ModRefInfo bits.
  /// FIXME: This assumes ModRefInfo lattice will remain 4 bits!
  /// It overlaps with ModRefInfo::Must bit!
  /// FunctionInfo.getModRefInfo() masks out everything except ModRef so
  /// this remains correct, but the Must info is lost.
  enum { MayReadAnyGlobal = 4 };

  /// Checks to document the invariants of the bit packing here.
  static_assert((MayReadAnyGlobal & static_cast<int>(ModRefInfo::MustModRef)) ==
                    0,
                "ModRef and the MayReadAnyGlobal flag bits overlap.");
  static_assert(((MayReadAnyGlobal |
                  static_cast<int>(ModRefInfo::MustModRef)) >>
                 AlignedMapPointerTraits::NumLowBitsAvailable) == 0,
                "Insufficient low bits to store our flag and ModRef info.");
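
  // Editorial illustration (not in the original source): per the asserts
  // above, the mod/ref bits live in the low two bits of the packed integer
  // and the MayReadAnyGlobal flag occupies bit 2, which is why
  // getModRefInfo() below has to mask the flag back out before returning a
  // plain ModRefInfo.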

public:
  FunctionInfo() : Info() {}
  ~FunctionInfo() {
    delete Info.getPointer();
  }
  // Spell out the copy and move constructors and assignment operators to get
  // deep copy semantics and correct move semantics in the face of the
  // pointer-int pair.
  FunctionInfo(const FunctionInfo &Arg)
      : Info(nullptr, Arg.Info.getInt()) {
    if (const auto *ArgPtr = Arg.Info.getPointer())
      Info.setPointer(new AlignedMap(*ArgPtr));
  }
  FunctionInfo(FunctionInfo &&Arg)
      : Info(Arg.Info.getPointer(), Arg.Info.getInt()) {
    Arg.Info.setPointerAndInt(nullptr, 0);
  }
  FunctionInfo &operator=(const FunctionInfo &RHS) {
    delete Info.getPointer();
    Info.setPointerAndInt(nullptr, RHS.Info.getInt());
    if (const auto *RHSPtr = RHS.Info.getPointer())
      Info.setPointer(new AlignedMap(*RHSPtr));
    return *this;
  }
  FunctionInfo &operator=(FunctionInfo &&RHS) {
    delete Info.getPointer();
    Info.setPointerAndInt(RHS.Info.getPointer(), RHS.Info.getInt());
    RHS.Info.setPointerAndInt(nullptr, 0);
    return *this;
  }

  /// This method clears the MayReadAnyGlobal bit added by GlobalsAAResult to
  /// return the corresponding ModRefInfo. It must align in functionality with
  /// clearMust().
  ModRefInfo globalClearMayReadAnyGlobal(int I) const {
    return ModRefInfo((I & static_cast<int>(ModRefInfo::ModRef)) |
                      static_cast<int>(ModRefInfo::NoModRef));
  }

  /// Returns the \c ModRefInfo info for this function.
  ModRefInfo getModRefInfo() const {
    return globalClearMayReadAnyGlobal(Info.getInt());
  }

  /// Adds new \c ModRefInfo for this function to its state.
  void addModRefInfo(ModRefInfo NewMRI) {
    Info.setInt(Info.getInt() | static_cast<int>(setMust(NewMRI)));
  }

  /// Returns whether this function may read any global variable, and we don't
  /// know which global.
  bool mayReadAnyGlobal() const { return Info.getInt() & MayReadAnyGlobal; }

  /// Sets this function as potentially reading from any global.
  void setMayReadAnyGlobal() { Info.setInt(Info.getInt() | MayReadAnyGlobal); }

  /// Returns the \c ModRefInfo info for this function w.r.t. a particular
  /// global, which may be more precise than the general information above.
  ModRefInfo getModRefInfoForGlobal(const GlobalValue &GV) const {
    ModRefInfo GlobalMRI =
        mayReadAnyGlobal() ? ModRefInfo::Ref : ModRefInfo::NoModRef;
    if (AlignedMap *P = Info.getPointer()) {
      auto I = P->Map.find(&GV);
      if (I != P->Map.end())
        GlobalMRI = unionModRef(GlobalMRI, I->second);
    }
    return GlobalMRI;
  }

  /// Add mod/ref info from another function into ours, saturating towards
  /// ModRef.
  void addFunctionInfo(const FunctionInfo &FI) {
    addModRefInfo(FI.getModRefInfo());

    if (FI.mayReadAnyGlobal())
      setMayReadAnyGlobal();

    if (AlignedMap *P = FI.Info.getPointer())
      for (const auto &G : P->Map)
        addModRefInfoForGlobal(*G.first, G.second);
  }

  void addModRefInfoForGlobal(const GlobalValue &GV, ModRefInfo NewMRI) {
    AlignedMap *P = Info.getPointer();
    if (!P) {
      P = new AlignedMap();
      Info.setPointer(P);
    }
    auto &GlobalMRI = P->Map[&GV];
    GlobalMRI = unionModRef(GlobalMRI, NewMRI);
  }

  /// Clear a global's ModRef info. Should be used when a global is being
  /// deleted.
  void eraseModRefInfoForGlobal(const GlobalValue &GV) {
    if (AlignedMap *P = Info.getPointer())
      P->Map.erase(&GV);
  }

private:
  /// All of the information is encoded into a single pointer, with a three bit
  /// integer in the low three bits. The high bit provides a flag for when this
  /// function may read any global. The low two bits are the ModRefInfo. And
  /// the pointer, when non-null, points to a map from GlobalValue to
  /// ModRefInfo specific to that GlobalValue.
  PointerIntPair<AlignedMap *, 3, unsigned, AlignedMapPointerTraits> Info;
};

void GlobalsAAResult::DeletionCallbackHandle::deleted() {
  Value *V = getValPtr();
  if (auto *F = dyn_cast<Function>(V))
    GAR->FunctionInfos.erase(F);

  if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) {
    if (GAR->NonAddressTakenGlobals.erase(GV)) {
      // This global might be an indirect global.  If so, remove it and
      // remove any AllocRelatedValues for it.
      if (GAR->IndirectGlobals.erase(GV)) {
        // Remove any entries in AllocsForIndirectGlobals for this global.
        for (auto I = GAR->AllocsForIndirectGlobals.begin(),
                  E = GAR->AllocsForIndirectGlobals.end();
             I != E; ++I)
          if (I->second == GV)
            GAR->AllocsForIndirectGlobals.erase(I);
      }

      // Scan the function info we have collected and remove this global
      // from all of them.
      for (auto &FIPair : GAR->FunctionInfos)
        FIPair.second.eraseModRefInfoForGlobal(*GV);
    }
  }

  // If this is an allocation related to an indirect global, remove it.
  GAR->AllocsForIndirectGlobals.erase(V);

  // And clear out the handle.
  setValPtr(nullptr);
  GAR->Handles.erase(I);
  // This object is now destroyed!
}

FunctionModRefBehavior GlobalsAAResult::getModRefBehavior(const Function *F) {
  FunctionModRefBehavior Min = FMRB_UnknownModRefBehavior;

  if (FunctionInfo *FI = getFunctionInfo(F)) {
    if (!isModOrRefSet(FI->getModRefInfo()))
      Min = FMRB_DoesNotAccessMemory;
    else if (!isModSet(FI->getModRefInfo()))
      Min = FMRB_OnlyReadsMemory;
  }

  return FunctionModRefBehavior(AAResultBase::getModRefBehavior(F) & Min);
}

FunctionModRefBehavior
GlobalsAAResult::getModRefBehavior(const CallBase *Call) {
  FunctionModRefBehavior Min = FMRB_UnknownModRefBehavior;

  if (!Call->hasOperandBundles())
    if (const Function *F = Call->getCalledFunction())
      if (FunctionInfo *FI = getFunctionInfo(F)) {
        if (!isModOrRefSet(FI->getModRefInfo()))
          Min = FMRB_DoesNotAccessMemory;
        else if (!isModSet(FI->getModRefInfo()))
          Min = FMRB_OnlyReadsMemory;
      }

  return FunctionModRefBehavior(AAResultBase::getModRefBehavior(Call) & Min);
}

/// Returns the function info for the function, or null if we don't have
/// anything useful to say about it.
GlobalsAAResult::FunctionInfo *
GlobalsAAResult::getFunctionInfo(const Function *F) {
  auto I = FunctionInfos.find(F);
  if (I != FunctionInfos.end())
    return &I->second;
  return nullptr;
}

/// AnalyzeGlobals - Scan through the users of all of the internal
/// GlobalValues in the program.  If none of them have their "address taken"
/// (really, their address passed to something nontrivial), record this fact,
/// and record the functions that they are used directly in.
void GlobalsAAResult::AnalyzeGlobals(Module &M) {
  SmallPtrSet<Function *, 32> TrackedFunctions;
  for (Function &F : M)
    if (F.hasLocalLinkage()) {
      if (!AnalyzeUsesOfPointer(&F)) {
        // Remember that we are tracking this global.
        NonAddressTakenGlobals.insert(&F);
        TrackedFunctions.insert(&F);
        Handles.emplace_front(*this, &F);
        Handles.front().I = Handles.begin();
        ++NumNonAddrTakenFunctions;
      } else
        UnknownFunctionsWithLocalLinkage = true;
    }

  SmallPtrSet<Function *, 16> Readers, Writers;
  for (GlobalVariable &GV : M.globals())
    if (GV.hasLocalLinkage()) {
      if (!AnalyzeUsesOfPointer(&GV, &Readers,
                                GV.isConstant() ? nullptr : &Writers)) {
        // Remember that we are tracking this global, and the mod/ref fns
        NonAddressTakenGlobals.insert(&GV);
        Handles.emplace_front(*this, &GV);
        Handles.front().I = Handles.begin();

        for (Function *Reader : Readers) {
          if (TrackedFunctions.insert(Reader).second) {
            Handles.emplace_front(*this, Reader);
            Handles.front().I = Handles.begin();
          }
          FunctionInfos[Reader].addModRefInfoForGlobal(GV, ModRefInfo::Ref);
        }

        if (!GV.isConstant()) // No need to keep track of writers to constants
          for (Function *Writer : Writers) {
            if (TrackedFunctions.insert(Writer).second) {
              Handles.emplace_front(*this, Writer);
              Handles.front().I = Handles.begin();
            }
            FunctionInfos[Writer].addModRefInfoForGlobal(GV, ModRefInfo::Mod);
          }
        ++NumNonAddrTakenGlobalVars;

        // If this global holds a pointer type, see if it is an indirect global.
        if (GV.getValueType()->isPointerTy() &&
            AnalyzeIndirectGlobalMemory(&GV))
          ++NumIndirectGlobalVars;
      }
      Readers.clear();
      Writers.clear();
    }
}

/// AnalyzeUsesOfPointer - Look at all of the users of the specified pointer.
/// If this is used by anything complex (i.e., the address escapes), return
/// true.  Also, while we are at it, keep track of those functions that read and
/// write to the value.
///
/// If OkayStoreDest is non-null, stores into this global are allowed.
bool GlobalsAAResult::AnalyzeUsesOfPointer(Value *V,
                                           SmallPtrSetImpl<Function *> *Readers,
                                           SmallPtrSetImpl<Function *> *Writers,
                                           GlobalValue *OkayStoreDest) {
  if (!V->getType()->isPointerTy())
    return true;

  for (Use &U : V->uses()) {
    User *I = U.getUser();
    if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
      if (Readers)
        Readers->insert(LI->getParent()->getParent());
    } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
      if (V == SI->getOperand(1)) {
        if (Writers)
          Writers->insert(SI->getParent()->getParent());
      } else if (SI->getOperand(1) != OkayStoreDest) {
        return true; // Storing the pointer
      }
    } else if (Operator::getOpcode(I) == Instruction::GetElementPtr) {
      if (AnalyzeUsesOfPointer(I, Readers, Writers))
        return true;
    } else if (Operator::getOpcode(I) == Instruction::BitCast) {
      if (AnalyzeUsesOfPointer(I, Readers, Writers, OkayStoreDest))
        return true;
    } else if (auto *Call = dyn_cast<CallBase>(I)) {
      // Make sure that this is just the function being called, not a pointer
      // being passed into the function.
      if (Call->isDataOperand(&U)) {
        // Detect calls to free.
        if (Call->isArgOperand(&U) &&
            isFreeCall(I, &GetTLI(*Call->getFunction()))) {
          if (Writers)
            Writers->insert(Call->getParent()->getParent());
        } else {
          return true; // Argument of an unknown call.
        }
      }
    } else if (ICmpInst *ICI = dyn_cast<ICmpInst>(I)) {
      if (!isa<ConstantPointerNull>(ICI->getOperand(1)))
        return true; // Allow comparison against null.
    } else if (Constant *C = dyn_cast<Constant>(I)) {
      // Ignore constants which don't have any live uses.
      if (isa<GlobalValue>(C) || C->isConstantUsed())
        return true;
    } else {
      return true;
    }
  }

  return false;
}

/// AnalyzeIndirectGlobalMemory - We found a non-address-taken global variable
/// which holds a pointer type.  See if the global always points to non-aliased
/// heap memory: that is, all initializers of the global are allocations, and
/// those allocations have no use other than initialization of the global.
/// Further, all loads out of GV must directly use the memory, not store the
/// pointer somewhere.  If this is true, we consider the memory pointed to by
/// GV to be owned by GV and can disambiguate other pointers from it.
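///
/// For illustration only (this example is not part of the original source),
/// the pattern being recognized looks like:
///
///   @cache = internal global i8* null
///   ...
///   %mem = call i8* @malloc(i64 64)
///   store i8* %mem, i8** @cache
///
/// where every store into @cache stores freshly allocated memory and every
/// load of @cache is only used directly.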
bool GlobalsAAResult::AnalyzeIndirectGlobalMemory(GlobalVariable *GV) {
  // Keep track of values related to the allocation of the memory, e.g. the
  // value produced by the malloc call and any casts.
  std::vector<Value *> AllocRelatedValues;

  // If the initializer is a valid pointer, bail.
  if (Constant *C = GV->getInitializer())
    if (!C->isNullValue())
      return false;

  // Walk the user list of the global.  If we find anything other than a direct
  // load or store, bail out.
  for (User *U : GV->users()) {
    if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
      // The pointer loaded from the global can only be used in simple ways:
      // we allow addressing of it and loading and storing to it.  We do *not*
      // allow storing the loaded pointer somewhere else or passing to a
      // function.
      if (AnalyzeUsesOfPointer(LI))
        return false; // Loaded pointer escapes.
      // TODO: Could try some IP mod/ref of the loaded pointer.
    } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      // Storing the global itself.
      if (SI->getOperand(0) == GV)
        return false;

      // If storing the null pointer, ignore it.
      if (isa<ConstantPointerNull>(SI->getOperand(0)))
        continue;

      // Check the value being stored.
      Value *Ptr = GetUnderlyingObject(SI->getOperand(0),
                                       GV->getParent()->getDataLayout());

      if (!isAllocLikeFn(Ptr, &GetTLI(*SI->getFunction())))
        return false; // Too hard to analyze.

      // Analyze all uses of the allocation.  If any of them are used in a
      // non-simple way (e.g. stored to another global) bail out.
      if (AnalyzeUsesOfPointer(Ptr, /*Readers*/ nullptr, /*Writers*/ nullptr,
                               GV))
        return false; // Loaded pointer escapes.

      // Remember that this allocation is related to the indirect global.
      AllocRelatedValues.push_back(Ptr);
    } else {
      // Something complex, bail out.
      return false;
    }
  }

  // Okay, this is an indirect global.  Remember all of the allocations for
  // this global in AllocsForIndirectGlobals.
  while (!AllocRelatedValues.empty()) {
    AllocsForIndirectGlobals[AllocRelatedValues.back()] = GV;
    Handles.emplace_front(*this, AllocRelatedValues.back());
    Handles.front().I = Handles.begin();
    AllocRelatedValues.pop_back();
  }
  IndirectGlobals.insert(GV);
  Handles.emplace_front(*this, GV);
  Handles.front().I = Handles.begin();
  return true;
}

void GlobalsAAResult::CollectSCCMembership(CallGraph &CG) {
  // We do a bottom-up SCC traversal of the call graph.  In other words, we
  // visit all callees before callers (leaf-first).
  unsigned SCCID = 0;
  for (scc_iterator<CallGraph *> I = scc_begin(&CG); !I.isAtEnd(); ++I) {
    const std::vector<CallGraphNode *> &SCC = *I;
    assert(!SCC.empty() && "SCC with no functions?");

    for (auto *CGN : SCC)
      if (Function *F = CGN->getFunction())
        FunctionToSCCMap[F] = SCCID;
    ++SCCID;
  }
}

/// AnalyzeCallGraph - At this point, we know the functions where globals are
/// immediately stored to and read from.  Propagate this information up the call
/// graph to all callers and compute the mod/ref info for all memory for each
/// function.
void GlobalsAAResult::AnalyzeCallGraph(CallGraph &CG, Module &M) {
  // We do a bottom-up SCC traversal of the call graph.  In other words, we
  // visit all callees before callers (leaf-first).
  for (scc_iterator<CallGraph *> I = scc_begin(&CG); !I.isAtEnd(); ++I) {
    const std::vector<CallGraphNode *> &SCC = *I;
    assert(!SCC.empty() && "SCC with no functions?");

    Function *F = SCC[0]->getFunction();

    if (!F || !F->isDefinitionExact()) {
      // Calls externally or not exact - can't say anything useful. Remove any
      // existing function records (may have been created when scanning
      // globals).
      for (auto *Node : SCC)
        FunctionInfos.erase(Node->getFunction());
      continue;
    }

    FunctionInfo &FI = FunctionInfos[F];
    Handles.emplace_front(*this, F);
    Handles.front().I = Handles.begin();
    bool KnowNothing = false;

    // Collect the mod/ref properties due to called functions.  We only compute
    // one mod-ref set.
    for (unsigned i = 0, e = SCC.size(); i != e && !KnowNothing; ++i) {
      if (!F) {
        KnowNothing = true;
        break;
      }

      if (F->isDeclaration() || F->hasOptNone()) {
        // Try to get mod/ref behaviour from function attributes.
        if (F->doesNotAccessMemory()) {
          // Can't do better than that!
        } else if (F->onlyReadsMemory()) {
          FI.addModRefInfo(ModRefInfo::Ref);
          if (!F->isIntrinsic() && !F->onlyAccessesArgMemory())
            // This function might call back into the module and read a global -
            // consider every global as possibly being read by this function.
            FI.setMayReadAnyGlobal();
        } else {
          FI.addModRefInfo(ModRefInfo::ModRef);
          if (!F->onlyAccessesArgMemory())
            FI.setMayReadAnyGlobal();
          if (!F->isIntrinsic()) {
            KnowNothing = true;
            break;
          }
        }
        continue;
      }

      for (CallGraphNode::iterator CI = SCC[i]->begin(), E = SCC[i]->end();
           CI != E && !KnowNothing; ++CI)
        if (Function *Callee = CI->second->getFunction()) {
          if (FunctionInfo *CalleeFI = getFunctionInfo(Callee)) {
            // Propagate function effect up.
            FI.addFunctionInfo(*CalleeFI);
          } else {
            // Can't say anything about it.  However, if it is inside our SCC,
            // then nothing needs to be done.
            CallGraphNode *CalleeNode = CG[Callee];
            if (!is_contained(SCC, CalleeNode))
              KnowNothing = true;
          }
        } else {
          KnowNothing = true;
        }
    }

    // If we can't say anything useful about this SCC, remove all SCC functions
    // from the FunctionInfos map.
    if (KnowNothing) {
      for (auto *Node : SCC)
        FunctionInfos.erase(Node->getFunction());
      continue;
    }

    // Scan the function bodies for explicit loads or stores.
    for (auto *Node : SCC) {
      if (isModAndRefSet(FI.getModRefInfo()))
        break; // The mod/ref lattice saturates here.

      // Don't prove any properties based on the implementation of an optnone
      // function. Function attributes were already used as a best approximation
      // above.
      if (Node->getFunction()->hasOptNone())
        continue;

      for (Instruction &I : instructions(Node->getFunction())) {
        if (isModAndRefSet(FI.getModRefInfo()))
          break; // The mod/ref lattice saturates here.

        // We handle calls specially because the graph-relevant aspects are
        // handled above.
        if (auto *Call = dyn_cast<CallBase>(&I)) {
          auto &TLI = GetTLI(*Node->getFunction());
          if (isAllocationFn(Call, &TLI) || isFreeCall(Call, &TLI)) {
            // FIXME: It is completely unclear why this is necessary and not
            // handled by the above graph code.
            FI.addModRefInfo(ModRefInfo::ModRef);
          } else if (Function *Callee = Call->getCalledFunction()) {
            // The callgraph doesn't include intrinsic calls.
            if (Callee->isIntrinsic()) {
              if (isa<DbgInfoIntrinsic>(Call))
                // Don't let dbg intrinsics affect alias info.
                continue;

              FunctionModRefBehavior Behaviour =
                  AAResultBase::getModRefBehavior(Callee);
              FI.addModRefInfo(createModRefInfo(Behaviour));
            }
          }
          continue;
        }

        // For all non-call instructions we use the primary predicates to
        // determine whether they read or write memory.
        if (I.mayReadFromMemory())
          FI.addModRefInfo(ModRefInfo::Ref);
        if (I.mayWriteToMemory())
          FI.addModRefInfo(ModRefInfo::Mod);
      }
    }

    if (!isModSet(FI.getModRefInfo()))
      ++NumReadMemFunctions;
    if (!isModOrRefSet(FI.getModRefInfo()))
      ++NumNoMemFunctions;

    // Finally, now that we know the full effect on this SCC, clone the
    // information to each function in the SCC.
    // FI is a reference into FunctionInfos, so copy it now so that it doesn't
    // get invalidated if DenseMap decides to re-hash.
    FunctionInfo CachedFI = FI;
    for (unsigned i = 1, e = SCC.size(); i != e; ++i)
      FunctionInfos[SCC[i]->getFunction()] = CachedFI;
  }
}

// GV is a non-escaping global. V is a pointer address that has been loaded from.
// If we can prove that V must escape, we can conclude that a load from V cannot
// alias GV.
static bool isNonEscapingGlobalNoAliasWithLoad(const GlobalValue *GV,
                                               const Value *V,
                                               int &Depth,
                                               const DataLayout &DL) {
  SmallPtrSet<const Value *, 8> Visited;
  SmallVector<const Value *, 8> Inputs;
  Visited.insert(V);
  Inputs.push_back(V);
  do {
    const Value *Input = Inputs.pop_back_val();

    if (isa<GlobalValue>(Input) || isa<Argument>(Input) || isa<CallInst>(Input) ||
        isa<InvokeInst>(Input))
      // Arguments to functions or returns from functions are inherently
      // escaping, so we can immediately classify those as not aliasing any
      // non-addr-taken globals.
      //
      // (Transitive) loads from a global are also safe - if this aliased
      // another global, its address would escape, so no alias.
      continue;

    // Recurse through a limited number of selects, loads and PHIs. This is an
    // arbitrary depth of 4, lower numbers could be used to fix compile time
    // issues if needed, but this is generally expected to only be important
    // for small depths.
    if (++Depth > 4)
      return false;

    if (auto *LI = dyn_cast<LoadInst>(Input)) {
      Inputs.push_back(GetUnderlyingObject(LI->getPointerOperand(), DL));
      continue;
    }
    if (auto *SI = dyn_cast<SelectInst>(Input)) {
      const Value *LHS = GetUnderlyingObject(SI->getTrueValue(), DL);
      const Value *RHS = GetUnderlyingObject(SI->getFalseValue(), DL);
      if (Visited.insert(LHS).second)
        Inputs.push_back(LHS);
      if (Visited.insert(RHS).second)
        Inputs.push_back(RHS);
      continue;
    }
    if (auto *PN = dyn_cast<PHINode>(Input)) {
      for (const Value *Op : PN->incoming_values()) {
        Op = GetUnderlyingObject(Op, DL);
        if (Visited.insert(Op).second)
          Inputs.push_back(Op);
      }
      continue;
    }

    return false;
  } while (!Inputs.empty());

  // All inputs were known to be no-alias.
  return true;
}

// There are particular cases where we can conclude no-alias between
// a non-addr-taken global and some other underlying object. Specifically,
// a non-addr-taken global is known to not be escaped from any function. It is
// also incorrect for a transformation to introduce an escape of a global in
// a way that is observable when it was not there previously. One function
// being transformed to introduce an escape which could possibly be observed
// (via loading from a global or the return value for example) within another
// function is never safe. If the observation is made through non-atomic
// operations on different threads, it is a data-race and UB. If the
// observation is well defined, by being observed the transformation would have
// changed program behavior by introducing the observed escape, making it an
// invalid transform.
//
// This property does require that transformations which *temporarily* escape
// a global that was not previously escaped, prior to restoring it, cannot rely
// on the results of GMR::alias. This seems a reasonable restriction, although
// currently there is no way to enforce it. There is also no realistic
// optimization pass that would make this mistake. The closest example is
// a transformation pass which does reg2mem of SSA values but stores them into
// global variables temporarily before restoring the global variable's value.
// This could be useful to expose "benign" races for example. However, it seems
// reasonable to require that a pass which introduces escapes of global
// variables in this way either not trust AA results while the escape is
// active, or be forced to operate as a module pass that cannot co-exist
// with an alias analysis such as GMR.
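//
// As a minimal illustration (added editorially, not from the original
// source): given
//
//   static int G;
//   void f(int *P) { G = 1; *P = 2; }
//
// the store through P cannot modify G unless G's address escaped somewhere
// else, because P must ultimately be rooted in an argument, a call result,
// or memory loaded from elsewhere, which are exactly the escaping roots the
// recursion below accepts.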
bool GlobalsAAResult::isNonEscapingGlobalNoAlias(const GlobalValue *GV,
                                                 const Value *V) {
  // In order to know that the underlying object cannot alias the
  // non-addr-taken global, we must know that it would have to be an escape.
  // Thus if the underlying object is a function argument, a load from
  // a global, or the return of a function, it cannot alias. We can also
  // recurse through PHI nodes and select nodes provided all of their inputs
  // resolve to one of these known-escaping roots.
  SmallPtrSet<const Value *, 8> Visited;
  SmallVector<const Value *, 8> Inputs;
  Visited.insert(V);
  Inputs.push_back(V);
  int Depth = 0;
  do {
    const Value *Input = Inputs.pop_back_val();

    if (auto *InputGV = dyn_cast<GlobalValue>(Input)) {
      // If one input is the very global we're querying against, then we can't
      // conclude no-alias.
      if (InputGV == GV)
        return false;

      // Distinct GlobalVariables never alias, unless overridden or zero-sized.
      // FIXME: The condition can be refined, but be conservative for now.
      auto *GVar = dyn_cast<GlobalVariable>(GV);
      auto *InputGVar = dyn_cast<GlobalVariable>(InputGV);
      if (GVar && InputGVar &&
          !GVar->isDeclaration() && !InputGVar->isDeclaration() &&
          !GVar->isInterposable() && !InputGVar->isInterposable()) {
        Type *GVType = GVar->getInitializer()->getType();
        Type *InputGVType = InputGVar->getInitializer()->getType();
        if (GVType->isSized() && InputGVType->isSized() &&
            (DL.getTypeAllocSize(GVType) > 0) &&
            (DL.getTypeAllocSize(InputGVType) > 0))
          continue;
      }

      // Conservatively return false, even though we could be smarter
      // (e.g. look through GlobalAliases).
      return false;
    }

    if (isa<Argument>(Input) || isa<CallInst>(Input) ||
        isa<InvokeInst>(Input)) {
      // Arguments to functions or returns from functions are inherently
      // escaping, so we can immediately classify those as not aliasing any
      // non-addr-taken globals.
      continue;
    }

    // Recurse through a limited number of selects, loads and PHIs. This is an
    // arbitrary depth of 4, lower numbers could be used to fix compile time
    // issues if needed, but this is generally expected to only be important
    // for small depths.
    if (++Depth > 4)
      return false;

    if (auto *LI = dyn_cast<LoadInst>(Input)) {
      // A pointer loaded from a global would have been captured, and we know
      // that the global is non-escaping, so no alias.
      const Value *Ptr = GetUnderlyingObject(LI->getPointerOperand(), DL);
      if (isNonEscapingGlobalNoAliasWithLoad(GV, Ptr, Depth, DL))
        // The load does not alias with GV.
        continue;
      // Otherwise, a load could come from anywhere, so bail.
      return false;
    }
    if (auto *SI = dyn_cast<SelectInst>(Input)) {
      const Value *LHS = GetUnderlyingObject(SI->getTrueValue(), DL);
      const Value *RHS = GetUnderlyingObject(SI->getFalseValue(), DL);
      if (Visited.insert(LHS).second)
        Inputs.push_back(LHS);
      if (Visited.insert(RHS).second)
        Inputs.push_back(RHS);
      continue;
    }
    if (auto *PN = dyn_cast<PHINode>(Input)) {
      for (const Value *Op : PN->incoming_values()) {
        Op = GetUnderlyingObject(Op, DL);
        if (Visited.insert(Op).second)
          Inputs.push_back(Op);
      }
      continue;
    }

    // FIXME: It would be good to handle other obvious no-alias cases here, but
    // it isn't clear how to do so reasonably without building a small version
    // of BasicAA into this code. We could recurse into AAResultBase::alias
    // here but that seems likely to go poorly as we're inside the
    // implementation of such a query. Until then, just conservatively return
    // false.
    return false;
  } while (!Inputs.empty());

  // If all the inputs to V were definitively no-alias, then V is no-alias.
  return true;
}

/// alias - If one of the pointers is to a global that we are tracking, and the
/// other is some random pointer, we know there cannot be an alias, because the
/// address of the global isn't taken.
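///
/// For example (illustrative note, not part of the original source): if @g is
/// an internal global whose address is never taken, then a pointer that only
/// reaches this query through a function argument or a call return value
/// cannot alias @g, since that would require @g's address to have escaped.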
AliasResult GlobalsAAResult::alias(const MemoryLocation &LocA,
                                   const MemoryLocation &LocB,
                                   AAQueryInfo &AAQI) {
  // Get the base object these pointers point to.
  const Value *UV1 = GetUnderlyingObject(LocA.Ptr, DL);
  const Value *UV2 = GetUnderlyingObject(LocB.Ptr, DL);

  // If either of the underlying values is a global, they may be non-addr-taken
  // globals, which we can answer queries about.
  const GlobalValue *GV1 = dyn_cast<GlobalValue>(UV1);
  const GlobalValue *GV2 = dyn_cast<GlobalValue>(UV2);
  if (GV1 || GV2) {
    // If the global's address is taken, pretend we don't know it's a pointer to
    // the global.
    if (GV1 && !NonAddressTakenGlobals.count(GV1))
      GV1 = nullptr;
    if (GV2 && !NonAddressTakenGlobals.count(GV2))
      GV2 = nullptr;

    // If the two pointers are derived from two different non-addr-taken
    // globals we know these can't alias.
    if (GV1 && GV2 && GV1 != GV2)
      return NoAlias;

    // If one is and the other isn't, it isn't strictly safe but we can fake
    // this result if necessary for performance. This does not appear to be
    // a common problem in practice.
    if (EnableUnsafeGlobalsModRefAliasResults)
      if ((GV1 || GV2) && GV1 != GV2)
        return NoAlias;

    // Check for a special case where a non-escaping global can be used to
    // conclude no-alias.
    if ((GV1 || GV2) && GV1 != GV2) {
      const GlobalValue *GV = GV1 ? GV1 : GV2;
      const Value *UV = GV1 ? UV2 : UV1;
      if (isNonEscapingGlobalNoAlias(GV, UV))
        return NoAlias;
    }

    // Otherwise if they are both derived from the same non-addr-taken global,
    // we can't know the two accesses don't overlap.
  }

  // These pointers may be based on the memory owned by an indirect global.  If
  // so, we may be able to handle this.  First check to see if the base pointer
  // is a direct load from an indirect global.
  GV1 = GV2 = nullptr;
  if (const LoadInst *LI = dyn_cast<LoadInst>(UV1))
    if (GlobalVariable *GV = dyn_cast<GlobalVariable>(LI->getOperand(0)))
      if (IndirectGlobals.count(GV))
        GV1 = GV;
  if (const LoadInst *LI = dyn_cast<LoadInst>(UV2))
    if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(LI->getOperand(0)))
      if (IndirectGlobals.count(GV))
        GV2 = GV;

  // These pointers may also be from an allocation for the indirect global.  If
  // so, also handle them.
  if (!GV1)
    GV1 = AllocsForIndirectGlobals.lookup(UV1);
  if (!GV2)
    GV2 = AllocsForIndirectGlobals.lookup(UV2);

  // Now that we know whether the two pointers are related to indirect globals,
  // use this to disambiguate the pointers. If the pointers are based on
  // different indirect globals they cannot alias.
  if (GV1 && GV2 && GV1 != GV2)
    return NoAlias;

  // If one is based on an indirect global and the other isn't, it isn't
  // strictly safe but we can fake this result if necessary for performance.
  // This does not appear to be a common problem in practice.
  if (EnableUnsafeGlobalsModRefAliasResults)
    if ((GV1 || GV2) && GV1 != GV2)
      return NoAlias;

  return AAResultBase::alias(LocA, LocB, AAQI);
}

ModRefInfo GlobalsAAResult::getModRefInfoForArgument(const CallBase *Call,
                                                     const GlobalValue *GV,
                                                     AAQueryInfo &AAQI) {
  if (Call->doesNotAccessMemory())
    return ModRefInfo::NoModRef;
  ModRefInfo ConservativeResult =
      Call->onlyReadsMemory() ? ModRefInfo::Ref : ModRefInfo::ModRef;

  // Iterate through all the arguments to the called function. If any argument
  // is based on GV, return the conservative result.
  for (auto &A : Call->args()) {
    SmallVector<const Value*, 4> Objects;
    GetUnderlyingObjects(A, Objects, DL);

    // All objects must be identified.
    if (!all_of(Objects, isIdentifiedObject) &&
        // Try ::alias to see if all objects are known not to alias GV.
        !all_of(Objects, [&](const Value *V) {
          return this->alias(MemoryLocation(V), MemoryLocation(GV), AAQI) ==
                 NoAlias;
        }))
      return ConservativeResult;

    if (is_contained(Objects, GV))
      return ConservativeResult;
  }

  // We identified all objects in the argument list, and none of them were GV.
  return ModRefInfo::NoModRef;
}

ModRefInfo GlobalsAAResult::getModRefInfo(const CallBase *Call,
                                          const MemoryLocation &Loc,
                                          AAQueryInfo &AAQI) {
  ModRefInfo Known = ModRefInfo::ModRef;

  // If we are asking for mod/ref info of a direct call with a pointer to a
  // global we are tracking, return information if we have it.
  if (const GlobalValue *GV =
          dyn_cast<GlobalValue>(GetUnderlyingObject(Loc.Ptr, DL)))
    // If GV is internal to this IR and there is no function with local linkage
    // that has had its address taken, keep looking for a tighter ModRefInfo.
    if (GV->hasLocalLinkage() && !UnknownFunctionsWithLocalLinkage)
      if (const Function *F = Call->getCalledFunction())
        if (NonAddressTakenGlobals.count(GV))
          if (const FunctionInfo *FI = getFunctionInfo(F))
            Known = unionModRef(FI->getModRefInfoForGlobal(*GV),
                                getModRefInfoForArgument(Call, GV, AAQI));

  if (!isModOrRefSet(Known))
    return ModRefInfo::NoModRef; // No need to query other mod/ref analyses
  return intersectModRef(Known, AAResultBase::getModRefInfo(Call, Loc, AAQI));
}

GlobalsAAResult::GlobalsAAResult(
    const DataLayout &DL,
    std::function<const TargetLibraryInfo &(Function &F)> GetTLI)
    : AAResultBase(), DL(DL), GetTLI(std::move(GetTLI)) {}

GlobalsAAResult::GlobalsAAResult(GlobalsAAResult &&Arg)
    : AAResultBase(std::move(Arg)), DL(Arg.DL), GetTLI(std::move(Arg.GetTLI)),
      NonAddressTakenGlobals(std::move(Arg.NonAddressTakenGlobals)),
      IndirectGlobals(std::move(Arg.IndirectGlobals)),
      AllocsForIndirectGlobals(std::move(Arg.AllocsForIndirectGlobals)),
      FunctionInfos(std::move(Arg.FunctionInfos)),
      Handles(std::move(Arg.Handles)) {
  // Update the parent for each DeletionCallbackHandle.
  for (auto &H : Handles) {
    assert(H.GAR == &Arg);
    H.GAR = this;
  }
}

GlobalsAAResult::~GlobalsAAResult() {}

/*static*/ GlobalsAAResult GlobalsAAResult::analyzeModule(
    Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI,
    CallGraph &CG) {
  GlobalsAAResult Result(M.getDataLayout(), GetTLI);

  // Discover which functions aren't recursive, to feed into AnalyzeGlobals.
  Result.CollectSCCMembership(CG);

  // Find non-addr taken globals.
  Result.AnalyzeGlobals(M);

  // Propagate on CG.
  Result.AnalyzeCallGraph(CG, M);

  return Result;
}

AnalysisKey GlobalsAA::Key;

GlobalsAAResult GlobalsAA::run(Module &M, ModuleAnalysisManager &AM) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };
  return GlobalsAAResult::analyzeModule(M, GetTLI,
                                        AM.getResult<CallGraphAnalysis>(M));
}
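
// Usage sketch (editorial note, not from the original source): with the new
// pass manager this analysis is typically requested along the lines of
//   opt -aa-pipeline=globals-aa -passes='require<globals-aa>,gvn' input.ll
// so that the module-level result is computed before the passes that query it.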

char GlobalsAAWrapperPass::ID = 0;
INITIALIZE_PASS_BEGIN(GlobalsAAWrapperPass, "globals-aa",
                      "Globals Alias Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(CallGraphWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(GlobalsAAWrapperPass, "globals-aa",
                    "Globals Alias Analysis", false, true)

ModulePass *llvm::createGlobalsAAWrapperPass() {
  return new GlobalsAAWrapperPass();
}

GlobalsAAWrapperPass::GlobalsAAWrapperPass() : ModulePass(ID) {
  initializeGlobalsAAWrapperPassPass(*PassRegistry::getPassRegistry());
}

bool GlobalsAAWrapperPass::runOnModule(Module &M) {
  auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
    return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
  };
  Result.reset(new GlobalsAAResult(GlobalsAAResult::analyzeModule(
      M, GetTLI, getAnalysis<CallGraphWrapperPass>().getCallGraph())));
  return false;
}

bool GlobalsAAWrapperPass::doFinalization(Module &M) {
  Result.reset();
  return false;
}

void GlobalsAAWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<CallGraphWrapperPass>();
  AU.addRequired<TargetLibraryInfoWrapperPass>();
}