//===- ModuleSummaryAnalysis.cpp - Module summary index builder -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass builds a ModuleSummaryIndex object for the module, to be written
// to bitcode or LLVM assembly.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/ModuleSummaryAnalysis.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/IndirectCallPromotionAnalysis.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/MemoryProfileInfo.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/StackSafetyAnalysis.h"
#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ModuleSummaryIndex.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/InitializePasses.h"
#include "llvm/Object/ModuleSymbolTable.h"
#include "llvm/Object/SymbolicFile.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FileSystem.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

using namespace llvm;
using namespace llvm::memprof;

#define DEBUG_TYPE "module-summary-analysis"

// Option to force edges cold which will block importing when the
// -import-cold-multiplier is set to 0. Useful for debugging.
namespace llvm {
FunctionSummary::ForceSummaryHotnessType ForceSummaryEdgesCold =
    FunctionSummary::FSHT_None;
} // namespace llvm

static cl::opt<FunctionSummary::ForceSummaryHotnessType, true> FSEC(
    "force-summary-edges-cold", cl::Hidden, cl::location(ForceSummaryEdgesCold),
    cl::desc("Force all edges in the function summary to cold"),
    cl::values(clEnumValN(FunctionSummary::FSHT_None, "none", "None."),
               clEnumValN(FunctionSummary::FSHT_AllNonCritical,
                          "all-non-critical", "All non-critical edges."),
               clEnumValN(FunctionSummary::FSHT_All, "all", "All edges.")));
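// For example, passing -force-summary-edges-cold=all-non-critical (together
// with -import-cold-multiplier=0) can be used to suppress importing along all
// but critical call edges, which helps isolate importing-related issues.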

static cl::opt<std::string> ModuleSummaryDotFile(
    "module-summary-dot-file", cl::Hidden, cl::value_desc("filename"),
    cl::desc("File to emit dot graph of new summary into"));
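// For example, -module-summary-dot-file=summary.dot writes the summary
// call/reference graph in GraphViz dot format for offline inspection.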

// Walk through the operands of a given user via worklist iteration and
// populate the set of GlobalValue references encountered. Invoked either on an
// Instruction or a GlobalVariable (which walks its initializer).
// Return true if any of the operands contains a blockaddress. This is
// important to know when computing the summary for a global variable, because
// if the variable references a basic block address we cannot import it
// separately from the function containing that basic block. For simplicity we
// currently don't import such global variables at all. When importing a
// function we aren't interested in whether any instruction in it takes the
// address of a basic block, because an instruction can only take the address
// of a basic block located in the same function.
static bool findRefEdges(ModuleSummaryIndex &Index, const User *CurUser,
                         SetVector<ValueInfo> &RefEdges,
                         SmallPtrSet<const User *, 8> &Visited) {
  bool HasBlockAddress = false;
  SmallVector<const User *, 32> Worklist;
  if (Visited.insert(CurUser).second)
    Worklist.push_back(CurUser);

  while (!Worklist.empty()) {
    const User *U = Worklist.pop_back_val();
    const auto *CB = dyn_cast<CallBase>(U);

    for (const auto &OI : U->operands()) {
      const User *Operand = dyn_cast<User>(OI);
      if (!Operand)
        continue;
      if (isa<BlockAddress>(Operand)) {
        HasBlockAddress = true;
        continue;
      }
      if (auto *GV = dyn_cast<GlobalValue>(Operand)) {
        // We have a reference to a global value. This should be added to
        // the reference set unless it is a callee. Callees are handled
        // specially by WriteFunction and are added to a separate list.
        if (!(CB && CB->isCallee(&OI)))
          RefEdges.insert(Index.getOrInsertValueInfo(GV));
        continue;
      }
      if (Visited.insert(Operand).second)
        Worklist.push_back(Operand);
    }
  }
  return HasBlockAddress;
}
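// For example, for a global whose initializer is
//   @g = global ptr getelementptr inbounds ([4 x i32], ptr @arr, i64 0, i64 1)
// the worklist walks through the constant expression and records a reference
// edge from @g to @arr (names here are illustrative only).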

static CalleeInfo::HotnessType getHotness(uint64_t ProfileCount,
                                          ProfileSummaryInfo *PSI) {
  if (!PSI)
    return CalleeInfo::HotnessType::Unknown;
  if (PSI->isHotCount(ProfileCount))
    return CalleeInfo::HotnessType::Hot;
  if (PSI->isColdCount(ProfileCount))
    return CalleeInfo::HotnessType::Cold;
  return CalleeInfo::HotnessType::None;
}

static bool isNonRenamableLocal(const GlobalValue &GV) {
  return GV.hasSection() && GV.hasLocalLinkage();
}

/// Determine whether this call has all constant integer arguments (excluding
/// "this") and summarize it to VCalls or ConstVCalls as appropriate.
static void addVCallToSet(DevirtCallSite Call, GlobalValue::GUID Guid,
                          SetVector<FunctionSummary::VFuncId> &VCalls,
                          SetVector<FunctionSummary::ConstVCall> &ConstVCalls) {
  std::vector<uint64_t> Args;
  // Start from the second argument to skip the "this" pointer.
  for (auto &Arg : drop_begin(Call.CB.args())) {
    auto *CI = dyn_cast<ConstantInt>(Arg);
    if (!CI || CI->getBitWidth() > 64) {
      VCalls.insert({Guid, Call.Offset});
      return;
    }
    Args.push_back(CI->getZExtValue());
  }
  ConstVCalls.insert({{Guid, Call.Offset}, std::move(Args)});
}
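// For example, a devirtualizable call p->foo(42) whose only argument (after
// "this") is the constant 42 is summarized as a ConstVCall {Guid, Offset, {42}},
// while p->foo(n) with a non-constant n only yields a VCall {Guid, Offset}.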

/// If this intrinsic call requires that we add information to the function
/// summary, do so via the non-constant reference arguments.
static void addIntrinsicToSummary(
    const CallInst *CI, SetVector<GlobalValue::GUID> &TypeTests,
    SetVector<FunctionSummary::VFuncId> &TypeTestAssumeVCalls,
    SetVector<FunctionSummary::VFuncId> &TypeCheckedLoadVCalls,
    SetVector<FunctionSummary::ConstVCall> &TypeTestAssumeConstVCalls,
    SetVector<FunctionSummary::ConstVCall> &TypeCheckedLoadConstVCalls,
    DominatorTree &DT) {
  switch (CI->getCalledFunction()->getIntrinsicID()) {
  case Intrinsic::type_test:
  case Intrinsic::public_type_test: {
    auto *TypeMDVal = cast<MetadataAsValue>(CI->getArgOperand(1));
    auto *TypeId = dyn_cast<MDString>(TypeMDVal->getMetadata());
    if (!TypeId)
      break;
    GlobalValue::GUID Guid = GlobalValue::getGUID(TypeId->getString());

    // Produce a summary from type.test intrinsics. We only summarize type.test
    // intrinsics that are used other than by an llvm.assume intrinsic.
    // Intrinsics that are assumed are relevant only to the devirtualization
    // pass, not the type test lowering pass.
    bool HasNonAssumeUses = llvm::any_of(CI->uses(), [](const Use &CIU) {
      return !isa<AssumeInst>(CIU.getUser());
    });
    if (HasNonAssumeUses)
      TypeTests.insert(Guid);

    SmallVector<DevirtCallSite, 4> DevirtCalls;
    SmallVector<CallInst *, 4> Assumes;
    findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI, DT);
    for (auto &Call : DevirtCalls)
      addVCallToSet(Call, Guid, TypeTestAssumeVCalls,
                    TypeTestAssumeConstVCalls);

    break;
  }

  case Intrinsic::type_checked_load: {
    auto *TypeMDVal = cast<MetadataAsValue>(CI->getArgOperand(2));
    auto *TypeId = dyn_cast<MDString>(TypeMDVal->getMetadata());
    if (!TypeId)
      break;
    GlobalValue::GUID Guid = GlobalValue::getGUID(TypeId->getString());

    SmallVector<DevirtCallSite, 4> DevirtCalls;
    SmallVector<Instruction *, 4> LoadedPtrs;
    SmallVector<Instruction *, 4> Preds;
    bool HasNonCallUses = false;
    findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
                                               HasNonCallUses, CI, DT);
    // Any non-call uses of the result of llvm.type.checked.load will
    // prevent us from optimizing away the llvm.type.test.
    if (HasNonCallUses)
      TypeTests.insert(Guid);
    for (auto &Call : DevirtCalls)
      addVCallToSet(Call, Guid, TypeCheckedLoadVCalls,
                    TypeCheckedLoadConstVCalls);

    break;
  }
  default:
    break;
  }
}
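// As a rough illustration (exact types depend on the frontend), a virtual call
// guarded for CFI/whole-program devirtualization typically appears as
//   %ok = call i1 @llvm.type.test(ptr %vtable, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %ok)
// or, when using llvm.type.checked.load, as
//   %pair = call {ptr, i1} @llvm.type.checked.load(ptr %vtable, i32 0,
//                                                  metadata !"_ZTS1A")
// and the metadata string ("_ZTS1A" here is illustrative) is what gets hashed
// into the GUID recorded above.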

static bool isNonVolatileLoad(const Instruction *I) {
  if (const auto *LI = dyn_cast<LoadInst>(I))
    return !LI->isVolatile();

  return false;
}

static bool isNonVolatileStore(const Instruction *I) {
  if (const auto *SI = dyn_cast<StoreInst>(I))
    return !SI->isVolatile();

  return false;
}

// Returns true if the function definition must be unreachable.
//
// Note if this helper function returns true, `F` is guaranteed
// to be unreachable; if it returns false, `F` might still
// be unreachable but not covered by this helper function.
static bool mustBeUnreachableFunction(const Function &F) {
  // A function must be unreachable if its entry block ends with an
  // 'unreachable'.
  assert(!F.isDeclaration());
  return isa<UnreachableInst>(F.getEntryBlock().getTerminator());
}

static void computeFunctionSummary(
    ModuleSummaryIndex &Index, const Module &M, const Function &F,
    BlockFrequencyInfo *BFI, ProfileSummaryInfo *PSI, DominatorTree &DT,
    bool HasLocalsInUsedOrAsm, DenseSet<GlobalValue::GUID> &CantBePromoted,
    bool IsThinLTO,
    std::function<const StackSafetyInfo *(const Function &F)> GetSSICallback) {
  // Summary not currently supported for anonymous functions; they should
  // have been named.
  assert(F.hasName());

  unsigned NumInsts = 0;
  // Map from callee ValueId to profile count. Used to accumulate profile
  // counts for all static calls to a given callee.
  MapVector<ValueInfo, CalleeInfo> CallGraphEdges;
  SetVector<ValueInfo> RefEdges, LoadRefEdges, StoreRefEdges;
  SetVector<GlobalValue::GUID> TypeTests;
  SetVector<FunctionSummary::VFuncId> TypeTestAssumeVCalls,
      TypeCheckedLoadVCalls;
  SetVector<FunctionSummary::ConstVCall> TypeTestAssumeConstVCalls,
      TypeCheckedLoadConstVCalls;
  ICallPromotionAnalysis ICallAnalysis;
  SmallPtrSet<const User *, 8> Visited;

  // Add personality function, prefix data and prologue data to function's ref
  // list.
  findRefEdges(Index, &F, RefEdges, Visited);
  std::vector<const Instruction *> NonVolatileLoads;
  std::vector<const Instruction *> NonVolatileStores;

  std::vector<CallsiteInfo> Callsites;
  std::vector<AllocInfo> Allocs;

  bool HasInlineAsmMaybeReferencingInternal = false;
  bool HasIndirBranchToBlockAddress = false;
  bool HasUnknownCall = false;
  bool MayThrow = false;
  for (const BasicBlock &BB : F) {
    // We don't allow inlining of a function with an indirect branch to a
    // blockaddress. If the blockaddress escapes the function, e.g., via a
    // global variable, inlining may lead to an invalid cross-function
    // reference. So we shouldn't import such a function either.
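    // For example, if the blockaddress is stored to a global, as in
    //   @tbl = global ptr blockaddress(@f, %bb)   ; names are illustrative
    // it has a non-callbr user and escapes, whereas the callbr uses produced
    // for "asm goto" do not cause this.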
    if (BB.hasAddressTaken()) {
      for (User *U : BlockAddress::get(const_cast<BasicBlock *>(&BB))->users())
        if (!isa<CallBrInst>(*U)) {
          HasIndirBranchToBlockAddress = true;
          break;
        }
    }

    for (const Instruction &I : BB) {
      if (I.isDebugOrPseudoInst())
        continue;
      ++NumInsts;

      // A regular LTO module doesn't participate in ThinLTO importing, so none
      // of its references can be marked read- or write-only, since that would
      // require importing the variable as a local copy.
      if (IsThinLTO) {
        if (isNonVolatileLoad(&I)) {
          // Postpone processing of non-volatile load instructions.
          // See comments below.
          Visited.insert(&I);
          NonVolatileLoads.push_back(&I);
          continue;
        } else if (isNonVolatileStore(&I)) {
          Visited.insert(&I);
          NonVolatileStores.push_back(&I);
          // All references from the second operand of a store (the destination
          // address) can be considered write-only if they're not referenced by
          // any non-store instruction. References from the first operand of a
          // store (the stored value) can't be treated as either read- or
          // write-only, so we add them to RefEdges as we do with all other
          // instructions except non-volatile loads.
          Value *Stored = I.getOperand(0);
          if (auto *GV = dyn_cast<GlobalValue>(Stored))
            // findRefEdges will try to examine GV operands, so instead
            // of calling it we should add GV to RefEdges directly.
            RefEdges.insert(Index.getOrInsertValueInfo(GV));
          else if (auto *U = dyn_cast<User>(Stored))
            findRefEdges(Index, U, RefEdges, Visited);
          continue;
        }
      }
      findRefEdges(Index, &I, RefEdges, Visited);
      const auto *CB = dyn_cast<CallBase>(&I);
      if (!CB) {
        if (I.mayThrow())
          MayThrow = true;
        continue;
      }

      const auto *CI = dyn_cast<CallInst>(&I);
      // Since we don't know exactly which local values are referenced in
      // inline assembly, conservatively mark the function as possibly
      // referencing a local value from inline assembly to ensure we don't
      // export a reference (which would require renaming and promotion of the
      // referenced value).
      if (HasLocalsInUsedOrAsm && CI && CI->isInlineAsm())
        HasInlineAsmMaybeReferencingInternal = true;

      auto *CalledValue = CB->getCalledOperand();
      auto *CalledFunction = CB->getCalledFunction();
      if (CalledValue && !CalledFunction) {
        CalledValue = CalledValue->stripPointerCasts();
        // Stripping pointer casts can reveal a called function.
        CalledFunction = dyn_cast<Function>(CalledValue);
      }
      // Check if this is an alias to a function. If so, get the
      // called aliasee for the checks below.
      if (auto *GA = dyn_cast<GlobalAlias>(CalledValue)) {
        assert(!CalledFunction &&
               "Expected null called function in callsite for alias");
        CalledFunction = dyn_cast<Function>(GA->getAliaseeObject());
      }
      // Check if this is a direct call to a known function or a known
      // intrinsic, or an indirect call with profile data.
      if (CalledFunction) {
        if (CI && CalledFunction->isIntrinsic()) {
          addIntrinsicToSummary(
              CI, TypeTests, TypeTestAssumeVCalls, TypeCheckedLoadVCalls,
              TypeTestAssumeConstVCalls, TypeCheckedLoadConstVCalls, DT);
          continue;
        }
        // We should have named any anonymous globals.
        assert(CalledFunction->hasName());
        auto ScaledCount = PSI->getProfileCount(*CB, BFI);
        auto Hotness = ScaledCount ? getHotness(*ScaledCount, PSI)
                                   : CalleeInfo::HotnessType::Unknown;
        if (ForceSummaryEdgesCold != FunctionSummary::FSHT_None)
          Hotness = CalleeInfo::HotnessType::Cold;

        // Use the original CalledValue, in case it was an alias. We want
        // to record the call edge to the alias in that case. Eventually
        // an alias summary will be created to associate the alias and
        // aliasee.
        auto &ValueInfo = CallGraphEdges[Index.getOrInsertValueInfo(
            cast<GlobalValue>(CalledValue))];
        ValueInfo.updateHotness(Hotness);
        // Add the relative block frequency to CalleeInfo if there is no
        // profile information.
        if (BFI != nullptr && Hotness == CalleeInfo::HotnessType::Unknown) {
          uint64_t BBFreq = BFI->getBlockFreq(&BB).getFrequency();
          uint64_t EntryFreq = BFI->getEntryFreq();
          ValueInfo.updateRelBlockFreq(BBFreq, EntryFreq);
        }
      } else {
        HasUnknownCall = true;
        // Skip inline assembly calls.
        if (CI && CI->isInlineAsm())
          continue;
        // Skip direct calls.
        if (!CalledValue || isa<Constant>(CalledValue))
          continue;

        // Check if the instruction has !callees metadata. If so, add callees
        // to CallGraphEdges to reflect the references from the metadata, and
        // to enable importing for subsequent indirect call promotion and
        // inlining.
        if (auto *MD = I.getMetadata(LLVMContext::MD_callees)) {
          for (const auto &Op : MD->operands()) {
            Function *Callee = mdconst::extract_or_null<Function>(Op);
            if (Callee)
              CallGraphEdges[Index.getOrInsertValueInfo(Callee)];
          }
        }

        uint32_t NumVals, NumCandidates;
        uint64_t TotalCount;
        auto CandidateProfileData =
            ICallAnalysis.getPromotionCandidatesForInstruction(
                &I, NumVals, TotalCount, NumCandidates);
        for (const auto &Candidate : CandidateProfileData)
          CallGraphEdges[Index.getOrInsertValueInfo(Candidate.Value)]
              .updateHotness(getHotness(Candidate.Count, PSI));
      }

      // TODO: Skip indirect calls for now. Need to handle these better, likely
      // by creating multiple Callsites, one per target, then speculatively
      // devirtualize while applying clone info in the ThinLTO backends. This
      // will also be important because we will have a different set of clone
      // versions per target. This handling needs to match that in the ThinLTO
      // backend so we handle things consistently for matching of callsite
      // summaries to instructions.
      if (!CalledFunction)
        continue;

      // Compute the list of stack ids first (so we can trim them from the
      // stack ids on any MIBs).
      CallStack<MDNode, MDNode::op_iterator> InstCallsite(
          I.getMetadata(LLVMContext::MD_callsite));
      auto *MemProfMD = I.getMetadata(LLVMContext::MD_memprof);
      if (MemProfMD) {
        std::vector<MIBInfo> MIBs;
        for (auto &MDOp : MemProfMD->operands()) {
          auto *MIBMD = cast<const MDNode>(MDOp);
          MDNode *StackNode = getMIBStackNode(MIBMD);
          assert(StackNode);
          SmallVector<unsigned> StackIdIndices;
          CallStack<MDNode, MDNode::op_iterator> StackContext(StackNode);
          // Collapse out any frames that are also on the allocation call's
          // callsite metadata (i.e. were inlined into the allocation call).
          for (auto ContextIter =
                   StackContext.beginAfterSharedPrefix(InstCallsite);
               ContextIter != StackContext.end(); ++ContextIter) {
            unsigned StackIdIdx = Index.addOrGetStackIdIndex(*ContextIter);
            // If this is a direct recursion, simply skip the duplicate
            // entries. If this is mutual recursion, handling is left to
            // the LTO link analysis client.
            if (StackIdIndices.empty() || StackIdIndices.back() != StackIdIdx)
              StackIdIndices.push_back(StackIdIdx);
          }
          MIBs.push_back(
              MIBInfo(getMIBAllocType(MIBMD), std::move(StackIdIndices)));
        }
        Allocs.push_back(AllocInfo(std::move(MIBs)));
      } else if (!InstCallsite.empty()) {
        SmallVector<unsigned> StackIdIndices;
        for (auto StackId : InstCallsite)
          StackIdIndices.push_back(Index.addOrGetStackIdIndex(StackId));
        // Use the original CalledValue, in case it was an alias. We want
        // to record the call edge to the alias in that case. Eventually
        // an alias summary will be created to associate the alias and
        // aliasee.
        auto CalleeValueInfo =
            Index.getOrInsertValueInfo(cast<GlobalValue>(CalledValue));
        Callsites.push_back({CalleeValueInfo, StackIdIndices});
      }
    }
  }
  Index.addBlockCount(F.size());

  std::vector<ValueInfo> Refs;
  if (IsThinLTO) {
    auto AddRefEdges = [&](const std::vector<const Instruction *> &Instrs,
                           SetVector<ValueInfo> &Edges,
                           SmallPtrSet<const User *, 8> &Cache) {
      for (const auto *I : Instrs) {
        Cache.erase(I);
        findRefEdges(Index, I, Edges, Cache);
      }
    };

    // By now we have processed all instructions in the function except
    // non-volatile loads and non-volatile stores. Find ref edges for both
    // instruction sets.
    AddRefEdges(NonVolatileLoads, LoadRefEdges, Visited);
    // We can add some values to the Visited set when processing load
    // instructions which are also used by stores in NonVolatileStores.
    // For example this can happen if we have the following code:
    //
    // store %Derived* @foo, %Derived** bitcast (%Base** @bar to %Derived**)
    // %42 = load %Derived*, %Derived** bitcast (%Base** @bar to %Derived**)
    //
    // After processing loads we'll add the bitcast to the Visited set, and if
    // we used the same set while processing stores, we'd never see the store
    // to @bar and @bar would be mistakenly treated as readonly.
    SmallPtrSet<const llvm::User *, 8> StoreCache;
    AddRefEdges(NonVolatileStores, StoreRefEdges, StoreCache);

    // If both a load and a store instruction reference the same variable
    // we won't be able to optimize it. Add all such reference edges
    // to the RefEdges set.
    for (const auto &VI : StoreRefEdges)
      if (LoadRefEdges.remove(VI))
        RefEdges.insert(VI);

    unsigned RefCnt = RefEdges.size();
    // All new reference edges inserted in the two loops below are either
    // read- or write-only. They will be grouped at the end of the RefEdges
    // vector, so we can use a single integer value to identify them.
    for (const auto &VI : LoadRefEdges)
      RefEdges.insert(VI);

    unsigned FirstWORef = RefEdges.size();
    for (const auto &VI : StoreRefEdges)
      RefEdges.insert(VI);

    Refs = RefEdges.takeVector();
    for (; RefCnt < FirstWORef; ++RefCnt)
      Refs[RefCnt].setReadOnly();

    for (; RefCnt < Refs.size(); ++RefCnt)
      Refs[RefCnt].setWriteOnly();
  } else {
    Refs = RefEdges.takeVector();
  }
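  // At this point Refs is laid out as three consecutive groups:
  //   [refs that may be both read and written | read-only refs | write-only refs]
  // which is why the single indices RefCnt/FirstWORef above are enough to mark
  // the read-only and write-only suffixes.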
  // Explicitly add hot edges to enforce importing for designated GUIDs for
  // sample PGO, to enable the same inlines as the profiled optimized binary.
  for (auto &I : F.getImportGUIDs())
    CallGraphEdges[Index.getOrInsertValueInfo(I)].updateHotness(
        ForceSummaryEdgesCold == FunctionSummary::FSHT_All
            ? CalleeInfo::HotnessType::Cold
            : CalleeInfo::HotnessType::Critical);

  bool NonRenamableLocal = isNonRenamableLocal(F);
  bool NotEligibleForImport = NonRenamableLocal ||
                              HasInlineAsmMaybeReferencingInternal ||
                              HasIndirBranchToBlockAddress;
  GlobalValueSummary::GVFlags Flags(
      F.getLinkage(), F.getVisibility(), NotEligibleForImport,
      /* Live = */ false, F.isDSOLocal(), F.canBeOmittedFromSymbolTable());
  FunctionSummary::FFlags FunFlags{
      F.doesNotAccessMemory(), F.onlyReadsMemory() && !F.doesNotAccessMemory(),
      F.hasFnAttribute(Attribute::NoRecurse), F.returnDoesNotAlias(),
      // FIXME: refactor this to use the same code that inliner is using.
      // Don't try to import functions with noinline attribute.
      F.getAttributes().hasFnAttr(Attribute::NoInline),
      F.hasFnAttribute(Attribute::AlwaysInline),
      F.hasFnAttribute(Attribute::NoUnwind), MayThrow, HasUnknownCall,
      mustBeUnreachableFunction(F)};
  std::vector<FunctionSummary::ParamAccess> ParamAccesses;
  if (auto *SSI = GetSSICallback(F))
    ParamAccesses = SSI->getParamAccesses(Index);
  auto FuncSummary = std::make_unique<FunctionSummary>(
      Flags, NumInsts, FunFlags, /*EntryCount=*/0, std::move(Refs),
      CallGraphEdges.takeVector(), TypeTests.takeVector(),
      TypeTestAssumeVCalls.takeVector(), TypeCheckedLoadVCalls.takeVector(),
      TypeTestAssumeConstVCalls.takeVector(),
      TypeCheckedLoadConstVCalls.takeVector(), std::move(ParamAccesses),
      std::move(Callsites), std::move(Allocs));
  if (NonRenamableLocal)
    CantBePromoted.insert(F.getGUID());
  Index.addGlobalValueSummary(F, std::move(FuncSummary));
}

/// Find function pointers referenced within the given vtable initializer
/// (or subset of an initializer) \p I. The starting offset of \p I within
/// the vtable initializer is \p StartingOffset. Any discovered function
/// pointers are added to \p VTableFuncs along with their cumulative offset
/// within the initializer.
static void findFuncPointers(const Constant *I, uint64_t StartingOffset,
                             const Module &M, ModuleSummaryIndex &Index,
                             VTableFuncList &VTableFuncs) {
  // First check if this is a function pointer.
  if (I->getType()->isPointerTy()) {
    auto Fn = dyn_cast<Function>(I->stripPointerCasts());
    // We can disregard __cxa_pure_virtual as a possible call target, as
    // calls to pure virtuals are UB.
    if (Fn && Fn->getName() != "__cxa_pure_virtual")
      VTableFuncs.push_back({Index.getOrInsertValueInfo(Fn), StartingOffset});
    return;
  }

  // Walk through the elements in the constant struct or array and recursively
  // look for virtual function pointers.
  const DataLayout &DL = M.getDataLayout();
  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    StructType *STy = dyn_cast<StructType>(C->getType());
    assert(STy);
    const StructLayout *SL = DL.getStructLayout(C->getType());

    for (auto EI : llvm::enumerate(STy->elements())) {
      auto Offset = SL->getElementOffset(EI.index());
      unsigned Op = SL->getElementContainingOffset(Offset);
      findFuncPointers(cast<Constant>(I->getOperand(Op)),
                       StartingOffset + Offset, M, Index, VTableFuncs);
    }
  } else if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *ATy = C->getType();
    Type *EltTy = ATy->getElementType();
    uint64_t EltSize = DL.getTypeAllocSize(EltTy);
    for (unsigned i = 0, e = ATy->getNumElements(); i != e; ++i) {
      findFuncPointers(cast<Constant>(I->getOperand(i)),
                       StartingOffset + i * EltSize, M, Index, VTableFuncs);
    }
  }
}
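
// As a rough illustration (layout varies by ABI), an Itanium-style vtable such
// as
//   @_ZTV1A = constant { [3 x ptr] }
//       { [3 x ptr] [ptr null, ptr @_ZTI1A, ptr @_ZN1A1fEv] }
// would yield a single VTableFuncs entry for @_ZN1A1fEv at offset 16 on a
// 64-bit target (the first two slots are the offset-to-top and RTTI pointers).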

// Identify the function pointers referenced by vtable definition \p V.
static void computeVTableFuncs(ModuleSummaryIndex &Index,
                               const GlobalVariable &V, const Module &M,
                               VTableFuncList &VTableFuncs) {
  if (!V.isConstant())
    return;

  findFuncPointers(V.getInitializer(), /*StartingOffset=*/0, M, Index,
                   VTableFuncs);

#ifndef NDEBUG
  // Validate that the VTableFuncs list is ordered by offset.
  uint64_t PrevOffset = 0;
  for (auto &P : VTableFuncs) {
    // The findFuncPointers traversal should have encountered the
    // functions in offset order. We need to use ">=" since PrevOffset
    // starts at 0.
    assert(P.VTableOffset >= PrevOffset);
    PrevOffset = P.VTableOffset;
  }
#endif
}

/// Record vtable definition \p V for each type metadata it references.
static void
recordTypeIdCompatibleVtableReferences(ModuleSummaryIndex &Index,
                                       const GlobalVariable &V,
                                       SmallVectorImpl<MDNode *> &Types) {
  for (MDNode *Type : Types) {
    auto TypeID = Type->getOperand(1).get();

    uint64_t Offset =
        cast<ConstantInt>(
            cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
            ->getZExtValue();

    if (auto *TypeId = dyn_cast<MDString>(TypeID))
      Index.getOrInsertTypeIdCompatibleVtableSummary(TypeId->getString())
          .push_back({Offset, Index.getOrInsertValueInfo(&V)});
  }
}
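
// For illustration, a vtable definition carrying type metadata such as
//   @_ZTV1B = constant { ... }, !type !0, !type !1
//   !0 = !{i64 16, !"_ZTS1A"}
//   !1 = !{i64 16, !"_ZTS1B"}
// is recorded under both type-id strings, each with the address-point offset
// (16 here) taken from the metadata's first operand.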

static void computeVariableSummary(ModuleSummaryIndex &Index,
                                   const GlobalVariable &V,
                                   DenseSet<GlobalValue::GUID> &CantBePromoted,
                                   const Module &M,
                                   SmallVectorImpl<MDNode *> &Types) {
  SetVector<ValueInfo> RefEdges;
  SmallPtrSet<const User *, 8> Visited;
  bool HasBlockAddress = findRefEdges(Index, &V, RefEdges, Visited);
  bool NonRenamableLocal = isNonRenamableLocal(V);
  GlobalValueSummary::GVFlags Flags(
      V.getLinkage(), V.getVisibility(), NonRenamableLocal,
      /* Live = */ false, V.isDSOLocal(), V.canBeOmittedFromSymbolTable());

  VTableFuncList VTableFuncs;
  // If splitting is not enabled, then we compute the summary information
  // necessary for index-based whole program devirtualization.
  if (!Index.enableSplitLTOUnit()) {
    Types.clear();
    V.getMetadata(LLVMContext::MD_type, Types);
    if (!Types.empty()) {
      // Identify the function pointers referenced by this vtable definition.
      computeVTableFuncs(Index, V, M, VTableFuncs);

      // Record this vtable definition for each type metadata it references.
      recordTypeIdCompatibleVtableReferences(Index, V, Types);
    }
  }

  // Don't mark variables we won't be able to internalize as read/write-only.
  bool CanBeInternalized =
      !V.hasComdat() && !V.hasAppendingLinkage() && !V.isInterposable() &&
      !V.hasAvailableExternallyLinkage() && !V.hasDLLExportStorageClass();
  bool Constant = V.isConstant();
  GlobalVarSummary::GVarFlags VarFlags(CanBeInternalized,
                                       Constant ? false : CanBeInternalized,
                                       Constant, V.getVCallVisibility());
  auto GVarSummary = std::make_unique<GlobalVarSummary>(Flags, VarFlags,
                                                        RefEdges.takeVector());
  if (NonRenamableLocal)
    CantBePromoted.insert(V.getGUID());
  if (HasBlockAddress)
    GVarSummary->setNotEligibleToImport();
  if (!VTableFuncs.empty())
    GVarSummary->setVTableFuncs(VTableFuncs);
  Index.addGlobalValueSummary(V, std::move(GVarSummary));
}

static void computeAliasSummary(ModuleSummaryIndex &Index, const GlobalAlias &A,
                                DenseSet<GlobalValue::GUID> &CantBePromoted) {
  // Skip summary for indirect function aliases as summary for aliasee will not
  // be emitted.
  const GlobalObject *Aliasee = A.getAliaseeObject();
  if (isa<GlobalIFunc>(Aliasee))
    return;
  bool NonRenamableLocal = isNonRenamableLocal(A);
  GlobalValueSummary::GVFlags Flags(
      A.getLinkage(), A.getVisibility(), NonRenamableLocal,
      /* Live = */ false, A.isDSOLocal(), A.canBeOmittedFromSymbolTable());
  auto AS = std::make_unique<AliasSummary>(Flags);
  auto AliaseeVI = Index.getValueInfo(Aliasee->getGUID());
  assert(AliaseeVI && "Alias expects aliasee summary to be available");
  assert(AliaseeVI.getSummaryList().size() == 1 &&
         "Expected a single entry per aliasee in per-module index");
  AS->setAliasee(AliaseeVI, AliaseeVI.getSummaryList()[0].get());
  if (NonRenamableLocal)
    CantBePromoted.insert(A.getGUID());
  Index.addGlobalValueSummary(A, std::move(AS));
}

// Set LiveRoot flag on entries matching the given value name.
static void setLiveRoot(ModuleSummaryIndex &Index, StringRef Name) {
  if (ValueInfo VI = Index.getValueInfo(GlobalValue::getGUID(Name)))
    for (const auto &Summary : VI.getSummaryList())
      Summary->setLive(true);
}

ModuleSummaryIndex llvm::buildModuleSummaryIndex(
    const Module &M,
    std::function<BlockFrequencyInfo *(const Function &F)> GetBFICallback,
    ProfileSummaryInfo *PSI,
    std::function<const StackSafetyInfo *(const Function &F)> GetSSICallback) {
  assert(PSI);
  bool EnableSplitLTOUnit = false;
  if (auto *MD = mdconst::extract_or_null<ConstantInt>(
          M.getModuleFlag("EnableSplitLTOUnit")))
    EnableSplitLTOUnit = MD->getZExtValue();
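  // The flag is emitted by the frontend as a module flag, e.g.
  //   !llvm.module.flags = !{..., !{i32 1, !"EnableSplitLTOUnit", i32 1}, ...}
  // (illustrative; the exact flag list depends on the compilation mode).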
  ModuleSummaryIndex Index(/*HaveGVs=*/true, EnableSplitLTOUnit);

  // Identify the local values in the llvm.used and llvm.compiler.used sets,
  // which should not be exported as they would then require renaming and
  // promotion, but we may have opaque uses e.g. in inline asm. We collect them
  // here because we use this information to mark functions containing inline
  // assembly calls as not importable.
  SmallPtrSet<GlobalValue *, 4> LocalsUsed;
  SmallVector<GlobalValue *, 4> Used;
  // First collect those in the llvm.used set.
  collectUsedGlobalVariables(M, Used, /*CompilerUsed=*/false);
  // Next collect those in the llvm.compiler.used set.
  collectUsedGlobalVariables(M, Used, /*CompilerUsed=*/true);
  DenseSet<GlobalValue::GUID> CantBePromoted;
  for (auto *V : Used) {
    if (V->hasLocalLinkage()) {
      LocalsUsed.insert(V);
      CantBePromoted.insert(V->getGUID());
    }
  }

  bool HasLocalInlineAsmSymbol = false;
  if (!M.getModuleInlineAsm().empty()) {
    // Collect the local values defined by module level asm, and set up
    // summaries for these symbols so that they can be marked as NoRename,
    // to prevent export of any use of them in regular IR that would require
    // renaming within the module level asm. Note we don't need to create a
    // summary for weak or global defs, as they don't need to be flagged as
    // NoRename, and defs in module level asm can't be imported anyway.
    // Also, any values used but not defined within module level asm should
    // be listed on the llvm.used or llvm.compiler.used global and marked as
    // referenced from there.
    ModuleSymbolTable::CollectAsmSymbols(
        M, [&](StringRef Name, object::BasicSymbolRef::Flags Flags) {
          // Symbols not marked as Weak or Global are local definitions.
          if (Flags & (object::BasicSymbolRef::SF_Weak |
                       object::BasicSymbolRef::SF_Global))
            return;
          HasLocalInlineAsmSymbol = true;
          GlobalValue *GV = M.getNamedValue(Name);
          if (!GV)
            return;
          assert(GV->isDeclaration() &&
                 "Def in module asm already has definition");
          GlobalValueSummary::GVFlags GVFlags(
              GlobalValue::InternalLinkage, GlobalValue::DefaultVisibility,
              /* NotEligibleToImport = */ true,
              /* Live = */ true,
              /* Local */ GV->isDSOLocal(), GV->canBeOmittedFromSymbolTable());
          CantBePromoted.insert(GV->getGUID());
          // Create the appropriate summary type.
          if (Function *F = dyn_cast<Function>(GV)) {
            std::unique_ptr<FunctionSummary> Summary =
                std::make_unique<FunctionSummary>(
                    GVFlags, /*InstCount=*/0,
                    FunctionSummary::FFlags{
                        F->hasFnAttribute(Attribute::ReadNone),
                        F->hasFnAttribute(Attribute::ReadOnly),
                        F->hasFnAttribute(Attribute::NoRecurse),
                        F->returnDoesNotAlias(),
                        /* NoInline = */ false,
                        F->hasFnAttribute(Attribute::AlwaysInline),
                        F->hasFnAttribute(Attribute::NoUnwind),
                        /* MayThrow */ true,
                        /* HasUnknownCall */ true,
                        /* MustBeUnreachable */ false},
                    /*EntryCount=*/0, ArrayRef<ValueInfo>{},
                    ArrayRef<FunctionSummary::EdgeTy>{},
                    ArrayRef<GlobalValue::GUID>{},
                    ArrayRef<FunctionSummary::VFuncId>{},
                    ArrayRef<FunctionSummary::VFuncId>{},
                    ArrayRef<FunctionSummary::ConstVCall>{},
                    ArrayRef<FunctionSummary::ConstVCall>{},
                    ArrayRef<FunctionSummary::ParamAccess>{},
                    ArrayRef<CallsiteInfo>{}, ArrayRef<AllocInfo>{});
            Index.addGlobalValueSummary(*GV, std::move(Summary));
          } else {
            std::unique_ptr<GlobalVarSummary> Summary =
                std::make_unique<GlobalVarSummary>(
                    GVFlags,
                    GlobalVarSummary::GVarFlags(
                        false, false, cast<GlobalVariable>(GV)->isConstant(),
                        GlobalObject::VCallVisibilityPublic),
                    ArrayRef<ValueInfo>{});
            Index.addGlobalValueSummary(*GV, std::move(Summary));
          }
        });
  }

  bool IsThinLTO = true;
  if (auto *MD =
          mdconst::extract_or_null<ConstantInt>(M.getModuleFlag("ThinLTO")))
    IsThinLTO = MD->getZExtValue();

  // Compute summaries for all functions defined in module, and save in the
  // index.
  for (const auto &F : M) {
    if (F.isDeclaration())
      continue;

    DominatorTree DT(const_cast<Function &>(F));
    BlockFrequencyInfo *BFI = nullptr;
    std::unique_ptr<BlockFrequencyInfo> BFIPtr;
    if (GetBFICallback)
      BFI = GetBFICallback(F);
    else if (F.hasProfileData()) {
      LoopInfo LI{DT};
      BranchProbabilityInfo BPI{F, LI};
      BFIPtr = std::make_unique<BlockFrequencyInfo>(F, BPI, LI);
      BFI = BFIPtr.get();
    }

    computeFunctionSummary(Index, M, F, BFI, PSI, DT,
                           !LocalsUsed.empty() || HasLocalInlineAsmSymbol,
                           CantBePromoted, IsThinLTO, GetSSICallback);
  }

  // Compute summaries for all variables defined in module, and save in the
  // index.
  SmallVector<MDNode *, 2> Types;
  for (const GlobalVariable &G : M.globals()) {
    if (G.isDeclaration())
      continue;
    computeVariableSummary(Index, G, CantBePromoted, M, Types);
  }

  // Compute summaries for all aliases defined in module, and save in the
  // index.
  for (const GlobalAlias &A : M.aliases())
    computeAliasSummary(Index, A, CantBePromoted);

  // Iterate through ifuncs and mark everything along their resolver paths live.
  for (const GlobalIFunc &I : M.ifuncs()) {
    I.applyAlongResolverPath([&Index](const GlobalValue &GV) {
      Index.getGlobalValueSummary(GV)->setLive(true);
    });
  }

  for (auto *V : LocalsUsed) {
    auto *Summary = Index.getGlobalValueSummary(*V);
    assert(Summary && "Missing summary for global value");
    Summary->setNotEligibleToImport();
  }

  // The linker doesn't know about these LLVM produced values, so we need
  // to flag them as live in the index to ensure index-based dead value
  // analysis treats them as live roots of the analysis.
  setLiveRoot(Index, "llvm.used");
  setLiveRoot(Index, "llvm.compiler.used");
  setLiveRoot(Index, "llvm.global_ctors");
  setLiveRoot(Index, "llvm.global_dtors");
  setLiveRoot(Index, "llvm.global.annotations");

  for (auto &GlobalList : Index) {
    // Ignore entries for references that are undefined in the current module.
    if (GlobalList.second.SummaryList.empty())
      continue;

    assert(GlobalList.second.SummaryList.size() == 1 &&
           "Expected module's index to have one summary per GUID");
    auto &Summary = GlobalList.second.SummaryList[0];
    if (!IsThinLTO) {
      Summary->setNotEligibleToImport();
      continue;
    }

    bool AllRefsCanBeExternallyReferenced =
        llvm::all_of(Summary->refs(), [&](const ValueInfo &VI) {
          return !CantBePromoted.count(VI.getGUID());
        });
    if (!AllRefsCanBeExternallyReferenced) {
      Summary->setNotEligibleToImport();
      continue;
    }

    if (auto *FuncSummary = dyn_cast<FunctionSummary>(Summary.get())) {
      bool AllCallsCanBeExternallyReferenced = llvm::all_of(
          FuncSummary->calls(), [&](const FunctionSummary::EdgeTy &Edge) {
            return !CantBePromoted.count(Edge.first.getGUID());
          });
      if (!AllCallsCanBeExternallyReferenced)
        Summary->setNotEligibleToImport();
    }
  }

  if (!ModuleSummaryDotFile.empty()) {
    std::error_code EC;
    raw_fd_ostream OSDot(ModuleSummaryDotFile, EC, sys::fs::OpenFlags::OF_None);
    if (EC)
      report_fatal_error(Twine("Failed to open dot file ") +
                         ModuleSummaryDotFile + ": " + EC.message() + "\n");
    Index.exportToDot(OSDot, {});
  }

  return Index;
}

AnalysisKey ModuleSummaryIndexAnalysis::Key;

ModuleSummaryIndex
ModuleSummaryIndexAnalysis::run(Module &M, ModuleAnalysisManager &AM) {
  ProfileSummaryInfo &PSI = AM.getResult<ProfileSummaryAnalysis>(M);
  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  bool NeedSSI = needsParamAccessSummary(M);
  return buildModuleSummaryIndex(
      M,
      [&FAM](const Function &F) {
        return &FAM.getResult<BlockFrequencyAnalysis>(
            *const_cast<Function *>(&F));
      },
      &PSI,
      [&FAM, NeedSSI](const Function &F) -> const StackSafetyInfo * {
        return NeedSSI ? &FAM.getResult<StackSafetyAnalysis>(
                             const_cast<Function &>(F))
                       : nullptr;
      });
}

char ModuleSummaryIndexWrapperPass::ID = 0;

INITIALIZE_PASS_BEGIN(ModuleSummaryIndexWrapperPass, "module-summary-analysis",
                      "Module Summary Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(BlockFrequencyInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(ProfileSummaryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(StackSafetyInfoWrapperPass)
INITIALIZE_PASS_END(ModuleSummaryIndexWrapperPass, "module-summary-analysis",
                    "Module Summary Analysis", false, true)

ModulePass *llvm::createModuleSummaryIndexWrapperPass() {
  return new ModuleSummaryIndexWrapperPass();
}

ModuleSummaryIndexWrapperPass::ModuleSummaryIndexWrapperPass()
    : ModulePass(ID) {
  initializeModuleSummaryIndexWrapperPassPass(*PassRegistry::getPassRegistry());
}

bool ModuleSummaryIndexWrapperPass::runOnModule(Module &M) {
  auto *PSI = &getAnalysis<ProfileSummaryInfoWrapperPass>().getPSI();
  bool NeedSSI = needsParamAccessSummary(M);
  Index.emplace(buildModuleSummaryIndex(
      M,
      [this](const Function &F) {
        return &(this->getAnalysis<BlockFrequencyInfoWrapperPass>(
                         *const_cast<Function *>(&F))
                     .getBFI());
      },
      PSI,
      [&](const Function &F) -> const StackSafetyInfo * {
        return NeedSSI ? &getAnalysis<StackSafetyInfoWrapperPass>(
                              const_cast<Function &>(F))
                              .getResult()
                       : nullptr;
      }));
  return false;
}

bool ModuleSummaryIndexWrapperPass::doFinalization(Module &M) {
  Index.reset();
  return false;
}

void ModuleSummaryIndexWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<BlockFrequencyInfoWrapperPass>();
  AU.addRequired<ProfileSummaryInfoWrapperPass>();
  AU.addRequired<StackSafetyInfoWrapperPass>();
}

char ImmutableModuleSummaryIndexWrapperPass::ID = 0;

ImmutableModuleSummaryIndexWrapperPass::ImmutableModuleSummaryIndexWrapperPass(
    const ModuleSummaryIndex *Index)
    : ImmutablePass(ID), Index(Index) {
  initializeImmutableModuleSummaryIndexWrapperPassPass(
      *PassRegistry::getPassRegistry());
}

void ImmutableModuleSummaryIndexWrapperPass::getAnalysisUsage(
    AnalysisUsage &AU) const {
  AU.setPreservesAll();
}

ImmutablePass *llvm::createImmutableModuleSummaryIndexWrapperPass(
    const ModuleSummaryIndex *Index) {
  return new ImmutableModuleSummaryIndexWrapperPass(Index);
}

INITIALIZE_PASS(ImmutableModuleSummaryIndexWrapperPass, "module-summary-info",
                "Module summary info", false, true)