#define DEBUG_TYPE "globalopt"
STATISTIC(NumMarked, "Number of globals marked constant");
STATISTIC(NumUnnamed, "Number of globals marked unnamed_addr");
STATISTIC(NumSRA, "Number of aggregate globals broken into scalars");
STATISTIC(NumSubstitute, "Number of globals with initializers stored into them");
STATISTIC(NumGlobUses, "Number of global uses devirtualized");
STATISTIC(NumLocalized, "Number of globals localized");
STATISTIC(NumShrunkToBool, "Number of global vars shrunk to booleans");
STATISTIC(NumFastCallFns, "Number of functions converted to fastcc");
STATISTIC(NumCtorsEvaluated, "Number of static ctors evaluated");
STATISTIC(NumNestRemoved, "Number of nest attributes removed");
STATISTIC(NumAliasesResolved, "Number of global aliases resolved");
STATISTIC(NumAliasesRemoved, "Number of global aliases eliminated");
STATISTIC(NumCXXDtorsRemoved, "Number of global C++ destructors removed");
STATISTIC(NumAtExitRemoved, "Number of atexit handlers removed");
STATISTIC(NumInternalFunc, "Number of internal functions");
STATISTIC(NumColdCC, "Number of functions marked coldcc");
STATISTIC(NumIFuncsResolved, "Number of statically resolved IFuncs");
STATISTIC(NumIFuncsDeleted, "Number of IFuncs removed");
96 "Number of global arrays padded to alignment boundary");
109 cl::desc(
"Statically resolve calls to versioned "
110 "functions from non-versioned callers."),
115 cl::desc(
"Enable stress test of coldcc by adding "
116 "calling conv to all internal functions."),
122 "Maximum block frequency, expressed as a percentage of caller's "
123 "entry frequency, for a call site to be considered cold for enabling "
    Type *Ty = Types.pop_back_val();

    if (cast<VectorType>(Ty)->getElementType()->isPointerTy())

    Types.push_back(cast<ArrayType>(Ty)->getElementType());

    if (isa<PointerType>(InnerTy))
      return true;
    if (isa<StructType>(InnerTy) || isa<ArrayType>(InnerTy) ||
        isa<VectorType>(InnerTy))
      Types.push_back(InnerTy);

    if (--Limit == 0)
      return true;
  } while (!Types.empty());
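// --- Editor's sketch (not part of GlobalOpt.cpp) ---
// A minimal, self-contained illustration of the worklist scan above: walk a
// type tree looking for a pointer anywhere inside it, with a depth limit so a
// pathological type cannot stall the pass. The toy `Ty` struct is purely
// hypothetical and stands in for llvm::Type.
#include <vector>

struct Ty {
  bool IsPointer = false;
  std::vector<Ty *> Contained; // struct fields / array or vector element types
};

static bool mayContainPointer(Ty *Root) {
  std::vector<Ty *> Types{Root};
  unsigned Limit = 20;                 // same spirit as the Limit counter above
  do {
    Ty *T = Types.back();
    Types.pop_back();
    if (T->IsPointer)
      return true;                     // found a pointer member: treat as a root
    for (Ty *Inner : T->Contained)
      Types.push_back(Inner);          // keep scanning aggregate members
    if (--Limit == 0)
      return true;                     // give up conservatively
  } while (!Types.empty());
  return false;
}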
  if (isa<Constant>(V))
  if (isa<LoadInst>(V) || isa<InvokeInst>(V) || isa<Argument>(V) ||
  if (I->mayHaveSideEffects())
  if (!GEP->hasAllConstantIndices())
  } else if (I->getNumOperands() != 1) {
  V = I->getOperand(0);

  bool Changed = false;
  while (!Worklist.empty()) {
    if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      Value *V = SI->getValueOperand();
      if (isa<Constant>(V)) {
        SI->eraseFromParent();
        Dead.push_back(std::make_pair(I, SI));
    } else if (MemSetInst *MSI = dyn_cast<MemSetInst>(U)) {
      if (isa<Constant>(MSI->getValue())) {
        MSI->eraseFromParent();
      } else if (Instruction *I = dyn_cast<Instruction>(MSI->getValue())) {
        Dead.push_back(std::make_pair(I, MSI));
      GlobalVariable *MemSrc = dyn_cast<GlobalVariable>(MTI->getSource());
      MTI->eraseFromParent();
      } else if (Instruction *I = dyn_cast<Instruction>(MTI->getSource())) {
        Dead.push_back(std::make_pair(I, MTI));
    } else if (ConstantExpr *CE = dyn_cast<ConstantExpr>(U)) {
      if (isa<GEPOperator>(CE))
  for (int i = 0, e = Dead.size(); i != e; ++i) {
    Dead[i].second->eraseFromParent();
    Instruction *J = dyn_cast<Instruction>(I->getOperand(0));
    I->eraseFromParent();
    I->eraseFromParent();
  bool Changed = false;
  if (auto *OpI = dyn_cast<Instruction>(Op))
  I->eraseFromParent();
  while (!WorkList.empty()) {
    if (!Visited.insert(U).second)
    if (auto *BO = dyn_cast<BitCastOperator>(U))
    if (auto *ASC = dyn_cast<AddrSpaceCastOperator>(U))
    else if (auto *GEP = dyn_cast<GEPOperator>(U))
    else if (auto *LI = dyn_cast<LoadInst>(U)) {
      Type *Ty = LI->getType();
      LI->replaceAllUsesWith(Res);
      Value *PtrOp = LI->getPointerOperand();
      if (II->getIntrinsicID() == Intrinsic::threadlocal_address)
        PtrOp = II->getArgOperand(0);
      LI->replaceAllUsesWith(Value);
    } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      if (II->getIntrinsicID() == Intrinsic::threadlocal_address)
  auto AppendUses = [&](Value *V) {
    for (Use &U : V->uses())
      if (Visited.insert(&U).second)
  while (!Worklist.empty()) {
    User *V = U->getUser();
    auto *GEP = dyn_cast<GEPOperator>(V);
    if (isa<BitCastOperator>(V) || isa<AddrSpaceCastOperator>(V) ||
        (GEP && GEP->hasAllConstantIndices())) {
    if (isa<StoreInst>(V) && U->getOperandNo() == 0)
    if (Ptr != GV || Offset.getActiveBits() >= 64)
    const auto &[It, Inserted] =
    if (Ty != It->second.Ty)
    It->second.Initializer =
    if (!It->second.Initializer) {
      LLVM_DEBUG(dbgs() << "Global SRA: Failed to evaluate initializer of "
                        << *GV << " with type " << *Ty << " at offset "
                        << Offset.getZExtValue());
    auto *SI = dyn_cast<StoreInst>(V);
    Constant *StoredConst = dyn_cast<Constant>(SI->getOperand(0));
    return Initializer != StoredConst;
    It->second.IsLoaded |= isa<LoadInst>(V);
    It->second.IsStored |= IsStored(V, It->second.Initializer);
    if (auto *C = dyn_cast<Constant>(V)) {
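// --- Editor's sketch (not part of GlobalOpt.cpp) ---
// The loop above builds a map from constant byte offset to the single type
// used to access the global at that offset; SRA gives up if two accesses at
// the same offset disagree on the type or if an offset does not fit in 64
// bits. A minimal stand-alone model of that bookkeeping (the string `Ty`
// stands in for llvm::Type* and is a deliberate simplification):
#include <cstdint>
#include <map>
#include <string>

struct Part {
  std::string Ty;        // type used by every load/store at this offset
  bool IsLoaded = false;
  bool IsStored = false;
};

static bool recordAccess(std::map<uint64_t, Part> &Parts, uint64_t Offset,
                         const std::string &Ty, bool IsLoad) {
  auto [It, Inserted] = Parts.try_emplace(Offset, Part{Ty});
  if (!Inserted && It->second.Ty != Ty)
    return false;                      // conflicting types at one offset: bail out
  (IsLoad ? It->second.IsLoaded : It->second.IsStored) = true;
  return true;
}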
  for (auto *GVE : GVs) {
    int64_t CurVarOffsetInBytes = 0;
    uint64_t FragmentEndInBits = FragmentOffsetInBits + FragmentSizeInBits;
    if (CurVarOffsetInBytes < 0)
    CurVarOffsetInBits = CHAR_BIT * (uint64_t)CurVarOffsetInBytes;
    if (CurVarOffsetInBits >= FragmentEndInBits)
    uint64_t CurVarEndInBits = CurVarOffsetInBits + CurVarSize;
    if (CurVarSize != 0 &&
        CurVarEndInBits <= FragmentOffsetInBits)
    if (CurVarSize != 0 &&
        CurVarOffsetInBits >= FragmentOffsetInBits &&
        CurVarEndInBits <= FragmentEndInBits) {
        (CurVarOffsetInBits - FragmentOffsetInBits) / 8;
      if (CurVarOffsetInFragment != 0)
        Expr = DIExpression::get(Expr->getContext(),
                                 {dwarf::DW_OP_plus_uconst, CurVarOffsetInFragment});
      Expr = DIExpression::get(Expr->getContext(), {});
      DIGlobalVariableExpression::get(GVE->getContext(), Var, Expr);
    if (FragmentSizeInBits < VarSize) {
      if (CurVarOffsetInBits > FragmentOffsetInBits)
      uint64_t CurVarFragmentOffsetInBits =
          FragmentOffsetInBits - CurVarOffsetInBits;
      uint64_t CurVarFragmentSizeInBits = FragmentSizeInBits;
      if (CurVarSize != 0 && CurVarEndInBits < FragmentEndInBits)
        CurVarFragmentSizeInBits -= (FragmentEndInBits - CurVarEndInBits);
      if (CurVarOffsetInBits)
      Expr = DIExpression::get(Expr->getContext(), {});
          Expr, CurVarFragmentOffsetInBits, CurVarFragmentSizeInBits))
    auto *NGVE = DIGlobalVariableExpression::get(GVE->getContext(), Var, Expr);
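// --- Editor's sketch (not part of GlobalOpt.cpp) ---
// The debug-info transfer above is mostly interval arithmetic in bits: a
// variable occupies [VarOffset, VarOffset + VarSize) and a replacement global
// covers [FragOffset, FragOffset + FragSize); the piece of the variable that
// the new global actually holds is their overlap, expressed relative to the
// variable (mirroring CurVarFragmentOffsetInBits / CurVarFragmentSizeInBits).
// A self-contained version of that arithmetic, not a line-for-line copy:
#include <cstdint>
#include <optional>

struct BitRange { uint64_t Offset, Size; };

static std::optional<BitRange> overlapWithinVar(BitRange Var, BitRange Frag) {
  uint64_t VarEnd = Var.Offset + Var.Size;
  uint64_t FragEnd = Frag.Offset + Frag.Size;
  if (Var.Offset >= FragEnd || VarEnd <= Frag.Offset)
    return std::nullopt;               // no overlap: nothing to transfer
  uint64_t Begin = Var.Offset > Frag.Offset ? Var.Offset : Frag.Offset;
  uint64_t End = VarEnd < FragEnd ? VarEnd : FragEnd;
  return BitRange{Begin - Var.Offset, End - Begin};
}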
  unsigned NumParts = count_if(Parts, [](const auto &Pair) {
    return Pair.second.IsLoaded && Pair.second.IsStored;
  for (const auto &Pair : Parts) {
        {Pair.first, Pair.second.Ty, Pair.second.Initializer});
  for (const auto &[OffsetForTy, Ty, _] : TypesVector) {
    Offset = OffsetForTy + DL.getTypeAllocSize(Ty);
  LLVM_DEBUG(dbgs() << "PERFORMING GLOBAL SRA ON: " << *GV << "\n");
  Align StartAlignment =
  unsigned NameSuffix = 0;
  for (auto &[OffsetForTy, Ty, Initializer] : TypesVector) {
        *GV->getParent(), Ty, false, GlobalVariable::InternalLinkage,
        Initializer, GV->getName() + "." + Twine(NameSuffix++), GV,
    NewGlobals.insert({OffsetForTy, NGV});
        DL.getTypeAllocSizeInBits(Ty), VarSize);
  auto AppendUsers = [&](Value *V) {
    for (User *U : V->users())
      if (Visited.insert(U).second)
  while (!Worklist.empty()) {
    if (isa<BitCastOperator>(V) || isa<AddrSpaceCastOperator>(V) ||
        isa<GEPOperator>(V)) {
      if (isa<Instruction>(V))
      assert(Ptr == GV && "Load/store must be from/to global");
      assert(NGV && "Must have replacement global for this offset");
      if (auto *LI = dyn_cast<LoadInst>(V)) {
        LI->setOperand(0, NGV);
        LI->setAlignment(NewAlign);
        auto *SI = cast<StoreInst>(V);
        SI->setOperand(1, NGV);
        SI->setAlignment(NewAlign);
           "Other users can only be dead constants");
  return NewGlobals.begin()->second;
  for (const User *U : V->users()) {
    if (isa<LoadInst>(U)) {
    } else if (const StoreInst *SI = dyn_cast<StoreInst>(U)) {
      if (SI->getOperand(0) == V) {
    } else if (const CallInst *CI = dyn_cast<CallInst>(U)) {
      if (CI->getCalledOperand() != V) {
    } else if (const InvokeInst *II = dyn_cast<InvokeInst>(U)) {
      if (II->getCalledOperand() != V) {
    } else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
    } else if (isa<ICmpInst>(U) &&
               !ICmpInst::isSigned(cast<ICmpInst>(U)->getPredicate()) &&
               isa<LoadInst>(U->getOperand(0)) &&
               isa<ConstantPointerNull>(U->getOperand(1))) {
      assert(isa<GlobalValue>(cast<LoadInst>(U->getOperand(0))
                                  ->getPointerOperand()
                                  ->stripPointerCasts()) &&
             "Should be GlobalVariable");
  while (!Worklist.empty()) {
    for (const auto *U : P->users()) {
      if (auto *LI = dyn_cast<LoadInst>(U)) {
      } else if (auto *SI = dyn_cast<StoreInst>(U)) {
        if (SI->getPointerOperand() != P)
      } else if (auto *CE = dyn_cast<ConstantExpr>(U)) {
        if (CE->stripPointerCasts() != GV)

  while (!Worklist.empty()) {
    for (auto *U : P->users()) {
      if (auto *CE = dyn_cast<ConstantExpr>(U)) {
      assert((isa<LoadInst>(U) || isa<StoreInst>(U)) &&
             "Expect only load or store instructions");
  bool Changed = false;
  for (auto UI = V->user_begin(), E = V->user_end(); UI != E; ) {
    if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
      LI->setOperand(0, NewV);
    } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
      if (SI->getOperand(1) == V) {
        SI->setOperand(1, NewV);
    } else if (isa<CallInst>(I) || isa<InvokeInst>(I)) {
      bool PassedAsArg = false;
      for (unsigned i = 0, e = CB->arg_size(); i != e; ++i)
        UI = V->user_begin();
      if (CI->use_empty()) {
        CI->eraseFromParent();
      Idxs.reserve(GEPI->getNumOperands()-1);
        if (Constant *C = dyn_cast<Constant>(*i))
      if (Idxs.size() == GEPI->getNumOperands()-1)
      if (GEPI->use_empty()) {
        GEPI->eraseFromParent();
  bool Changed = false;
  bool AllNonStoreUsesGone = true;
    if (LoadInst *LI = dyn_cast<LoadInst>(GlobalUser)) {
      if (LI->use_empty()) {
        LI->eraseFromParent();
        AllNonStoreUsesGone = false;
    } else if (isa<StoreInst>(GlobalUser)) {
      assert(GlobalUser->getOperand(1) == GV &&
             "Must be storing *to* the global");
      AllNonStoreUsesGone = false;
      assert((isa<PHINode>(GlobalUser) || isa<SelectInst>(GlobalUser) ||
              isa<ConstantExpr>(GlobalUser) || isa<CmpInst>(GlobalUser) ||
              isa<BitCastInst>(GlobalUser) ||
              isa<GetElementPtrInst>(GlobalUser) ||
              isa<AddrSpaceCastInst>(GlobalUser)) &&
             "Only expect load and stores!");
  LLVM_DEBUG(dbgs() << "OPTIMIZED LOADS FROM STORED ONCE POINTER: " << *GV
  if (AllNonStoreUsesGone) {

    I->replaceAllUsesWith(NewC);
    while (UI != E && *UI == I)
    I->eraseFromParent();
  LLVM_DEBUG(errs() << "PROMOTING GLOBAL: " << *GV << " CALL = " << *CI
  if (!isa<UndefValue>(InitVal)) {
    Builder.CreateMemSet(NewGV, InitVal, AllocSize, std::nullopt);
  bool InitBoolUsed = false;
  for (auto *U : Guses) {
    if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
                        !isa<ConstantPointerNull>(SI->getValueOperand())),
                    InitBool, false, Align(1), SI->getOrdering(),
                    SI->getSyncScopeID(), SI->getIterator());
      SI->eraseFromParent();
      InitBoolUsed = true;
      case ICmpInst::ICMP_ULT:
      case ICmpInst::ICMP_UGE:
      case ICmpInst::ICMP_ULE:
      case ICmpInst::ICMP_EQ:
      case ICmpInst::ICMP_NE:
      case ICmpInst::ICMP_UGT:
  if (!InitBoolUsed) {
    cast<StoreInst>(InitBool->user_back())->eraseFromParent();
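// --- Editor's sketch (not part of GlobalOpt.cpp) ---
// Once the only-ever-stored pointer is replaced by an "initialized" flag
// (InitBool above), comparisons of the old pointer against null fold to the
// flag, its negation, or a constant. One consistent reading of the predicate
// switch above, restated as plain C++ (the mapping of predicates to results
// is this sketch's assumption, not a quote from the pass):
#include <cassert>

enum class Pred { ULT, UGE, ULE, EQ, NE, UGT };

// Fold `icmp Pred (OldPtr, null)` given that OldPtr is non-null exactly when
// Init is true.
static bool foldNullCompare(Pred P, bool Init) {
  switch (P) {
  case Pred::ULT: return false;      // nothing is unsigned-less-than null
  case Pred::UGE: return true;       // everything is unsigned-greater-or-equal
  case Pred::ULE:
  case Pred::EQ:  return !Init;      // equal to null iff not yet initialized
  case Pred::NE:
  case Pred::UGT: return Init;       // non-null iff initialized
  }
  assert(false && "unknown predicate");
  return false;
}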
  while (!Worklist.empty()) {
    if (!Visited.insert(V).second)
    for (const Use &VUse : V->uses()) {
      const User *U = VUse.getUser();
      if (isa<LoadInst>(U) || isa<CmpInst>(U))
      if (auto *SI = dyn_cast<StoreInst>(U)) {
        if (SI->getValueOperand() == V &&
            SI->getPointerOperand()->stripPointerCasts() != GV)
      if (auto *GEPI = dyn_cast<GetElementPtrInst>(U)) {

  if (AllocSize >= 2048)

  if (Constant *SOVC = dyn_cast<Constant>(StoredOnceVal)) {
  if (auto *CI = dyn_cast<CallInst>(StoredOnceVal)) {
    auto *TLI = &GetTLI(*CI->getFunction());
  if (!isa<LoadInst>(U) && !isa<StoreInst>(U))
         "No reason to shrink to bool!");
  bool IsOneZero = false;
  bool EmitOneOrZero = true;
  auto *CI = dyn_cast<ConstantInt>(OtherVal);
  if (CI && CI->getValue().getActiveBits() <= 64) {
    IsOneZero = InitVal->isNullValue() && CI->isOne();
    if (CIInit && CIInit->getValue().getActiveBits() <= 64) {
      uint64_t ValInit = CIInit->getZExtValue();
      uint64_t ValOther = CI->getZExtValue();
      uint64_t ValMinus = ValOther - ValInit;
      for (auto *GVe : GVs) {
        unsigned SizeInOctets =
            dwarf::DW_OP_deref_size, SizeInOctets,
            dwarf::DW_OP_constu, ValMinus,
            dwarf::DW_OP_mul, dwarf::DW_OP_constu, ValInit,
        bool WithStackValue = true;
            DIGlobalVariableExpression::get(NewGV->getContext(), DGV, E);
        EmitOneOrZero = false;
  if (EmitOneOrZero) {
    if (StoreInst *SI = dyn_cast<StoreInst>(UI)) {
      bool StoringOther = SI->getOperand(0) == OtherVal;
      if (StoringOther || SI->getOperand(0) == InitVal) {
        Instruction *StoredVal = cast<Instruction>(SI->getOperand(0));
        if (LoadInst *LI = dyn_cast<LoadInst>(StoredVal)) {
          assert(LI->getOperand(0) == GV && "Not a copy!");
              false, Align(1), LI->getOrdering(),
              LI->getSyncScopeID(), LI->getIterator());
          assert((isa<CastInst>(StoredVal) || isa<SelectInst>(StoredVal)) &&
                 "This is not a form that we understand!");
          assert(isa<LoadInst>(StoreVal) && "Not a load of NewGV!");
      new StoreInst(StoreVal, NewGV, false, Align(1), SI->getOrdering(),
                    SI->getSyncScopeID(), SI->getIterator());
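// --- Editor's sketch (not part of GlobalOpt.cpp) ---
// When the global is shrunk to a single boolean, the original value can still
// be described to the debugger: the only two values ever stored are the
// initializer and OtherVal, so with ValMinus = ValOther - ValInit the DWARF
// expression assembled above evaluates, in effect,
//     original = Bool * ValMinus + ValInit
// (the trailing addition is this sketch's reading of the expression, since
// only the deref/constu/mul opcodes appear in the excerpt). In plain C++:
#include <cstdint>

static uint64_t recoverOriginal(bool Bool, uint64_t ValInit, uint64_t ValOther) {
  uint64_t ValMinus = ValOther - ValInit;   // same names as in the excerpt
  return (Bool ? 1u : 0u) * ValMinus + ValInit;
}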
  if (auto *F = dyn_cast<Function>(&GV))
    Dead = (F->isDeclaration() && F->use_empty()) || F->isDefTriviallyDead();
  if (auto *F = dyn_cast<Function>(&GV)) {
    if (DeleteFnCallback)
      DeleteFnCallback(*F);
  for (auto *U : GV->users()) {
    assert(I->getParent()->getParent() == F);
    if (auto *LI = dyn_cast<LoadInst>(I))
    else if (auto *SI = dyn_cast<StoreInst>(I))
  auto &DT = LookupDomTree(*const_cast<Function *>(F));
  const unsigned Threshold = 100;
  if (Loads.size() * Stores.size() > Threshold)
  for (auto *L : Loads) {
    auto *LTy = L->getType();
        DL.getTypeStoreSize(LTy).getFixedValue() <=
        DL.getTypeStoreSize(STy).getFixedValue();
  if (!isa<Constant>(StoredOnceValue))
    if (auto *LI = dyn_cast<LoadInst>(U)) {
      if (LI->getFunction() == F &&
          LI->getType() == StoredOnceValue->getType() && LI->isSimple())
  bool MadeChange = false;
  if (!Loads.empty()) {
    auto &DT = LookupDomTree(*const_cast<Function *>(F));
    for (auto *LI : Loads) {
      if (DT.dominates(StoredOnceStore, LI)) {
        LI->replaceAllUsesWith(const_cast<Value *>(StoredOnceValue));
        LI->eraseFromParent();
  if (!GS.HasMultipleAccessingFunctions &&
      GS.AccessingFunction &&
      GS.AccessingFunction->doesNotRecurse() &&

      GS.AccessingFunction->getEntryBlock().begin().getNonConst();
        nullptr, GV->getName(), FirstI);

  bool Changed = false;
  if (GS.Ordering == AtomicOrdering::NotAtomic) {
      LLVM_DEBUG(dbgs() << " *** Marking constant allowed us to simplify "
                        << "all users and delete global!\n");
  Value *StoredOnceValue = GS.getStoredOnceValue();
      const_cast<Function &>(*GS.StoredOnceStore->getFunction());
  bool CanHaveNonUndefGlobalInitializer =
      GetTTI(StoreFn).canHaveNonUndefGlobalInitializerInAddressSpace(
  auto *SOVConstant = dyn_cast<Constant>(StoredOnceValue);
      DL.getTypeAllocSize(SOVConstant->getType()) ==
      CanHaveNonUndefGlobalInitializer) {
    NGV->copyAttributesFrom(GV);
      LLVM_DEBUG(dbgs() << " *** Substituting initializer allowed us to "
                        << "simplify all users and delete global!\n");
  if (GS.NumStores == 1)
  if (SOVConstant && GS.Ordering == AtomicOrdering::NotAtomic &&
       CanHaveNonUndefGlobalInitializer)) {
  bool Changed = false;
  auto NewUnnamedAddr = GV.hasLocalLinkage() ? GlobalValue::UnnamedAddr::Global
                                             : GlobalValue::UnnamedAddr::Local;
  auto *GVar = dyn_cast<GlobalVariable>(&GV);
  if (GVar->isConstant() || !GVar->hasInitializer())

  for (User *U : F->users()) {
    if (isa<BlockAddress>(U))

  if (Attrs.hasAttrSomewhere(A, &AttrIndex))
    return Attrs.removeAttributeAtIndex(C, AttrIndex, A);

  F->setAttributes(StripAttr(F->getContext(), F->getAttributes(), A));
  for (User *U : F->users()) {
    if (isa<BlockAddress>(U))

  for (User *U : F->users()) {
    if (isa<BlockAddress>(U))
    CallInst *CI = dyn_cast<CallInst>(U);
    if (BB.getTerminatingMustTailCall())
  return !F->hasAddressTaken();

  return Res.first->second;

  auto CallSiteFreq = CallerBFI.getBlockFreq(CallSiteBB);
  auto CallerEntryFreq =
  return CallSiteFreq < CallerEntryFreq * ColdProb;
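// --- Editor's sketch (not part of GlobalOpt.cpp) ---
// The coldness test above is a simple frequency comparison: a call site is
// considered cold when its block frequency is below ColdCCRelFreq percent
// (default 2, per the option declared near the top of the file) of the
// caller's entry frequency. The same arithmetic in self-contained form:
#include <cstdint>

static bool isColdCallSiteFreq(uint64_t CallSiteFreq, uint64_t CallerEntryFreq,
                               unsigned ColdCCRelFreqPercent = 2) {
  // Equivalent to CallSiteFreq < CallerEntryFreq * (ColdCCRelFreqPercent / 100)
  // without integer truncation of the percentage.
  return CallSiteFreq * 100 < CallerEntryFreq * ColdCCRelFreqPercent;
}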
                                     const std::vector<Function *> &AllCallsCold) {
  for (User *U : F.users()) {
    if (isa<BlockAddress>(U))

  for (User *U : F->users()) {
    if (isa<BlockAddress>(U))

      if (CallInst *CI = dyn_cast<CallInst>(&I)) {
        if (CI->isInlineAsm())
        Function *CalledFn = CI->getCalledFunction();

  for (User *U : F->users()) {
    CallBase *CB = dyn_cast<CallBase>(U);
    assert(isa<BlockAddress>(U) &&
           "Expected either CallBase or BlockAddress");

  for (User *U : F->users())
    if (isa<InvokeInst>(U))
  auto *M = F->getParent();
  for (User *U : PreallocatedCalls) {
    CallBase *CB = dyn_cast<CallBase>(U);
           "Shouldn't call RemovePreallocated() on a musttail preallocated call");
    CallBase *PreallocatedSetup = nullptr;
    for (auto *It = OpBundles.begin(); It != OpBundles.end(); ++It) {
      if (It->getTag() == "preallocated") {
        PreallocatedSetup = cast<CallBase>(*It->input_begin());
        OpBundles.erase(It);
    assert(PreallocatedSetup && "Did not find preallocated bundle");
        cast<ConstantInt>(PreallocatedSetup->getArgOperand(0))->getZExtValue();
    assert((isa<CallInst>(CB) || isa<InvokeInst>(CB)) &&
           "Unknown indirect call type");

  for (auto *User : PreallocatedArgs) {
    auto *UseCall = cast<CallBase>(User);
    assert(UseCall->getCalledFunction()->getIntrinsicID() ==
               Intrinsic::call_preallocated_arg &&
           "preallocated token use was not a llvm.call.preallocated.arg");
        cast<ConstantInt>(UseCall->getArgOperand(1))->getZExtValue();
    Value *AllocaReplacement = ArgAllocas[AllocArgIndex];
    if (!AllocaReplacement) {
      auto AddressSpace = UseCall->getType()->getPointerAddressSpace();
          UseCall->getFnAttr(Attribute::Preallocated).getValueAsType();
      ArgAllocas[AllocArgIndex] = Alloca;
      AllocaReplacement = Alloca;
    UseCall->eraseFromParent();
  cast<Instruction>(PreallocatedSetup)->eraseFromParent();
  bool Changed = false;
  std::vector<Function *> AllCallsCold;
      AllCallsCold.push_back(&F);
    if (F.hasFnAttribute(Attribute::Naked))
    if (!F.hasName() && !F.isDeclaration() && !F.hasLocalLinkage())
    if (deleteIfDead(F, NotDiscardableComdats, DeleteFnCallback)) {
    if (!F.isDeclaration()) {
        ChangedCFGCallback(F);
    if (!F.hasLocalLinkage())
    if (F.getAttributes().hasAttrSomewhere(Attribute::InAlloca) &&
    if (F.getAttributes().hasAttrSomewhere(Attribute::Preallocated)) {
      ChangeableCCCache.erase(&F);
    if (F.getAttributes().hasAttrSomewhere(Attribute::Nest) &&
        !F.hasAddressTaken()) {
  if (!F || !F->isIntrinsic() || F->getIntrinsicID() != Intrinsic::memcpy)

  auto *IsVolatile = dyn_cast<ConstantInt>(CI->getArgOperand(3));
  if (!Alloca || !IsVolatile || IsVolatile->isOne())
  if (!Alloca->isStaticAlloca())
  if (!Alloca->getAllocatedType()->isArrayTy())

                                            unsigned NumBytesToPad,
                                            unsigned NumBytesToCopy) {
  std::vector<uint8_t> StrData(Data.begin(), Data.end());
  for (unsigned int p = 0; p < NumBytesToPad; p++)
    StrData.push_back('\0');
  auto Arr = ArrayRef(StrData.data(), NumBytesToCopy + NumBytesToPad);
      SourceReplace, SourceReplace->getName());

                           const unsigned NumBytesToCopy,
  unsigned int TotalBytes = NumBytesToCopy + NumBytesToPad;
  unsigned NumElementsToCopy = divideCeil(TotalBytes, ElementByteWidth);
      Alloca->getAllocatedType()->getArrayElementType(), NumElementsToCopy));
  Alloca->replaceAllUsesWith(NewAlloca);
  Alloca->eraseFromParent();

                                       const unsigned NumBytesToPad,
                                       const unsigned NumBytesToCopy,
    auto *CI = dyn_cast<CallInst>(User);
    if (CI->getArgOperand(1) != SourceVar)
    widenDestArray(CI, NumBytesToPad, NumBytesToCopy, SourceDataArray);
    CI->setArgOperand(2, ConstantInt::get(BytesToCopyOp->getType(),
                                          NumBytesToCopy + NumBytesToPad));
  NumGlobalArraysPadded++;

  auto *BytesToCopyOp = dyn_cast<ConstantInt>(CI->getArgOperand(2));
  if (!SourceDataArray)
  unsigned NumBytesToCopy = BytesToCopyOp->getZExtValue();
  uint64_t DZSize = Alloca->getAllocatedType()->getArrayNumElements();
  unsigned NumElementsToCopy = divideCeil(NumBytesToCopy, ElementByteWidth);
  if (NumElementsToCopy != DZSize || DZSize != SZSize)
  unsigned NumBytesToPad = GetTTI(*F).getNumBytesToPadGlobalArray(
      NumBytesToCopy, SourceDataArray->getType());
  if (NumBytesToPad) {
                                        BytesToCopyOp, SourceDataArray);
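// --- Editor's sketch (not part of GlobalOpt.cpp) ---
// The widening logic above pads a constant source string with NUL bytes and
// rounds the element count up with divideCeil, so that the memcpy length
// reaches an alignment boundary the target prefers (this is what the
// NumGlobalArraysPadded statistic counts). The core arithmetic, stand-alone:
#include <cstdint>
#include <vector>

static uint64_t divideCeilBytes(uint64_t Num, uint64_t Den) {
  return (Num + Den - 1) / Den;               // same contract as llvm::divideCeil
}

static std::vector<uint8_t> padSource(std::vector<uint8_t> Data,
                                      unsigned NumBytesToPad) {
  for (unsigned P = 0; P < NumBytesToPad; ++P)
    Data.push_back('\0');                     // mirrors the StrData loop above
  return Data;
}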
  bool Changed = false;
  auto &DL = M.getDataLayout();
    Changed |= processGlobal(GV, GetTTI, GetTLI, LookupDomTree);

  if (F->isDeclaration())
    ++NumCtorsEvaluated;
               << F->getName() << "' to " << NewInitializers.size()
    for (const auto &Pair : NewInitializers)
      Pair.first->setInitializer(Pair.second);
  V.eraseFromParent();
  const Type *UsedArrayType = V.getValueType();
  const auto *VAT = cast<ArrayType>(UsedArrayType);
  const auto *VEPT = cast<PointerType>(VAT->getArrayElementType());
      PointerType::get(V.getContext(), VEPT->getAddressSpace());
  Module *M = V.getParent();
  V.removeFromParent();
  NV->setSection("llvm.metadata");

    CompilerUsed = {Vec.begin(), Vec.end()};
  iterator usedBegin() { return Used.begin(); }
  iterator usedEnd() { return Used.end(); }
  used_iterator_range used() {
    return used_iterator_range(usedBegin(), usedEnd());
  iterator compilerUsedBegin() { return CompilerUsed.begin(); }
  iterator compilerUsedEnd() { return CompilerUsed.end(); }
  used_iterator_range compilerUsed() {
    return used_iterator_range(compilerUsedBegin(), compilerUsedEnd());
    return CompilerUsed.count(GV);
    return CompilerUsed.insert(GV).second;
  void syncVariablesAndSets() {
  assert((!U.usedCount(&GA) || !U.compilerUsedCount(&GA)) &&
         "We should have removed the duplicated "
         "element from llvm.compiler.used");
  return !U.usedCount(&GA) && !U.compilerUsedCount(&GA);
  return U.usedCount(&GV) || U.compilerUsedCount(&GV);

                             bool &RenameTarget) {
  RenameTarget = false;
  RenameTarget = true;

  bool Changed = false;
    Used.compilerUsedErase(GV);
    if (!J.hasName() && !J.isDeclaration() && !J.hasLocalLinkage())
    if (!IsModuleLocal(J))
    Constant *Aliasee = J.getAliasee();
    Target->removeDeadConstantUsers();
    J.replaceAllUsesWith(Aliasee);
    ++NumAliasesResolved;
    Target->setLinkage(J.getLinkage());
    Target->setDSOLocal(J.isDSOLocal());
    Target->setVisibility(J.getVisibility());
    Target->setDLLStorageClass(J.getDLLStorageClass());
    if (Used.usedErase(&J))
    if (Used.compilerUsedErase(&J))
      Used.compilerUsedInsert(Target);
    ++NumAliasesRemoved;
  Used.syncVariablesAndSets();
  auto FuncIter = M.begin();
  if (FuncIter == M.end())
  auto *TLI = &GetTLI(*FuncIter);
  if (!TLI->has(Func))
  Function *Fn = M.getFunction(TLI->getName(Func));
  if (!TLI->getLibFunc(*Fn, F) || F != Func)

    if (I.isDebugOrPseudoInst())
    if (isa<ReturnInst>(I))

  bool Changed = false;
    CallInst *CI = dyn_cast<CallInst>(U);
    ++NumCXXDtorsRemoved;
  if (IF.isInterposable())
  return dyn_cast<Function>(Ret->getReturnValue());

  bool Changed = false;
    if (!IF.use_empty() &&
        (!Callee->isDeclaration() ||
         none_of(IF.users(), [](User *U) { return isa<GlobalAlias>(U); }))) {
      IF.replaceAllUsesWith(Callee);
      NumIFuncsResolved++;

  bool Changed = false;
  if (auto *F = dyn_cast<Function>(V)) {
  } else if (auto *Sel = dyn_cast<SelectInst>(V)) {
  } else if (auto *Phi = dyn_cast<PHINode>(V)) {
    for (unsigned I = 0, E = Phi->getNumIncomingValues(); I != E; ++I)

  bool Changed = false;
    if (IF.isInterposable())
      if (auto *Ret = dyn_cast_or_null<ReturnInst>(BB.getTerminator()))
    assert(!Callees.empty() && "Expecting successful collection of versions");
      auto [It, Inserted] = FeatureMask.try_emplace(Callee);
      return FeatureMask[LHS] > FeatureMask[RHS];
    for (User *U : IF.users()) {
      if (auto *CB = dyn_cast<CallBase>(U)) {
        if (CB->getCalledOperand() == &IF) {
          Function *Caller = CB->getFunction();
          auto [FeatIt, FeatInserted] = FeatureMask.try_emplace(Caller);
          auto [CallIt, CallInserted] = CallSites.try_emplace(Caller);
            Callers.push_back(Caller);
          CallIt->second.push_back(CB);
      return FeatureMask[LHS] > FeatureMask[RHS];
      assert(I < Callees.size() && "Found callers of equal priority");
      uint64_t CallerBits = FeatureMask[Caller];
      uint64_t CalleeBits = FeatureMask[Callee];
      if (CallerBits == CalleeBits)
      else if (!implies(CallerBits, CalleeBits)) {
        while (implies(CalleeBits, CallerBits)) {
          if (++I == Callees.size())
          CalleeBits = FeatureMask[Callees[I]];
      auto &Calls = CallSites[Caller];
        LLVM_DEBUG(dbgs() << "Redirecting call " << Caller->getName() << " -> "
                          << Callee->getName() << "\n");
        CS->setCalledOperand(Callee);
    if (IF.use_empty() ||
        all_of(IF.users(), [](User *U) { return isa<GlobalAlias>(U); }))
      NumIFuncsResolved++;
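// --- Editor's sketch (not part of GlobalOpt.cpp) ---
// Callers and callees are both sorted by descending feature-priority mask, and
// a call is redirected to a version whose features the caller is already known
// to provide. The `implies` check used above is read here as a bitwise subset
// test; that reading is this sketch's assumption, not a quote from the pass:
#include <cstdint>

static bool impliesFeatures(uint64_t CallerBits, uint64_t CalleeBits) {
  // The caller provides every feature bit the callee version requires.
  return (CallerBits & CalleeBits) == CalleeBits;
}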
  bool Changed = false;
  bool LocalChange = true;
  std::optional<uint32_t> FirstNotFullyEvaluatedPriority;
  while (LocalChange) {
    LocalChange = false;
    NotDiscardableComdats.clear();
        NotDiscardableComdats.insert(C);
      if (const Comdat *C = F.getComdat())
        if (!F.isDefTriviallyDead())
          NotDiscardableComdats.insert(C);
      if (const Comdat *C = GA.getComdat())
        if (!GA.isDiscardableIfUnused() || !GA.use_empty())
          NotDiscardableComdats.insert(C);
        NotDiscardableComdats, ChangedCFGCallback,
          if (FirstNotFullyEvaluatedPriority &&
              *FirstNotFullyEvaluatedPriority != Priority)
          FirstNotFullyEvaluatedPriority = Priority;
        NotDiscardableComdats);
    Changed |= LocalChange;

  auto &DL = M.getDataLayout();
      ChangedCFGCallback, DeleteFnCallback))
static bool IsSafeComputationToRemove(Value *V, function_ref< TargetLibraryInfo &(Function &)> GetTLI)
Given a value that is stored to a global but never read, determine whether it's safe to remove the st...
static Function * FindAtExitLibFunc(Module &M, function_ref< TargetLibraryInfo &(Function &)> GetTLI, LibFunc Func)
static bool optimizeOnceStoredGlobal(GlobalVariable *GV, Value *StoredOnceVal, const DataLayout &DL, function_ref< TargetLibraryInfo &(Function &)> GetTLI)
static Function * hasSideeffectFreeStaticResolution(GlobalIFunc &IF)
static bool tryToOptimizeStoreOfAllocationToGlobal(GlobalVariable *GV, CallInst *CI, const DataLayout &DL, TargetLibraryInfo *TLI)
If we have a global that is only initialized with a fixed size allocation try to transform the progra...
static void ConstantPropUsersOf(Value *V, const DataLayout &DL, TargetLibraryInfo *TLI)
Walk the use list of V, constant folding all of the instructions that are foldable.
static bool hasOnlyColdCalls(Function &F, function_ref< BlockFrequencyInfo &(Function &)> GetBFI, ChangeableCCCacheTy &ChangeableCCCache)
static bool allUsesOfLoadedValueWillTrapIfNull(const GlobalVariable *GV)
Return true if all uses of any loads from GV will trap if the loaded value is null.
static bool hasChangeableCCImpl(Function *F)
Return true if this is a calling convention that we'd like to change.
static bool tryWidenGlobalArrayAndDests(Function *F, GlobalVariable *SourceVar, const unsigned NumBytesToPad, const unsigned NumBytesToCopy, ConstantInt *BytesToCopyOp, ConstantDataArray *SourceDataArray)
static GlobalVariable * widenGlobalVariable(GlobalVariable *OldVar, Function *F, unsigned NumBytesToPad, unsigned NumBytesToCopy)
static bool AllUsesOfValueWillTrapIfNull(const Value *V, SmallPtrSetImpl< const PHINode * > &PHIs)
Return true if all users of the specified value will trap if the value is dynamically null.
static GlobalVariable * OptimizeGlobalAddressOfAllocation(GlobalVariable *GV, CallInst *CI, uint64_t AllocSize, Constant *InitVal, const DataLayout &DL, TargetLibraryInfo *TLI)
This function takes the specified global variable, and transforms the program as if it always contain...
static bool IsEmptyAtExitFunction(const Function &Fn)
Returns whether the given function is an empty C++ destructor or atexit handler and can therefore be eliminated. Note that we assume that other optimization passes have already simplified the code, so we simply check for 'ret'.
static bool collectSRATypes(DenseMap< uint64_t, GlobalPart > &Parts, GlobalVariable *GV, const DataLayout &DL)
Look at all uses of the global and determine which (offset, type) pairs it can be split into.
static bool valueIsOnlyUsedLocallyOrStoredToOneGlobal(const CallInst *CI, const GlobalVariable *GV)
Scan the use-list of GV checking to make sure that there are no complex uses of GV.
static bool OptimizeFunctions(Module &M, function_ref< TargetLibraryInfo &(Function &)> GetTLI, function_ref< TargetTransformInfo &(Function &)> GetTTI, function_ref< BlockFrequencyInfo &(Function &)> GetBFI, function_ref< DominatorTree &(Function &)> LookupDomTree, SmallPtrSetImpl< const Comdat * > &NotDiscardableComdats, function_ref< void(Function &F)> ChangedCFGCallback, function_ref< void(Function &F)> DeleteFnCallback)
static bool DeleteDeadIFuncs(Module &M, SmallPtrSetImpl< const Comdat * > &NotDiscardableComdats)
static void RemoveAttribute(Function *F, Attribute::AttrKind A)
static bool tryWidenGlobalArraysUsedByMemcpy(GlobalVariable *GV, function_ref< TargetTransformInfo &(Function &)> GetTTI)
static bool hasChangeableCC(Function *F, ChangeableCCCacheTy &ChangeableCCCache)
static bool deleteIfDead(GlobalValue &GV, SmallPtrSetImpl< const Comdat * > &NotDiscardableComdats, function_ref< void(Function &)> DeleteFnCallback=nullptr)
static void RemovePreallocated(Function *F)
static bool processGlobal(GlobalValue &GV, function_ref< TargetTransformInfo &(Function &)> GetTTI, function_ref< TargetLibraryInfo &(Function &)> GetTLI, function_ref< DominatorTree &(Function &)> LookupDomTree)
Analyze the specified global variable and optimize it if possible.
static bool isColdCallSite(CallBase &CB, BlockFrequencyInfo &CallerBFI)
Return true if the block containing the call site has a BlockFrequency of less than ColdCCRelFreq% of...
static void transferSRADebugInfo(GlobalVariable *GV, GlobalVariable *NGV, uint64_t FragmentOffsetInBits, uint64_t FragmentSizeInBits, uint64_t VarSize)
Copy over the debug info for a variable to its SRA replacements.
static cl::opt< bool > EnableColdCCStressTest("enable-coldcc-stress-test", cl::desc("Enable stress test of coldcc by adding " "calling conv to all internal functions."), cl::init(false), cl::Hidden)
static bool OptimizeGlobalAliases(Module &M, SmallPtrSetImpl< const Comdat * > &NotDiscardableComdats)
static bool TryToShrinkGlobalToBoolean(GlobalVariable *GV, Constant *OtherVal)
At this point, we have learned that the only two values ever stored into GV are its initializer and O...
static cl::opt< bool > OptimizeNonFMVCallers("optimize-non-fmv-callers", cl::desc("Statically resolve calls to versioned " "functions from non-versioned callers."), cl::init(false), cl::Hidden)
static void ChangeCalleesToFastCall(Function *F)
Walk all of the direct calls of the specified function, changing them to FastCC.
static bool hasMustTailCallers(Function *F)
static bool callInstIsMemcpy(CallInst *CI)
static bool OptimizeNonTrivialIFuncs(Module &M, function_ref< TargetTransformInfo &(Function &)> GetTTI)
static bool OptimizeGlobalVars(Module &M, function_ref< TargetTransformInfo &(Function &)> GetTTI, function_ref< TargetLibraryInfo &(Function &)> GetTLI, function_ref< DominatorTree &(Function &)> LookupDomTree, SmallPtrSetImpl< const Comdat * > &NotDiscardableComdats)
static void allUsesOfLoadAndStores(GlobalVariable *GV, SmallVector< Value *, 4 > &Uses)
Get all the loads/store uses for global variable GV.
static void widenDestArray(CallInst *CI, const unsigned NumBytesToPad, const unsigned NumBytesToCopy, ConstantDataArray *SourceDataArray)
static bool OptimizeEmptyGlobalAtExitDtors(Function *CXAAtExitFn, bool isCXX)
static bool mayHaveOtherReferences(GlobalValue &GV, const LLVMUsed &U)
static void changeCallSitesToColdCC(Function *F)
static AttributeList StripAttr(LLVMContext &C, AttributeList Attrs, Attribute::AttrKind A)
static bool hasInvokeCallers(Function *F)
static void setUsedInitializer(GlobalVariable &V, const SmallPtrSetImpl< GlobalValue * > &Init)
static bool OptimizeAwayTrappingUsesOfLoads(GlobalVariable *GV, Constant *LV, const DataLayout &DL, function_ref< TargetLibraryInfo &(Function &)> GetTLI)
The specified global has only one non-null value stored into it.
static bool isValidCandidateForColdCC(Function &F, function_ref< BlockFrequencyInfo &(Function &)> GetBFI, const std::vector< Function * > &AllCallsCold)
static cl::opt< int > ColdCCRelFreq("coldcc-rel-freq", cl::Hidden, cl::init(2), cl::desc("Maximum block frequency, expressed as a percentage of caller's " "entry frequency, for a call site to be considered cold for enabling " "coldcc"))
static bool optimizeGlobalsInModule(Module &M, const DataLayout &DL, function_ref< TargetLibraryInfo &(Function &)> GetTLI, function_ref< TargetTransformInfo &(Function &)> GetTTI, function_ref< BlockFrequencyInfo &(Function &)> GetBFI, function_ref< DominatorTree &(Function &)> LookupDomTree, function_ref< void(Function &F)> ChangedCFGCallback, function_ref< void(Function &F)> DeleteFnCallback)
static bool EvaluateStaticConstructor(Function *F, const DataLayout &DL, TargetLibraryInfo *TLI)
Evaluate static constructors in the function, if we can.
static bool CleanupConstantGlobalUsers(GlobalVariable *GV, const DataLayout &DL)
We just marked GV constant.
static bool OptimizeStaticIFuncs(Module &M)
Find IFuncs whose resolvers always point at the same statically known callee, and replace their callers with a direct call.
static bool isLeakCheckerRoot(GlobalVariable *GV)
Is this global variable possibly used by a leak checker as a root? If so, we might not really want to...
static bool forwardStoredOnceStore(GlobalVariable *GV, const StoreInst *StoredOnceStore, function_ref< DominatorTree &(Function &)> LookupDomTree)
static int compareNames(Constant *const *A, Constant *const *B)
static bool collectVersions(TargetTransformInfo &TTI, Value *V, SmallVectorImpl< Function * > &Versions)
static bool CleanupPointerRootUsers(GlobalVariable *GV, function_ref< TargetLibraryInfo &(Function &)> GetTLI)
This GV is a pointer root.
static bool isPointerValueDeadOnEntryToFunction(const Function *F, GlobalValue *GV, function_ref< DominatorTree &(Function &)> LookupDomTree)
static bool processInternalGlobal(GlobalVariable *GV, const GlobalStatus &GS, function_ref< TargetTransformInfo &(Function &)> GetTTI, function_ref< TargetLibraryInfo &(Function &)> GetTLI, function_ref< DominatorTree &(Function &)> LookupDomTree)
Analyze the specified global variable and optimize it if possible.
static bool hasUsesToReplace(GlobalAlias &GA, const LLVMUsed &U, bool &RenameTarget)
static bool OptimizeAwayTrappingUsesOfValue(Value *V, Constant *NewV)
static GlobalVariable * SRAGlobal(GlobalVariable *GV, const DataLayout &DL)
Perform scalar replacement of aggregates on the specified global variable.
static bool destArrayCanBeWidened(CallInst *CI)
static bool hasUseOtherThanLLVMUsed(GlobalAlias &GA, const LLVMUsed &U)