#include "llvm/IR/IntrinsicsSPIRV.h"
#include <unordered_set>

#define GET_BuiltinGroup_DECL
#include "SPIRVGenTables.inc"
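// Note on this excerpt: fragments elided from the original source are marked
// with "// ...". As a rough sketch of the pass's role (inferred from the code
// below, not a normative description): it rewrites IR constructs that have no
// direct SPIR-V mapping into llvm.spv.* intrinsics (spv_gep, spv_load,
// spv_store, spv_bitcast, spv_switch, ...) and records pointee-type knowledge
// through spv_assign_type/spv_assign_ptr_type and spv_ptrcast.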
class SPIRVEmitIntrinsics
    : public ModulePass,
      public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
  SPIRVTargetMachine *TM = nullptr;
  SPIRVGlobalRegistry *GR = nullptr;
  bool TrackConstants = true;
  bool HaveFunPtrs = false;
  DenseMap<Instruction *, Constant *> AggrConsts;
  DenseMap<Instruction *, Type *> AggrConstTypes;
  DenseSet<Instruction *> AggrStores;
  std::unordered_set<Value *> Named;

  DenseMap<Function *, SmallVector<std::pair<unsigned, Type *>>> FDeclPtrTys;

  // Records of pointers whose element type is not yet deduced.
  bool CanTodoType = true;
  unsigned TodoTypeSz = 0;
  DenseMap<Value *, bool> TodoType;
  void insertTodoType(Value *Op) {
    if (CanTodoType) {
      auto It = TodoType.try_emplace(Op, true);
      if (It.second)
        ++TodoTypeSz;
    }
  }
  void eraseTodoType(Value *Op) {
    auto It = TodoType.find(Op);
    if (It != TodoType.end() && It->second) {
      It->second = false;
      --TodoTypeSz;
    }
  }
  bool isTodoType(Value *Op) {
    auto It = TodoType.find(Op);
    return It != TodoType.end() && It->second;
  }

  // A registry of instructions whose operand types were already validated.
  std::unordered_set<Instruction *> TypeValidated;
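  // Sketch of the TodoType bookkeeping above (a reading of the code, not a
  // documented contract): TodoType maps a pointer Value to a "still pending"
  // flag, TodoTypeSz counts pending entries so postprocessTypes() can cheaply
  // test whether unresolved pointee types remain, and CanTodoType gates new
  // insertions once the main deduction rounds are over.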
  enum WellKnownTypes { Event };

  // Deduce element types of pointers.
  Type *deduceElementType(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited,
                                bool UnknownElemTypeI8,
                                bool IgnoreKnownType = false);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByUsersDeep(Value *Op,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);

  // Deduce nested types of composites.
  Type *deduceNestedTypeHelper(User *U, bool UnknownElemTypeI8);
  Type *deduceNestedTypeHelper(User *U, Type *OrigTy,
                               std::unordered_set<Value *> &Visited,
                               bool UnknownElemTypeI8);

  // Deduce element types of operands.
  void deduceOperandElementType(Instruction *I,
                                SmallPtrSet<Instruction *, 4> *IncompleteRets,
                                const SmallPtrSet<Value *, 4> *AskOps = nullptr,
                                bool IsPostprocessing = false);

  Type *reconstructType(Value *Op, bool UnknownElemTypeI8,
                        bool IsPostprocessing);

  bool insertAssignPtrTypeIntrs(Instruction *I, IRBuilder<> &B,
                                bool UnknownElemTypeI8);
  void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType, Value *V,
                                    IRBuilder<> &B);
  void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
                                        Type *ExpectedElementType,
                                        unsigned OperandToReplace,
                                        IRBuilder<> &B);
  bool shouldTryToAddMemAliasingDecoration(Instruction *Inst);
  void insertConstantsForFPFastMathDefault(Module &M);
  void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
                                  std::unordered_set<Function *> &FVisited);
  bool deduceOperandElementTypeCalledFunction(
      CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
      Type *&KnownElemTy, bool &Incomplete);
  void deduceOperandElementTypeFunctionPointer(
      CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
      Type *&KnownElemTy, bool IsPostprocessing);
  bool deduceOperandElementTypeFunctionRet(
      Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
      const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
      Type *&KnownElemTy, Value *Op, Function *F);

  void replaceUsesOfWithSpvPtrcast(Value *Op, Type *ElemTy, Instruction *I,
                                   DenseMap<Function *, CallInst *> Ptrcasts);
  void propagateElemType(Value *Op, Type *ElemTy,
                         DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
  void
  propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
                       DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
  void propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
                            DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
                            std::unordered_set<Value *> &Visited,
                            DenseMap<Function *, CallInst *> Ptrcasts);

  GetElementPtrInst *simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP);

  bool postprocessTypes(Module &M);
  bool processFunctionPointers(Module &M);
  void parseFunDeclarations(Module &M);

  void useRoundingMode(ConstrainedFPIntrinsic *FPI, IRBuilder<> &B);
  // Walk a GEP over a logical SPIR-V pointer, invoking one callback per
  // literal index and another for a dynamic index.
  bool walkLogicalAccessChain(
      GetElementPtrInst &GEP,
      const std::function<void(Type *PointedType, uint64_t Index)>
          &OnLiteralIndexing,
      const std::function<void(Type *PointedType, Value *Index)>
          &OnDynamicIndexing);

  Type *getGEPType(GetElementPtrInst *GEP);
  Type *getGEPTypeLogical(GetElementPtrInst *GEP);
  Instruction *buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP);

public:
  static char ID;
  SPIRVEmitIntrinsics(SPIRVTargetMachine *TM = nullptr)
      : ModulePass(ID), TM(TM) {}
  Instruction *visitGetElementPtrInst(GetElementPtrInst &I);
  Instruction *visitInsertElementInst(InsertElementInst &I);
  Instruction *visitExtractElementInst(ExtractElementInst &I);
  Instruction *visitExtractValueInst(ExtractValueInst &I);
  Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);

  StringRef getPassName() const override { return "SPIRV emit intrinsics"; }

  bool runOnModule(Module &M) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    ModulePass::getAnalysisUsage(AU);
  }
};
static inline bool isConvergenceIntrinsic(const Instruction *I) {
  const auto *II = dyn_cast<IntrinsicInst>(I);
  if (!II)
    return false;
  return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
         II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
         II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
}

bool expectIgnoredInIRTranslation(const Instruction *I) {
  const auto *II = dyn_cast<IntrinsicInst>(I);
  if (!II)
    return false;
  switch (II->getIntrinsicID()) {
  case Intrinsic::invariant_start:
  case Intrinsic::spv_resource_handlefrombinding:
  case Intrinsic::spv_resource_getpointer:
    return true;
  default:
    return false;
  }
}

// Return the pointer at the root of a chain of spv_ptrcast intrinsics.
static Value *getPointerRoot(Value *I) {
  if (auto *II = dyn_cast<IntrinsicInst>(I)) {
    if (II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
      Value *V = II->getArgOperand(0);
      return getPointerRoot(V);
    }
  }
  return I;
}
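// Illustrative example for getPointerRoot (hypothetical IR, not from a test):
//   %p1 = call ptr @llvm.spv.ptrcast(ptr %p0, ...)
//   %p2 = call ptr @llvm.spv.ptrcast(ptr %p1, ...)
// getPointerRoot(%p2) walks the cast chain and returns %p0, so type deduction
// always reasons about the original pointer rather than an intermediate cast.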
char SPIRVEmitIntrinsics::ID = 0;
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I) {
  // ...
  B.SetInsertPoint(I->getParent()->getFirstNonPHIOrDbgOrAlloca());
}

static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I) {
  B.SetCurrentDebugLocation(I->getDebugLoc());
  if (I->getType()->isVoidTy())
    B.SetInsertPoint(I->getNextNode());
  else
    B.SetInsertPoint(*I->getInsertionPointAfterDef());
}

static bool requireAssignType(Instruction *I) {
  if (const auto *Intr = dyn_cast<IntrinsicInst>(I)) {
    switch (Intr->getIntrinsicID()) {
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
      return false;
    }
  }
  return true;
}

static void reportFatalOnTokenType(const Instruction *I) {
  if (I->getType()->isTokenTy())
    report_fatal_error("A token is encountered but SPIR-V without extensions "
                       "does not support token type",
                       false);
}

static void emitAssignName(Instruction *I, IRBuilder<> &B) {
  if (!I->hasName() || I->getType()->isAggregateType() ||
      expectIgnoredInIRTranslation(I))
    return;
  reportFatalOnTokenType(I);
  setInsertPointAfterDef(B, I);
  std::vector<Value *> Args = {I};
  addStringImm(I->getName(), B, Args);
  B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
}
void SPIRVEmitIntrinsics::replaceAllUsesWith(Value *Src, Value *Dest,
                                             bool DeleteOld) {
  GR->replaceAllUsesWith(Src, Dest, DeleteOld);
  // Update records of incomplete types, if any.
  if (isTodoType(Src)) {
    if (DeleteOld)
      eraseTodoType(Src);
    insertTodoType(Dest);
  }
}

void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(IRBuilder<> &B,
                                                     Instruction *Src,
                                                     Instruction *Dest) {
  replaceAllUsesWith(Src, Dest);
  std::string Name = Src->hasName() ? Src->getName().str() : "";
  Src->eraseFromParent();
  if (!Name.empty()) {
    Dest->setName(Name);
    if (Named.insert(Dest).second)
      emitAssignName(Dest, B);
  }
}
Type *SPIRVEmitIntrinsics::reconstructType(Value *Op, bool UnknownElemTypeI8,
                                           bool IsPostprocessing) {
  // ... (lookups of already-known pointee types elided)
  if (UnknownElemTypeI8) {
    if (!IsPostprocessing)
      insertTodoType(Op);
    // ... (fall back to an i8-based pointee type)
  }
  // ...
}
CallInst *SPIRVEmitIntrinsics::buildSpvPtrcast(Function *F, Value *Op,
                                               Type *ElemTy) {
  IRBuilder<> B(Op->getContext());
  // ... (instructions get the cast right after their definition)
  if (auto *OpA = dyn_cast<Argument>(Op))
    B.SetInsertPointPastAllocas(OpA->getParent());
  else
    B.SetInsertPoint(F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
  Type *OpTy = Op->getType();
  // ... (Types/Args assembled from OpTy, Op, the expected element type and
  // the pointer address space)
  CallInst *PtrCasted =
      B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  // ...
  return PtrCasted;
}
void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
    Value *Op, Type *ElemTy, Instruction *I,
    DenseMap<Function *, CallInst *> Ptrcasts) {
  Function *F = I->getParent()->getParent();
  CallInst *PtrCastedI = nullptr;
  auto It = Ptrcasts.find(F);
  if (It == Ptrcasts.end()) {
    PtrCastedI = buildSpvPtrcast(F, Op, ElemTy);
    Ptrcasts[F] = PtrCastedI;
  } else {
    PtrCastedI = It->second;
  }
  I->replaceUsesOfWith(Op, PtrCastedI);
}
void SPIRVEmitIntrinsics::propagateElemType(
    Value *Op, Type *ElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
  DenseMap<Function *, CallInst *> Ptrcasts;
  SmallVector<User *> Users(Op->users());
  for (auto *U : Users) {
    Instruction *UI = dyn_cast<Instruction>(U);
    if (!UI || isa<IntrinsicInst>(UI))
      continue;
    if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
      continue;
    // If the instruction was already validated, keep it valid by preserving
    // the current type of Op via an explicit cast.
    if (isa<GetElementPtrInst>(UI) ||
        TypeValidated.find(UI) != TypeValidated.end())
      replaceUsesOfWithSpvPtrcast(Op, ElemTy, UI, Ptrcasts);
  }
}
void SPIRVEmitIntrinsics::propagateElemTypeRec(
    Value *Op, Type *PtrElemTy, Type *CastElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
  std::unordered_set<Value *> Visited;
  DenseMap<Function *, CallInst *> Ptrcasts;
  propagateElemTypeRec(Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
                       std::move(Ptrcasts));
}

void SPIRVEmitIntrinsics::propagateElemTypeRec(
    Value *Op, Type *PtrElemTy, Type *CastElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
    std::unordered_set<Value *> &Visited,
    DenseMap<Function *, CallInst *> Ptrcasts) {
  if (!Visited.insert(Op).second)
    return;
  SmallVector<User *> Users(Op->users());
  for (auto *U : Users) {
    Instruction *UI = dyn_cast<Instruction>(U);
    if (!UI || isa<IntrinsicInst>(UI))
      continue;
    if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
      continue;
    // If the instruction was already validated, keep it valid by preserving
    // the current type of Op via an explicit cast.
    if (isa<GetElementPtrInst>(UI) ||
        TypeValidated.find(UI) != TypeValidated.end())
      replaceUsesOfWithSpvPtrcast(Op, CastElemTy, UI, Ptrcasts);
  }
}
Type *
SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                                  bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
                                      UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
    Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {
  Type *Ty = ValueTy;
  if (Operand) {
    // For pointers, refine Ty using the operand's deduced pointee type;
    // otherwise recurse into the composite.
    if (Type *NestedTy =
            deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
      Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(ValueTy));
    // ... (non-pointer case delegates to deduceNestedTypeHelper)
  }
  return Ty;
}
// Traverse the users of Op to deduce its element (pointee) type.
Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
    Value *Op, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8) {
  // ... (early exits for non-pointers and already-known types elided)
  for (User *OpU : Op->users()) {
    if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
        return Ty;
    }
  }
  return nullptr;
}

// Implements what we know in advance about intrinsics and builtin calls.
static Type *getPointeeTypeByCallInst(StringRef DemangledName,
                                      Function *CalledF, unsigned OpIdx) {
  if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
       DemangledName.starts_with("printf(")) &&
      OpIdx == 0)
    return IntegerType::getInt8Ty(CalledF->getContext());
  return nullptr;
}
// Deduce and return a successfully deduced element type of the value,
// or nullptr otherwise.
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I,
                                                   bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeHelper(I, Visited, UnknownElemTypeI8);
}

void SPIRVEmitIntrinsics::maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
                                             bool UnknownElemTypeI8) {
  if (isUntypedPointerTy(RefTy)) {
    if (!UnknownElemTypeI8)
      return;
    insertTodoType(Op);
  }
  Ty = RefTy;
}
bool SPIRVEmitIntrinsics::walkLogicalAccessChain(
    GetElementPtrInst &GEP,
    const std::function<void(Type *, uint64_t)> &OnLiteralIndexing,
    const std::function<void(Type *, Value *)> &OnDynamicIndexing) {
  // Only i8-based GEPs are handled here, so the index is a byte offset
  // from the root pointer.
  Value *Src = getPointerRoot(GEP.getPointerOperand());
  Type *CurType = deduceElementType(Src, true);
  const DataLayout &DL = CurrF->getDataLayout();
  // ... (extraction of the byte-offset operand elided)
  Value *Operand = *GEP.idx_begin();
  if (!isa<ConstantInt>(Operand)) {
    // A dynamic offset can only index into an array.
    auto *AT = dyn_cast<ArrayType>(CurType);
    if (AT)
      OnDynamicIndexing(AT->getElementType(), Operand);
    return AT == nullptr;
  }
  uint64_t Offset = cast<ConstantInt>(Operand)->getZExtValue();
  // Peel the current type one level per iteration until the constant byte
  // offset is consumed.
  while (Offset > 0) {
    if (auto *AT = dyn_cast<ArrayType>(CurType)) {
      uint32_t EltTypeSize = DL.getTypeSizeInBits(AT->getElementType()) / 8;
      uint64_t Index = Offset / EltTypeSize;
      Offset = Offset % EltTypeSize;
      CurType = AT->getElementType();
      OnLiteralIndexing(CurType, Index);
    } else if (auto *ST = dyn_cast<StructType>(CurType)) {
      [[maybe_unused]] uint32_t StructSize = DL.getTypeSizeInBits(ST) / 8;
      assert(Offset < StructSize && "Offset must lie within the struct");
      const auto &STL = DL.getStructLayout(ST);
      unsigned Element = STL->getElementContainingOffset(Offset);
      Offset -= STL->getElementOffset(Element);
      CurType = ST->getElementType(Element);
      OnLiteralIndexing(CurType, Element);
    }
    // ... (remaining cases elided)
  }
  return false;
}
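// Worked example for walkLogicalAccessChain (hypothetical types; the byte
// arithmetic mirrors the code above). For a root of type { [4 x float], i32 }
// and a constant byte offset of 8:
//   struct step: getElementContainingOffset(8) -> element 0, offset stays 8;
//   array step:  EltTypeSize = 4, so index = 8 / 4 = 2, offset = 8 % 4 = 0.
// Each literal step reports (element type, index) via OnLiteralIndexing; a
// non-constant offset is routed to OnDynamicIndexing and is treated as legal
// only when the current type is an array.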
Instruction *
SPIRVEmitIntrinsics::buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP) {
  const DataLayout &DL = CurrF->getDataLayout();
  IRBuilder<> B(GEP.getParent());
  B.SetInsertPoint(&GEP);

  std::vector<Value *> Indices;
  Indices.push_back(ConstantInt::get(
      IntegerType::getInt32Ty(CurrF->getContext()), 0, /* isSigned= */ false));
  walkLogicalAccessChain(
      GEP,
      [&Indices, &B](Type *EltType, uint64_t Index) {
        Indices.push_back(
            ConstantInt::get(B.getInt64Ty(), Index, /* isSigned= */ false));
      },
      [&Indices, &B, &DL](Type *EltType, Value *Offset) {
        uint32_t EltTypeSize = DL.getTypeSizeInBits(EltType) / 8;
        Value *Index = B.CreateUDiv(
            Offset, ConstantInt::get(Offset->getType(), EltTypeSize,
                                     /* isSigned= */ false));
        Indices.push_back(Index);
      });

  SmallVector<Type *, 2> Types = {GEP.getType(), GEP.getOperand(0)->getType()};
  SmallVector<Value *, 4> Args;
  Args.push_back(B.getInt1(GEP.isInBounds()));
  Args.push_back(GEP.getOperand(0));
  append_range(Args, Indices);
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &GEP, NewI);
  return NewI;
}
Type *SPIRVEmitIntrinsics::getGEPTypeLogical(GetElementPtrInst *GEP) {
  Type *CurType = GEP->getResultElementType();

  bool Interrupted = walkLogicalAccessChain(
      *GEP, [&CurType](Type *EltType, uint64_t Index) { CurType = EltType; },
      [&CurType](Type *EltType, Value *Index) { CurType = EltType; });

  return Interrupted ? GEP->getResultElementType() : CurType;
}

Type *SPIRVEmitIntrinsics::getGEPType(GetElementPtrInst *Ref) {
  if (Ref->getSourceElementType() ==
          IntegerType::getInt8Ty(CurrF->getContext()) &&
      TM->getSubtargetImpl()->isLogicalSPIRV())
    return getGEPTypeLogical(Ref);

  Type *Ty = nullptr;
  // ...
  if (isNestedPointer(Ref->getSourceElementType()))
    Ty = Ref->getSourceElementType();
  else
    Ty = Ref->getResultElementType();
  return Ty;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
    Value *I, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8,
    bool IgnoreKnownType) {
  // Allow nullptr as an argument.
  if (!I)
    return nullptr;

  // Maybe the type is already known.
  if (!IgnoreKnownType)
    if (Type *KnownTy = GR->findDeducedElementType(I))
      return KnownTy;

  // Maybe a cycle.
  if (!Visited.insert(I).second)
    return nullptr;

  // Fallback value in case we fail to deduce a type.
  Type *Ty = nullptr;
  // Look for known basic patterns of type inference.
  if (auto *Ref = dyn_cast<AllocaInst>(I)) {
    maybeAssignPtrType(Ty, I, Ref->getAllocatedType(), UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    Ty = getGEPType(Ref);
  } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
    Value *Op = Ref->getPointerOperand();
    Type *KnownTy = GR->findDeducedElementType(Op);
    if (!KnownTy)
      KnownTy = Op->getType();
    if (Type *ElemTy = getPointeeType(KnownTy))
      maybeAssignPtrType(Ty, I, ElemTy, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
    Ty = deduceElementTypeByValueDeep(
        Ref->getValueType(),
        Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited,
        UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    Type *RefTy = deduceElementTypeHelper(Ref->getPointerOperand(), Visited,
                                          UnknownElemTypeI8);
    maybeAssignPtrType(Ty, I, RefTy, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
        isPointerTy(Src) && isPointerTy(Dest))
      Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited,
                                   UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (Value *Op = Ref->getNewValOperand())
      Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    if (Value *Op = Ref->getValOperand())
      Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<PHINode>(I)) {
    // Pick the most frequent type deduced across incoming values.
    Type *BestTy = nullptr;
    unsigned MaxN = 1;
    DenseMap<Type *, unsigned> PhiTys;
    for (int i = Ref->getNumIncomingValues() - 1; i >= 0; --i) {
      Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited,
                                        UnknownElemTypeI8);
      if (!Ty)
        continue;
      auto It = PhiTys.try_emplace(Ty, 1);
      if (!It.second) {
        ++It.first->second;
        if (It.first->second > MaxN) {
          MaxN = It.first->second;
          BestTy = Ty;
        }
      }
    }
    if (BestTy)
      Ty = BestTy;
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
      Ty = deduceElementTypeByUsersDeep(Op, Visited, UnknownElemTypeI8);
      if (Ty)
        break;
    }
  } else if (auto *CI = dyn_cast<CallInst>(I)) {
    static StringMap<unsigned> ResTypeByArg = {
        {"__spirv_GenericCastToPtr_ToGlobal", 0},
        {"__spirv_GenericCastToPtr_ToLocal", 0},
        {"__spirv_GenericCastToPtr_ToPrivate", 0},
        {"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};

    auto *II = dyn_cast<IntrinsicInst>(I);
    if (II && II->getIntrinsicID() == Intrinsic::spv_resource_getpointer) {
      auto *HandleType = cast<TargetExtType>(II->getOperand(0)->getType());
      if (HandleType->getTargetExtName() == "spirv.Image" ||
          HandleType->getTargetExtName() == "spirv.SignedImage") {
        for (User *U : II->users()) {
          // ... (take the access type of the first suitable user)
        }
      } else if (HandleType->getTargetExtName() == "spirv.VulkanBuffer") {
        // This call is supposed to index into an array.
        Ty = HandleType->getTypeParameter(0);
        // ...
      }
      // ...
    } else if (II && II->getIntrinsicID() ==
                         Intrinsic::spv_generic_cast_to_ptr_explicit) {
      Ty = deduceElementTypeHelper(CI->getArgOperand(0), Visited,
                                   UnknownElemTypeI8);
    } else if (Function *CalledF = CI->getCalledFunction()) {
      std::string DemangledName =
          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
      if (DemangledName.length() > 0)
        DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
      auto AsArgIt = ResTypeByArg.find(DemangledName);
      if (AsArgIt != ResTypeByArg.end())
        Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
                                     Visited, UnknownElemTypeI8);
      // ...
    }
  }

  // Remember the found relationship.
  if (Ty && !IgnoreKnownType) {
    GR->addDeducedElementType(I, normalizeType(Ty));
    // ...
  }

  return Ty;
}
Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
                                                  bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceNestedTypeHelper(U, U->getType(), Visited, UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
    User *U, Type *OrigTy, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {
  if (!U)
    return OrigTy;

  // ... (return an already-deduced composite type, if any)

  // Maybe a cycle.
  if (!Visited.insert(U).second)
    return OrigTy;

  if (isa<StructType>(OrigTy)) {
    SmallVector<Type *> Tys;
    bool Change = false;
    for (unsigned i = 0; i < U->getNumOperands(); ++i) {
      Value *Op = U->getOperand(i);
      assert(Op && "Operands should not be null.");
      Type *OpTy = Op->getType();
      Type *Ty = OpTy;
      if (isPointerTy(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(OpTy));
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      Tys.push_back(Ty);
      Change |= Ty != OpTy;
    }
    if (Change) {
      Type *NewTy = StructType::create(Tys);
      GR->addDeducedCompositeType(U, NewTy);
      return NewTy;
    }
  } else if (auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = ArrTy->getElementType();
      Type *Ty = OpTy;
      if (isPointerTy(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(OpTy));
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      if (Ty != OpTy) {
        Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  } else if (auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = VecTy->getElementType();
      Type *Ty = OpTy;
      if (isPointerTy(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(OpTy));
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      if (Ty != OpTy) {
        Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  }

  return OrigTy;
}
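// Note (a reading of deduceNestedTypeHelper, not a spec): for composites the
// function rebuilds the type bottom-up, replacing untyped pointer members
// with typed-pointer wrappers whenever a member's pointee type could be
// deduced, and registers the improved composite via
// GR->addDeducedCompositeType() so later queries see the refined type.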
Type *SPIRVEmitIntrinsics::deduceElementType(Value *I, bool UnknownElemTypeI8) {
  if (Type *Ty = deduceElementTypeHelper(I, UnknownElemTypeI8))
    return Ty;
  if (!UnknownElemTypeI8)
    return nullptr;
  insertTodoType(I);
  return IntegerType::getInt8Ty(I->getContext());
}

static Type *getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I,
                             Value *PointerOperand) {
  // ... (look up the pointee type of PointerOperand in the registry)
}
bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
    CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
    Type *&KnownElemTy, bool &Incomplete) {
  Function *CalledF = CI->getCalledFunction();
  if (!CalledF)
    return false;
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CalledF->getName());
  if (DemangledName.length() > 0 &&
      !StringRef(DemangledName).starts_with("llvm.")) {
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(*CalledF);
    auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
        DemangledName, ST.getPreferredInstructionSet());
    if (Opcode == SPIRV::OpGroupAsyncCopy) {
      for (unsigned i = 0, PtrCnt = 0; i < CI->arg_size() && PtrCnt < 2; ++i) {
        Value *Op = CI->getArgOperand(i);
        if (!isPointerTy(Op->getType()))
          continue;
        ++PtrCnt;
        if (Type *ElemTy = GR->findDeducedElementType(Op))
          KnownElemTy = ElemTy; // src will rewrite dest if both are defined
        Ops.push_back(std::make_pair(Op, i));
      }
    } else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
      if (CI->arg_size() == 0)
        return true;
      Value *Op = CI->getArgOperand(0);
      if (!isPointerTy(Op->getType()))
        return true;
      switch (Opcode) {
      case SPIRV::OpAtomicFAddEXT:
      case SPIRV::OpAtomicFMinEXT:
      case SPIRV::OpAtomicFMaxEXT:
      case SPIRV::OpAtomicLoad:
      case SPIRV::OpAtomicCompareExchangeWeak:
      case SPIRV::OpAtomicCompareExchange:
      case SPIRV::OpAtomicExchange:
      case SPIRV::OpAtomicIAdd:
      case SPIRV::OpAtomicISub:
      case SPIRV::OpAtomicOr:
      case SPIRV::OpAtomicXor:
      case SPIRV::OpAtomicAnd:
      case SPIRV::OpAtomicUMin:
      case SPIRV::OpAtomicUMax:
      case SPIRV::OpAtomicSMin:
      case SPIRV::OpAtomicSMax: {
        KnownElemTy = isPointerTy(CI->getType()) ? getAtomicElemTy(GR, CI, Op)
                                                 : CI->getType();
        if (!KnownElemTy)
          return true;
        Incomplete = isTodoType(Op);
        Ops.push_back(std::make_pair(Op, 0));
      } break;
      case SPIRV::OpAtomicStore: {
        if (CI->arg_size() < 4)
          return true;
        Value *ValOp = CI->getArgOperand(3);
        KnownElemTy = isPointerTy(ValOp->getType())
                          ? getAtomicElemTy(GR, CI, Op)
                          : ValOp->getType();
        if (!KnownElemTy)
          return true;
        Incomplete = isTodoType(Op);
        Ops.push_back(std::make_pair(Op, 0));
      } break;
      }
    }
  }
  return true;
}
void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
    CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
    Type *&KnownElemTy, bool IsPostprocessing) {
  Value *Op = CI->getCalledOperand();
  if (!Op || !isPointerTy(Op->getType()))
    return;
  Ops.push_back(std::make_pair(Op, std::numeric_limits<unsigned>::max()));
  FunctionType *FTy = CI->getFunctionType();
  bool IsNewFTy = false, IsIncomplete = false;
  SmallVector<Type *, 4> ArgTys;
  for (Value *Arg : CI->args()) {
    Type *ArgTy = Arg->getType();
    if (ArgTy->isPointerTy()) {
      if (Type *ElemTy = GR->findDeducedElementType(Arg)) {
        IsNewFTy = true;
        ArgTy = getTypedPointerWrapper(ElemTy, getPointerAddressSpace(ArgTy));
        if (isTodoType(Arg))
          IsIncomplete = true;
      } else {
        IsIncomplete = true;
      }
    }
    ArgTys.push_back(ArgTy);
  }
  Type *RetTy = FTy->getReturnType();
  if (CI->getType()->isPointerTy()) {
    if (Type *ElemTy = GR->findDeducedElementType(CI)) {
      IsNewFTy = true;
      RetTy =
          getTypedPointerWrapper(ElemTy, getPointerAddressSpace(CI->getType()));
      if (isTodoType(CI))
        IsIncomplete = true;
    } else {
      IsIncomplete = true;
    }
  }
  if (!IsPostprocessing && IsIncomplete)
    insertTodoType(Op);
  KnownElemTy =
      IsNewFTy ? FunctionType::get(RetTy, ArgTys, FTy->isVarArg()) : FTy;
}
bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
    Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
    const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
    Type *&KnownElemTy, Value *Op, Function *F) {
  KnownElemTy = GR->findDeducedElementType(F);
  if (KnownElemTy)
    return false;
  if (Type *OpElemTy = GR->findDeducedElementType(Op)) {
    GR->addDeducedElementType(F, normalizeType(OpElemTy));
    // ... (record the updated return type in the registry)
    // Non-recursive update of types in function uses.
    DenseSet<std::pair<Value *, Value *>> VisitedSubst{std::make_pair(I, Op)};
    for (User *U : F->users()) {
      CallInst *CI = dyn_cast<CallInst>(U);
      if (!CI || CI->getCalledFunction() != F)
        continue;
      if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(CI)) {
        if (Type *PrevElemTy = GR->findDeducedElementType(CI)) {
          GR->updateAssignType(AssignCI, CI,
                               getNormalizedPoisonValue(OpElemTy));
          propagateElemType(CI, PrevElemTy, VisitedSubst);
        }
      }
    }
    // Non-recursive update of types in the function's incomplete returns:
    // this may happen just once per function.
    if (IncompleteRets)
      for (Instruction *IncompleteRetI : *IncompleteRets)
        deduceOperandElementType(IncompleteRetI, nullptr, AskOps,
                                 IsPostprocessing);
  } else if (IncompleteRets) {
    IncompleteRets->insert(I);
  }
  TypeValidated.insert(I);
  return true;
}
void SPIRVEmitIntrinsics::deduceOperandElementType(
    Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
    const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing) {
  SmallVector<std::pair<Value *, unsigned>> Ops;
  Type *KnownElemTy = nullptr;
  bool Incomplete = false;
  // Look for known basic patterns of type inference.
  if (auto *Ref = dyn_cast<PHINode>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    Incomplete = isTodoType(I);
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Value *Op = Ref->getIncomingValue(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    KnownElemTy = GR->findDeducedElementType(I);
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(I);
    Ops.push_back(std::make_pair(Ref->getPointerOperand(), 0));
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    Incomplete = isTodoType(I);
    Ops.push_back(std::make_pair(Ref->getOperand(0), 0));
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    // ...
    KnownElemTy = Ref->getSourceElementType();
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 GetElementPtrInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
    KnownElemTy = I->getType();
    // ...
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 LoadInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<StoreInst>(I)) {
    if (!(KnownElemTy =
              reconstructType(Ref->getValueOperand(), false, IsPostprocessing)))
      return;
    // ...
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 StoreInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    KnownElemTy = getAtomicElemTy(GR, I, Ref->getPointerOperand());
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(Ref->getPointerOperand());
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 AtomicCmpXchgInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    KnownElemTy = getAtomicElemTy(GR, I, Ref->getPointerOperand());
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(Ref->getPointerOperand());
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 AtomicRMWInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    Incomplete = isTodoType(I);
    for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
      Value *Op = Ref->getOperand(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {
    Value *Op = Ref->getReturnValue();
    if (!Op || !isPointerTy(CurrF->getReturnType()))
      return;
    if (deduceOperandElementTypeFunctionRet(I, IncompleteRets, AskOps,
                                            IsPostprocessing, KnownElemTy, Op,
                                            CurrF))
      return;
    Incomplete = isTodoType(CurrF);
    Ops.push_back(std::make_pair(Op, 0));
  } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {
    Value *Op0 = Ref->getOperand(0);
    Value *Op1 = Ref->getOperand(1);
    bool Incomplete0 = isTodoType(Op0);
    bool Incomplete1 = isTodoType(Op1);
    Type *ElemTy1 = GR->findDeducedElementType(Op1);
    Type *ElemTy0 = (Incomplete0 && !Incomplete1 && ElemTy1)
                        ? nullptr
                        : GR->findDeducedElementType(Op0);
    if (ElemTy0) {
      KnownElemTy = ElemTy0;
      Incomplete = Incomplete0;
      Ops.push_back(std::make_pair(Op1, 1));
    } else if (ElemTy1) {
      KnownElemTy = ElemTy1;
      Incomplete = Incomplete1;
      Ops.push_back(std::make_pair(Op0, 0));
    }
  } else if (CallInst *CI = dyn_cast<CallInst>(I)) {
    if (!CI->isIndirectCall())
      deduceOperandElementTypeCalledFunction(CI, Ops, KnownElemTy, Incomplete);
    else if (HaveFunPtrs)
      deduceOperandElementTypeFunctionPointer(CI, Ops, KnownElemTy,
                                              IsPostprocessing);
  }

  // There is not enough info to deduce types, or all is valid.
  if (!KnownElemTy || Ops.size() == 0)
    return;

  IRBuilder<> B(CurrF->getContext());
  for (auto &OpIt : Ops) {
    Value *Op = OpIt.first;
    // ... (skip operands excluded by AskOps)
    Type *AskTy = nullptr;
    CallInst *AskCI = nullptr;
    if (IsPostprocessing && AskOps) {
      AskTy = GR->findDeducedElementType(Op);
      AskCI = GR->findAssignPtrTypeInstr(Op);
    }
    Type *Ty = AskTy ? AskTy : GR->findDeducedElementType(Op);
    if (Ty == KnownElemTy)
      continue;
    Type *OpTy = Op->getType();
    if (Op->hasUseList() &&
        (!Ty || AskTy || isUntypedPointerTy(Ty) || isTodoType(Op))) {
      Type *PrevElemTy = GR->findDeducedElementType(Op);
      GR->addDeducedElementType(Op, normalizeType(KnownElemTy));
      // Check if KnownElemTy is complete.
      if (!Incomplete)
        eraseTodoType(Op);
      else if (!IsPostprocessing)
        insertTodoType(Op);
      // Check for an existing spv_assign_ptr_type instruction.
      CallInst *AssignCI = AskCI ? AskCI : GR->findAssignPtrTypeInstr(Op);
      if (AssignCI == nullptr) {
        // ... (emit a fresh spv_assign_ptr_type for Op)
      } else {
        GR->updateAssignType(AssignCI, Op,
                             getNormalizedPoisonValue(KnownElemTy));
        DenseSet<std::pair<Value *, Value *>> VisitedSubst{
            std::make_pair(I, Op)};
        propagateElemTypeRec(Op, KnownElemTy, PrevElemTy, VisitedSubst);
      }
    } else {
      eraseTodoType(Op);
      CallInst *PtrCastI =
          buildSpvPtrcast(I->getParent()->getParent(), Op, KnownElemTy);
      if (OpIt.second == std::numeric_limits<unsigned>::max())
        cast<CallInst>(I)->setCalledOperand(PtrCastI);
      else
        I->setOperand(OpIt.second, PtrCastI);
    }
  }
  TypeValidated.insert(I);
}
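// Summary sketch of deduceOperandElementType (derived from the code above):
// each recognized instruction yields a KnownElemTy plus a list of pointer
// operands (Ops) expected to carry that pointee type. Operands whose deduced
// type is absent or incomplete get their records updated (and an
// spv_assign_ptr_type emitted or refreshed); operands with a conflicting,
// already-validated type are wrapped in an spv_ptrcast instead.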
void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
                                              Instruction *New,
                                              IRBuilder<> &B) {
  while (!Old->user_empty()) {
    auto *U = Old->user_back();
    if (isAssignTypeInstr(U)) {
      B.SetInsertPoint(U);
      SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
      CallInst *AssignCI =
          B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()}, Args);
      GR->addAssignPtrTypeInstr(New, AssignCI);
      U->eraseFromParent();
    } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) ||
               isa<CallInst>(U)) {
      U->replaceUsesOfWith(Old, New);
    } else {
      llvm_unreachable("illegal aggregate intrinsic user");
    }
  }
  New->copyMetadata(*Old);
  Old->eraseFromParent();
}
void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(CurrF))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    Instruction *I = Worklist.front();
    bool BPrepared = false;
    Worklist.pop();

    for (auto &Op : I->operands()) {
      auto *AggrUndef = dyn_cast<UndefValue>(Op);
      if (!AggrUndef || !Op->getType()->isAggregateType())
        continue;

      if (!BPrepared) {
        setInsertPointSkippingPhis(B, I);
        BPrepared = true;
      }
      auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {});
      Worklist.push(IntrUndef);
      I->replaceUsesOfWith(Op, IntrUndef);
      AggrConsts[IntrUndef] = AggrUndef;
      AggrConstTypes[IntrUndef] = AggrUndef->getType();
    }
  }
}
void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(CurrF))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    bool IsPhi = isa<PHINode>(I), BPrepared = false;
    bool KeepInst = false;
    for (const auto &Op : I->operands()) {
      Constant *AggrConst = nullptr;
      Type *ResTy = nullptr;
      // ... (ConstantVector/Array/Struct cases elided; for data sequences:)
      if (auto *COp = dyn_cast<ConstantDataSequential>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = Op->getType()->isVectorTy() ? COp->getType() : B.getInt32Ty();
      }
      if (AggrConst) {
        SmallVector<Value *> Args;
        if (auto *COp = dyn_cast<ConstantDataSequential>(Op))
          for (unsigned i = 0; i < COp->getNumElements(); ++i)
            Args.push_back(COp->getElementAsConstant(i));
        else
          for (auto &AOp : AggrConst->operands())
            Args.push_back(AOp);
        if (!BPrepared) {
          IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
                : B.SetInsertPoint(I);
          BPrepared = true;
        }
        auto *CI =
            B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {Args});
        Worklist.push(CI);
        I->replaceUsesOfWith(Op, CI);
        KeepInst = true;
        AggrConsts[CI] = AggrConst;
        AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst, false);
      }
    }
    if (!KeepInst)
      Worklist.pop();
  }
}
static void createDecorationIntrinsic(Instruction *I, MDNode *Node,
                                      IRBuilder<> &B) {
  LLVMContext &Ctx = I->getContext();
  setInsertPointAfterDef(B, I);
  B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                    {I, MetadataAsValue::get(Ctx, MDNode::get(Ctx, {Node}))});
}

static void createRoundingModeDecoration(Instruction *I,
                                         unsigned RoundingModeDeco,
                                         IRBuilder<> &B) {
  LLVMContext &Ctx = I->getContext();
  Type *Int32Ty = Type::getInt32Ty(Ctx);
  MDNode *RoundingModeNode = MDNode::get(
      Ctx,
      {ConstantAsMetadata::get(
           ConstantInt::get(Int32Ty, SPIRV::Decoration::FPRoundingMode)),
       ConstantAsMetadata::get(ConstantInt::get(Int32Ty, RoundingModeDeco))});
  createDecorationIntrinsic(I, RoundingModeNode, B);
}

static void createSaturatedConversionDecoration(Instruction *I,
                                                IRBuilder<> &B) {
  LLVMContext &Ctx = I->getContext();
  Type *Int32Ty = Type::getInt32Ty(Ctx);
  MDNode *SaturatedConversionNode =
      MDNode::get(Ctx, {ConstantAsMetadata::get(ConstantInt::get(
                           Int32Ty, SPIRV::Decoration::SaturatedConversion))});
  createDecorationIntrinsic(I, SaturatedConversionNode, B);
}

static void addSaturatedDecorationToIntrinsic(Instruction *I, IRBuilder<> &B) {
  if (auto *CI = dyn_cast<CallInst>(I))
    if (Function *Fu = CI->getCalledFunction())
      if (Fu->isIntrinsic()) {
        unsigned const int IntrinsicId = Fu->getIntrinsicID();
        switch (IntrinsicId) {
        case Intrinsic::fptosi_sat:
        case Intrinsic::fptoui_sat:
          createSaturatedConversionDecoration(CI, B);
          break;
        default:
          break;
        }
      }
}

// Lowering of inline assembly calls to the spv_inline_asm intrinsic; the
// enclosing function's full definition is elided in this excerpt.
// ...
//   MDString *ConstraintString = MDString::get(Ctx, IA->getConstraintString());
//   ...
//   B.SetInsertPoint(&Call);
//   B.CreateIntrinsic(Intrinsic::spv_inline_asm, {Args});
void SPIRVEmitIntrinsics::useRoundingMode(ConstrainedFPIntrinsic *FPI,
                                          IRBuilder<> &B) {
  std::optional<RoundingMode> RM = FPI->getRoundingMode();
  if (!RM.has_value())
    return;
  unsigned RoundingModeDeco = std::numeric_limits<unsigned>::max();
  switch (RM.value()) {
  default:
    // Ignore unknown rounding modes.
    break;
  case RoundingMode::NearestTiesToEven:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
    break;
  case RoundingMode::TowardNegative:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
    break;
  case RoundingMode::TowardPositive:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
    break;
  case RoundingMode::TowardZero:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
    break;
  case RoundingMode::Dynamic:
  case RoundingMode::NearestTiesToAway:
    // These modes are not handled here.
    break;
  }
  if (RoundingModeDeco == std::numeric_limits<unsigned>::max())
    return;
  createRoundingModeDecoration(FPI, RoundingModeDeco, B);
}
Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
  BasicBlock *ParentBB = I.getParent();
  IRBuilder<> B(ParentBB);
  B.SetInsertPoint(&I);
  SmallVector<Value *, 4> Args;
  SmallVector<BasicBlock *> BBCases;
  for (auto &Op : I.operands()) {
    if (Op.get()->getType()->isSized()) {
      Args.push_back(Op);
    } else if (BasicBlock *BB = dyn_cast<BasicBlock>(Op.get())) {
      BBCases.push_back(BB);
      Args.push_back(BlockAddress::get(BB->getParent(), BB));
    } else {
      report_fatal_error("Unexpected switch operand");
    }
  }
  CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
                                     {I.getOperand(0)->getType()}, {Args});
  // Remove the switch to avoid its unwanted unwrapping into branches, and
  // insert a temporary indirectbr to preserve a valid CFG until IR
  // translation.
  replaceAllUsesWith(&I, NewI);
  I.eraseFromParent();
  B.SetInsertPoint(ParentBB);
  IndirectBrInst *BrI = B.CreateIndirectBr(
      Constant::getNullValue(PointerType::getUnqual(ParentBB->getContext())),
      BBCases.size());
  for (BasicBlock *BBCase : BBCases)
    BrI->addDestination(BBCase);
  return BrI;
}
Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
  if (I.getSourceElementType() ==
          IntegerType::getInt8Ty(CurrF->getContext()) &&
      TM->getSubtargetImpl()->isLogicalSPIRV()) {
    // Untyped i8 GEPs on logical SPIR-V are rebuilt as typed access chains.
    return buildLogicalAccessChainFromGEP(I);
  }

  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 2> Types = {I.getType(), I.getOperand(0)->getType()};
  SmallVector<Value *, 4> Args;
  Args.push_back(B.getInt1(I.isInBounds()));
  Args.push_back(I.getOperand(0));
  append_range(Args, I.indices());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  Value *Source = I.getOperand(0);

  // SPIR-V, contrary to opaque-pointer LLVM IR, supports bitcasts between
  // pointers of different element types. Pointer bitcasts carry no type
  // information here, so they are skipped and reconciled later by
  // insertPtrCastOrAssignTypeInstr.
  if (isPointerTy(I.getType())) {
    replaceAllUsesWith(&I, Source);
    I.eraseFromParent();
    return nullptr;
  }

  SmallVector<Type *, 2> Types = {I.getType(), Source->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
    TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
  Type *VTy = V->getType();
  // ...
  if (Type *ElemTy = getPointeeType(VTy))
    if (ElemTy != AssignedType)
      report_fatal_error("Unexpected pointer element type!");
  // ... (look up the currently assigned type for V)
  if (CurrentType == AssignedType)
    return;
  // Builtin types cannot be redeclared or casted.
  if (CurrentType->isTargetExtTy())
    report_fatal_error("Type mismatch " + CurrentType->getTargetExtName() +
                           "/" + AssignedType->getTargetExtName() +
                           " for value " + V->getName(),
                       false);
  // ...
}
void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
    Instruction *I, Value *Pointer, Type *ExpectedElementType,
    unsigned OperandToReplace, IRBuilder<> &B) {
  TypeValidated.insert(I);

  // Do not emit spv_ptrcast if the pointee type already matches.
  Type *PointerElemTy = deduceElementTypeHelper(Pointer, false);
  if (PointerElemTy == ExpectedElementType ||
      isEquivalentTypes(PointerElemTy, ExpectedElementType))
    return;

  setInsertPointSkippingPhis(B, I);
  Value *ExpectedElementVal = getNormalizedPoisonValue(ExpectedElementType);
  MetadataAsValue *VMD = buildMD(ExpectedElementVal);
  unsigned AddressSpace = getPointerAddressSpace(Pointer->getType());
  bool FirstPtrCastOrAssignPtrType = true;

  // Do not emit a new spv_ptrcast if an equal one already exists.
  for (auto User : Pointer->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II ||
        (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
         II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
        II->getOperand(0) != Pointer)
      continue;

    // Some spv_ptrcast/spv_assign_ptr_type already exists for this pointer.
    FirstPtrCastOrAssignPtrType = false;
    if (II->getOperand(1) != VMD ||
        getPointerAddressSpace(II->getType()) != AddressSpace)
      continue;

    // The existing intrinsic matches the expected type; an assign_ptr_type
    // makes any cast redundant.
    if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
      return;

    // Reuse a matching cast only within the same basic block.
    if (II->getParent() != I->getParent())
      continue;

    I->setOperand(OperandToReplace, II);
    return;
  }

  if (FirstPtrCastOrAssignPtrType) {
    // For the first use, emit spv_assign_ptr_type instead of a cast.
    GR->buildAssignPtr(B, ExpectedElementType, Pointer);
    return;
  } else if (isTodoType(Pointer)) {
    eraseTodoType(Pointer);
    if (!isa<CallInst>(Pointer) && !isa<GetElementPtrInst>(Pointer)) {
      // Existing type info is incomplete: update the spv_assign_ptr_type
      // arguments and propagate the change.
      if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(Pointer)) {
        Type *PrevElemTy = GR->findDeducedElementType(Pointer);
        DenseSet<std::pair<Value *, Value *>> VisitedSubst{
            std::make_pair(I, Pointer)};
        GR->updateAssignType(AssignCI, Pointer, ExpectedElementVal);
        propagateElemType(Pointer, PrevElemTy, VisitedSubst);
      } else {
        GR->buildAssignPtr(B, ExpectedElementType, Pointer);
      }
      return;
    }
  }

  // Emit spv_ptrcast.
  SmallVector<Type *, 2> Types = {Pointer->getType(), Pointer->getType()};
  SmallVector<Value *, 2> Args = {Pointer, VMD, B.getInt32(AddressSpace)};
  auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  I->setOperand(OperandToReplace, PtrCastI);
  // Register a pointee type for the newly created spv_ptrcast.
  GR->buildAssignPtr(B, ExpectedElementType, PtrCastI);
}
void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
                                                         IRBuilder<> &B) {
  // Handle basic instructions:
  StoreInst *SI = dyn_cast<StoreInst>(I);
  if (IsKernelArgInt8(CurrF, SI)) {
    replacePointerOperandWithPtrCast(
        I, SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->getContext()),
        0, B);
  }
  if (SI) {
    Value *Op = SI->getValueOperand();
    Value *Pointer = SI->getPointerOperand();
    Type *OpTy = Op->getType();
    if (auto *OpI = dyn_cast<Instruction>(Op))
      OpTy = restoreMutatedType(GR, OpI, OpTy);
    if (OpTy == Op->getType())
      OpTy = deduceElementTypeByValueDeep(OpTy, Op, false);
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 1, B);
    return;
  }
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    Value *Pointer = LI->getPointerOperand();
    Type *OpTy = LI->getType();
    if (isPointerTy(OpTy)) {
      // ... (use an already-deduced pointee type if available)
      Type *NewOpTy = OpTy;
      OpTy = deduceElementTypeByValueDeep(OpTy, LI, false);
      if (OpTy == NewOpTy)
        insertTodoType(Pointer);
    }
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
    return;
  }
  if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    Value *Pointer = GEPI->getPointerOperand();
    Type *OpTy = nullptr;

    // When the GEP was rewritten from an untyped i8 form, recover the
    // accessed type from the root pointer on logical SPIR-V.
    bool IsRewrittenGEP =
        GEPI->getSourceElementType() == IntegerType::getInt8Ty(I->getContext());
    if (IsRewrittenGEP && TM->getSubtargetImpl()->isLogicalSPIRV()) {
      Value *Src = getPointerRoot(Pointer);
      OpTy = GR->findDeducedElementType(Src);
    }

    // In all other cases, fall back to the GEP source element type.
    if (!OpTy)
      OpTy = GEPI->getSourceElementType();

    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
    if (isNestedPointer(OpTy))
      insertTodoType(Pointer);
    return;
  }

  // Handle calls to builtins (non-intrinsics):
  CallInst *CI = dyn_cast<CallInst>(I);
  if (!CI || CI->isIndirectCall() || CI->isInlineAsm() ||
      !CI->getCalledFunction() || CI->getCalledFunction()->isIntrinsic())
    return;

  // Collect information about formal parameter types.
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CI->getCalledFunction()->getName());
  Function *CalledF = CI->getCalledFunction();
  SmallVector<Type *, 4> CalledArgTys;
  bool HaveTypes = false;
  for (unsigned OpIdx = 0; OpIdx < CalledF->arg_size(); ++OpIdx) {
    Argument *CalledArg = CalledF->getArg(OpIdx);
    Type *ArgType = CalledArg->getType();
    if (!isPointerTy(ArgType)) {
      CalledArgTys.push_back(nullptr);
    } else {
      Type *ElemTy = GR->findDeducedElementType(CalledArg);
      if (!ElemTy && hasPointeeTypeAttr(CalledArg))
        ElemTy = getPointeeTypeByAttr(CalledArg);
      if (!ElemTy) {
        // Try to deduce the element type from the argument's users.
        for (User *U : CalledArg->users()) {
          if (Instruction *Inst = dyn_cast<Instruction>(U)) {
            if ((ElemTy = deduceElementTypeHelper(Inst, false)) != nullptr)
              break;
          }
        }
      }
      HaveTypes |= ElemTy != nullptr;
      CalledArgTys.push_back(ElemTy);
    }
  }

  if (DemangledName.empty() && !HaveTypes)
    return;

  for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {
    Value *ArgOperand = CI->getArgOperand(OpIdx);
    if (!isPointerTy(ArgOperand->getType()))
      continue;
    // ...
    Type *ExpectedType =
        OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
    if (!ExpectedType && !DemangledName.empty())
      ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
          DemangledName, OpIdx, I->getContext());
    if (!ExpectedType || ExpectedType->isVoidTy())
      continue;
    // ...
    replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
  }
}
Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
  // ...
  SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
                                  I.getOperand(1)->getType(),
                                  I.getOperand(2)->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}

Instruction *
SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
  // ...
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperandType(),
                                  I.getIndexOperand()->getType()};
  SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 1> Types = {I.getInsertedValueOperand()->getType()};
  SmallVector<Value *> Args;
  Value *AggregateOp = I.getAggregateOperand();
  if (isa<UndefValue>(AggregateOp))
    Args.push_back(UndefValue::get(B.getInt32Ty()));
  else
    Args.push_back(AggregateOp);
  Args.push_back(I.getInsertedValueOperand());
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  Instruction *NewI =
      B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
  if (I.getAggregateOperand()->getType()->isAggregateType())
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  // ... (the aggregate operand is appended first)
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
  if (!I.getType()->isAggregateType())
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getLoadMemOperandFlags(I, CurrF->getDataLayout());
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
                        {I.getPointerOperand(), B.getInt16(Flags),
                         B.getInt8(I.getAlign().value())});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
  if (!AggrStores.contains(&I))
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getStoreMemOperandFlags(I, CurrF->getDataLayout());
  auto *PtrOp = I.getPointerOperand();
  auto *NewI = B.CreateIntrinsic(
      Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
      {I.getValueOperand(), PtrOp, B.getInt16(Flags),
       B.getInt8(I.getAlign().value())});
  I.eraseFromParent();
  return NewI;
}
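// Note (inferred from the two visitors above): only aggregate-typed loads and
// the stores recorded in AggrStores are rewritten; scalar memory accesses are
// left for instruction selection. The emitted spv_load/spv_store carry the
// MachineMemOperand flags as an i16 and the alignment as an i8, hypothetically:
//   call void @llvm.spv.store(%struct.S %val, ptr %p, i16 0, i8 4)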
Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
  Value *ArraySize = nullptr;
  if (I.isArrayAllocation()) {
    const SPIRVSubtarget *STI = TM->getSubtargetImpl(*I.getFunction());
    if (!STI->canUseExtension(
            SPIRV::Extension::SPV_INTEL_variable_length_array))
      report_fatal_error(
          "array allocation: this instruction requires the following "
          "SPIR-V extension: SPV_INTEL_variable_length_array",
          false);
    ArraySize = I.getArraySize();
  }
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  Type *PtrTy = I.getType();
  auto *NewI =
      ArraySize
          ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
                              {PtrTy, ArraySize->getType()},
                              {ArraySize, B.getInt8(I.getAlign().value())})
          : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy},
                              {B.getInt8(I.getAlign().value())});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
  assert(I.getType()->isAggregateType() && "Aggregate result is expected");
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    Args.push_back(Op);
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemScope(I.getContext(), I.getSyncScopeID()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
                                 {I.getPointerOperand()->getType()}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  B.CreateIntrinsic(Intrinsic::spv_unreachable, {});
  return &I;
}
void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
                                             IRBuilder<> &B) {
  // Skip the special artificial variable llvm.global.annotations.
  if (GV.getName() == "llvm.global.annotations")
    return;
  Constant *Init = nullptr;
  if (hasInitializer(&GV)) {
    // Deduce the element type and store the result in the registry; the
    // returned value itself is ignored here.
    deduceElementTypeHelper(&GV, false);
    Init = GV.getInitializer();
    // ...
    auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
                                       {GV.getType(), Init->getType()},
                                       {&GV, Init});
    InitInst->setArgOperand(1, Init);
  }
  if (!Init && GV.use_empty())
    B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
}
bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
                                                   IRBuilder<> &B,
                                                   bool UnknownElemTypeI8) {
  reportFatalOnTokenType(I);
  if (!isPointerTy(I->getType()) || !requireAssignType(I))
    return false;

  setInsertPointAfterDef(B, I);
  if (Type *ElemTy = deduceElementType(I, UnknownElemTypeI8)) {
    GR->buildAssignPtr(B, ElemTy, I);
    return false;
  }
  return true;
}
void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
                                                IRBuilder<> &B) {
  // TODO: extend the list of functions with known result types.
  static StringMap<unsigned> ResTypeWellKnown = {
      {"async_work_group_copy", WellKnownTypes::Event},
      {"async_work_group_strided_copy", WellKnownTypes::Event},
      {"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};

  reportFatalOnTokenType(I);

  bool IsKnown = false;
  if (auto *CI = dyn_cast<CallInst>(I)) {
    if (!CI->isIndirectCall() && !CI->isInlineAsm() &&
        CI->getCalledFunction() && !CI->getCalledFunction()->isIntrinsic()) {
      Function *CalledF = CI->getCalledFunction();
      std::string DemangledName =
          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
      FPDecorationId DecorationId = FPDecorationId::NONE;
      if (DemangledName.length() > 0)
        DemangledName =
            SPIRV::lookupBuiltinNameHelper(DemangledName, &DecorationId);
      auto ResIt = ResTypeWellKnown.find(DemangledName);
      if (ResIt != ResTypeWellKnown.end()) {
        IsKnown = true;
        setInsertPointAfterDef(B, I);
        switch (ResIt->second) {
        case WellKnownTypes::Event:
          GR->buildAssignType(
              B, TargetExtType::get(I->getContext(), "spirv.Event"), I);
          break;
        }
      }
      // Check if a floating-point rounding mode or saturation decoration is
      // requested by the builtin's name suffix.
      switch (DecorationId) {
      default:
        break;
      case FPDecorationId::SAT:
        createSaturatedConversionDecoration(CI, B);
        break;
      case FPDecorationId::RTE:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE, B);
        break;
      case FPDecorationId::RTZ:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ, B);
        break;
      case FPDecorationId::RTP:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP, B);
        break;
      case FPDecorationId::RTN:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN, B);
        break;
      }
    }
  }

  Type *Ty = I->getType();
  if (!IsKnown && !Ty->isVoidTy() && !isPointerTy(Ty) && requireAssignType(I)) {
    setInsertPointAfterDef(B, I);
    Type *TypeToAssign = Ty;
    if (auto *II = dyn_cast<IntrinsicInst>(I)) {
      if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
          II->getIntrinsicID() == Intrinsic::spv_undef) {
        auto It = AggrConstTypes.find(II);
        if (It == AggrConstTypes.end())
          report_fatal_error("Unknown composite intrinsic type");
        TypeToAssign = It->second;
      }
    }
    TypeToAssign = restoreMutatedType(GR, I, TypeToAssign);
    GR->buildAssignType(B, TypeToAssign, I);
  }
  for (const auto &Op : I->operands()) {
    if (isa<ConstantPointerNull>(Op) || isa<UndefValue>(Op) ||
        // Check the GetElementPtrConstantExpr case.
        (isa<ConstantExpr>(Op) && isa<GEPOperator>(Op))) {
      setInsertPointSkippingPhis(B, I);
      Type *OpTy = Op->getType();
      if (isa<UndefValue>(Op) && OpTy->isAggregateType()) {
        CallInst *AssignCI =
            buildIntrWithMD(Intrinsic::spv_assign_type, {B.getInt32Ty()}, Op,
                            UndefValue::get(B.getInt32Ty()), {}, B);
        GR->addAssignPtrTypeInstr(Op, AssignCI);
      } else if (!isa<Instruction>(Op)) {
        Type *OpTyElem = getPointeeType(OpTy);
        if (OpTyElem) {
          GR->buildAssignPtr(B, OpTyElem, Op);
        } else if (isPointerTy(OpTy)) {
          Type *ElemTy = GR->findDeducedElementType(Op);
          GR->buildAssignPtr(B, ElemTy ? ElemTy : deduceElementType(Op, true),
                             Op);
        } else {
          CallInst *AssignCI =
              buildIntrWithMD(Intrinsic::spv_assign_type, {OpTy}, Op,
                              getNormalizedPoisonValue(OpTy), {}, B);
          GR->addAssignPtrTypeInstr(Op, AssignCI);
        }
      }
    }
  }
}
bool SPIRVEmitIntrinsics::shouldTryToAddMemAliasingDecoration(
    Instruction *Inst) {
  const SPIRVSubtarget *STI = TM->getSubtargetImpl(*Inst->getFunction());
  if (!STI->canUseExtension(SPIRV::Extension::SPV_INTEL_memory_access_aliasing))
    return false;
  // Add aliasing decorations to internal load/store intrinsics and atomic
  // instructions, skipping atomic store since it has no result ID to attach
  // the decoration to.
  if (auto *CI = dyn_cast<CallInst>(Inst)) {
    if (Function *Fun = CI->getCalledFunction()) {
      if (Fun->isIntrinsic()) {
        switch (Fun->getIntrinsicID()) {
        case Intrinsic::spv_load:
        case Intrinsic::spv_store:
          return true;
        default:
          return false;
        }
      }
      std::string Name = getOclOrSpirvBuiltinDemangledName(Fun->getName());
      const std::string Prefix = "__spirv_Atomic";
      const bool IsAtomic = Name.find(Prefix) == 0;
      if (!Fun->getReturnType()->isVoidTy() && IsAtomic)
        return true;
    }
  }
  return false;
}

void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
                                                 IRBuilder<> &B) {
  if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
    setInsertPointAfterDef(B, I);
    B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                      {I, MetadataAsValue::get(I->getContext(), MD)});
  }
  // Lower alias.scope/noalias metadata.
  auto processMemAliasingDecoration = [&](unsigned Kind) {
    if (MDNode *AliasListMD = I->getMetadata(Kind)) {
      if (shouldTryToAddMemAliasingDecoration(I)) {
        uint32_t Dec = Kind == LLVMContext::MD_alias_scope
                           ? SPIRV::Decoration::AliasScopeINTEL
                           : SPIRV::Decoration::NoAliasINTEL;
        SmallVector<Value *, 3> Args = {
            I, ConstantInt::get(B.getInt32Ty(), Dec),
            MetadataAsValue::get(I->getContext(), AliasListMD)};
        setInsertPointAfterDef(B, I);
        B.CreateIntrinsic(Intrinsic::spv_assign_aliasing_decoration,
                          {I->getType()}, {Args});
      }
    }
  };
  processMemAliasingDecoration(LLVMContext::MD_alias_scope);
  processMemAliasingDecoration(LLVMContext::MD_noalias);
  // Lower fpmath metadata.
  if (MDNode *MD = I->getMetadata(LLVMContext::MD_fpmath)) {
    const SPIRVSubtarget *STI = TM->getSubtargetImpl(*I->getFunction());
    bool AllowFPMaxError =
        STI->canUseExtension(SPIRV::Extension::SPV_INTEL_fp_max_error);
    if (!AllowFPMaxError)
      return;
    setInsertPointAfterDef(B, I);
    B.CreateIntrinsic(Intrinsic::spv_assign_fpmaxerror_decoration,
                      {I->getType()},
                      {I, MetadataAsValue::get(I->getContext(), MD)});
  }
}
static SPIRV::FPFastMathDefaultInfoVector &getOrCreateFPFastMathDefaultInfoVec(
    const Module &M,
    DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
        &FPFastMathDefaultInfoMap,
    Function *F) {
  auto it = FPFastMathDefaultInfoMap.find(F);
  if (it != FPFastMathDefaultInfoMap.end())
    return it->second;

  // Create the entry in the natural order: half, float, double.
  SPIRV::FPFastMathDefaultInfoVector FPFastMathDefaultInfoVec;
  FPFastMathDefaultInfoVec.emplace_back(Type::getHalfTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  FPFastMathDefaultInfoVec.emplace_back(Type::getFloatTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  FPFastMathDefaultInfoVec.emplace_back(Type::getDoubleTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  return FPFastMathDefaultInfoMap[F] = std::move(FPFastMathDefaultInfoVec);
}

static SPIRV::FPFastMathDefaultInfo &getFPFastMathDefaultInfo(
    SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec,
    const Type *Ty) {
  size_t BitWidth = Ty->getScalarSizeInBits();
  int Index = computeFPFastMathDefaultInfoVecIndex(BitWidth);
  assert(Index >= 0 && Index < 3 &&
         "Expected FPFastMathDefaultInfo for half, float, or double");
  assert(FPFastMathDefaultInfoVec.size() == 3 &&
         "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
  return FPFastMathDefaultInfoVec[Index];
}
void SPIRVEmitIntrinsics::insertConstantsForFPFastMathDefault(Module &M) {
  const SPIRVSubtarget *ST = TM->getSubtargetImpl();
  if (!ST->canUseExtension(SPIRV::Extension::SPV_KHR_float_controls2))
    return;

  auto Node = M.getNamedMetadata("spirv.ExecutionMode");
  if (!Node) {
    if (!M.getNamedMetadata("opencl.enable.FP_CONTRACT")) {
      // FP_CONTRACT is disabled: anchor the zero-flags constant as a global.
      Constant *InitValue =
          ConstantInt::get(Type::getInt32Ty(M.getContext()), 0);
      // The linkage/name details of the created global are elided in this
      // excerpt.
      [[maybe_unused]] GlobalVariable *GV =
          new GlobalVariable(M, Type::getInt32Ty(M.getContext()),
                             /* isConstant= */ true,
                             GlobalValue::InternalLinkage, InitValue);
    }
    return;
  }

  // Collect the FPFastMathDefault info per function.
  DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
      FPFastMathDefaultInfoMap;

  for (unsigned i = 0; i < Node->getNumOperands(); i++) {
    // ... (read the target function F, execution mode EM, target type and
    // flags from the metadata operand)
    if (EM == SPIRV::ExecutionMode::FPFastMathDefault) {
      assert(MDN->getNumOperands() == 4 &&
             "Expected 4 operands for FPFastMathDefault");
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      SPIRV::FPFastMathDefaultInfo &Info =
          getFPFastMathDefaultInfo(FPFastMathDefaultInfoVec, T);
      // ...
      Info.FPFastMathDefault = true;
    } else if (EM == SPIRV::ExecutionMode::ContractionOff) {
      assert(MDN->getNumOperands() == 2 &&
             "Expected no operands for ContractionOff");
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      for (SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec)
        Info.ContractionOff = true;
    } else if (EM == SPIRV::ExecutionMode::SignedZeroInfNanPreserve) {
      assert(MDN->getNumOperands() == 3 &&
             "Expected 1 operand for SignedZeroInfNanPreserve");
      unsigned TargetWidth =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(MDN->getOperand(2))->getValue())
              ->getZExtValue();
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      // ... (map TargetWidth to the vector index for half/float/double)
      assert(Index >= 0 && Index < 3 &&
             "Expected FPFastMathDefaultInfo for half, float, or double");
      assert(FPFastMathDefaultInfoVec.size() == 3 &&
             "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
      FPFastMathDefaultInfoVec[Index].SignedZeroInfNanPreserve = true;
    }
  }

  std::unordered_map<unsigned, GlobalVariable *> GlobalVars;
  for (auto &[Func, FPFastMathDefaultInfoVec] : FPFastMathDefaultInfoMap) {
    if (FPFastMathDefaultInfoVec.empty())
      continue;

    for (const SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
      assert(Info.Ty && "Expected target type for FPFastMathDefaultInfo");
      unsigned Flags = Info.FastMathFlags;
      if (Flags == SPIRV::FPFastMathMode::None && !Info.ContractionOff &&
          !Info.SignedZeroInfNanPreserve && !Info.FPFastMathDefault)
        continue;

      // Reject inconsistent combinations of execution modes and flags.
      if (Info.ContractionOff && (Flags & SPIRV::FPFastMathMode::AllowContract))
        report_fatal_error("Conflicting FPFastMathFlags: ContractionOff "
                           "and AllowContract");

      if (Info.SignedZeroInfNanPreserve &&
          (Flags &
           (SPIRV::FPFastMathMode::NotNaN | SPIRV::FPFastMathMode::NotInf |
            SPIRV::FPFastMathMode::NSZ))) {
        if (Info.FPFastMathDefault)
          report_fatal_error("Conflicting FPFastMathFlags: "
                             "SignedZeroInfNanPreserve but at least one of "
                             "NotNaN/NotInf/NSZ is enabled.");
      }

      if ((Flags & SPIRV::FPFastMathMode::AllowTransform) &&
          !((Flags & SPIRV::FPFastMathMode::AllowReassoc) &&
            (Flags & SPIRV::FPFastMathMode::AllowContract))) {
        report_fatal_error("Conflicting FPFastMathFlags: "
                           "AllowTransform requires AllowReassoc and "
                           "AllowContract to be set.");
      }

      // Reuse one global per distinct flags value.
      auto it = GlobalVars.find(Flags);
      GlobalVariable *GV = nullptr;
      if (it != GlobalVars.end()) {
        GV = it->second;
      } else {
        Constant *InitValue =
            ConstantInt::get(Type::getInt32Ty(M.getContext()), Flags);
        GV = new GlobalVariable(M, Type::getInt32Ty(M.getContext()),
                                /* isConstant= */ true,
                                GlobalValue::InternalLinkage, InitValue);
        GlobalVars[Flags] = GV;
      }
      // ...
    }
  }
}
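// Note (a reading of the function above): SPV_KHR_float_controls2 execution
// modes reference an OpConstant, so each distinct fast-math flags value is
// materialized once as an internal i32 global; GlobalVars deduplicates the
// constants across functions, and conflicting mode combinations are rejected
// with report_fatal_error before any global is created.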
void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
                                                 IRBuilder<> &B) {
  auto *II = dyn_cast<IntrinsicInst>(I);
  bool IsConstComposite =
      II && II->getIntrinsicID() == Intrinsic::spv_const_composite;
  if (IsConstComposite && TrackConstants) {
    setInsertPointAfterDef(B, I);
    auto t = AggrConsts.find(I);
    assert(t != AggrConsts.end());
    auto *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {II->getType(), II->getType()}, t->second, I, {}, B);
    replaceAllUsesWith(I, NewOp, false);
    NewOp->setArgOperand(0, I);
  }
  bool IsPhi = isa<PHINode>(I), BPrepared = false;
  for (const auto &Op : I->operands()) {
    if (!(isa<ConstantData>(Op) || isa<ConstantExpr>(Op)))
      continue;
    unsigned OpNo = Op.getOperandNo();
    if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
               (II->paramHasAttr(OpNo, Attribute::ImmArg))))
      continue;

    if (!BPrepared) {
      IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
            : B.SetInsertPoint(I);
      BPrepared = true;
    }
    Type *OpTy = Op->getType();
    Value *OpTyVal = Op;
    // ... (target-extension-typed constants are tracked as poison values)
    CallInst *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {OpTy, OpTyVal->getType()}, Op, OpTyVal, {}, B);
    Type *OpElemTy = nullptr;
    if (!IsConstComposite && isPointerTy(OpTy) &&
        (OpElemTy = GR->findDeducedElementType(Op)) != nullptr &&
        OpElemTy != IntegerType::getInt8Ty(I->getContext())) {
      SmallVector<Type *, 2> Types = {OpTy, OpTy};
      SmallVector<Value *, 2> Args = {
          NewOp, buildMD(getNormalizedPoisonValue(OpElemTy)),
          B.getInt32(getPointerAddressSpace(OpTy))};
      CallInst *PtrCasted =
          B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
      GR->buildAssignPtr(B, OpElemTy, PtrCasted);
      NewOp = PtrCasted;
    }
    I->setOperand(OpNo, NewOp);
  }
  if (Named.insert(I).second)
    emitAssignName(I, B);
}
Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
                                                     unsigned OpIdx) {
  std::unordered_set<Function *> FVisited;
  return deduceFunParamElementType(F, OpIdx, FVisited);
}

Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
    Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
  // Maybe a cycle.
  if (!FVisited.insert(F).second)
    return nullptr;

  std::unordered_set<Value *> Visited;
  SmallVector<std::pair<Function *, unsigned>> Lookup;
  // Search the function's call sites.
  for (User *U : F->users()) {
    CallInst *CI = dyn_cast<CallInst>(U);
    if (!CI || OpIdx >= CI->arg_size())
      continue;
    Value *OpArg = CI->getArgOperand(OpIdx);
    if (!isPointerTy(OpArg->getType()))
      continue;
    // Maybe the operand's element type is already known.
    if (Type *KnownTy = GR->findDeducedElementType(OpArg))
      return KnownTy;
    // Try to deduce the operand's element type.
    Visited.clear();
    if (Type *Ty = deduceElementTypeHelper(OpArg, Visited, false))
      return Ty;
    // Search the actual parameter's users.
    for (User *OpU : OpArg->users()) {
      Instruction *Inst = dyn_cast<Instruction>(OpU);
      if (!Inst || Inst == CI)
        continue;
      Visited.clear();
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, false))
        return Ty;
    }
    // Check if OpArg is a formal parameter of the enclosing function.
    Function *OuterF = CI->getParent() ? CI->getParent()->getParent() : nullptr;
    if (!OuterF || FVisited.find(OuterF) != FVisited.end())
      continue;
    for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
      if (OuterF->getArg(i) == OpArg) {
        Lookup.push_back(std::make_pair(OuterF, i));
        break;
      }
    }
  }

  // Search in function parameters.
  for (auto &Pair : Lookup) {
    if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
      return Ty;
  }

  return nullptr;
}
void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
                                                       IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  // ... (assign pointee types to pointer arguments using pointee-type
  // attributes and user-based deduction)
  // If the function is taken by address, deduce the element type of pointer
  // arguments from the indirect call sites.
  for (User *U : F->users()) {
    // ...
    for (User *AU : Arg->users()) {
      CallInst *CI = dyn_cast<CallInst>(AU);
      if (CI && !isa<IntrinsicInst>(CI) && CI->isIndirectCall() &&
          CI->getParent()->getParent() == CurrF) {
        SmallVector<std::pair<Value *, unsigned>> Ops;
        deduceOperandElementTypeFunctionPointer(CI, Ops, ElemTy, false);
        // ...
      }
    }
  }
}

void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
    Argument *Arg = F->getArg(OpIdx);
    if (!isUntypedPointerTy(Arg->getType()))
      continue;
    Type *ElemTy = GR->findDeducedElementType(Arg);
    if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr) {
      if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(Arg)) {
        DenseSet<std::pair<Value *, Value *>> VisitedSubst;
        GR->updateAssignType(AssignCI, Arg, getNormalizedPoisonValue(ElemTy));
        propagateElemType(Arg, IntegerType::getInt8Ty(F->getContext()),
                          VisitedSubst);
      } else {
        GR->buildAssignPtr(B, ElemTy, Arg);
      }
    }
  }
}

static FunctionType *getFunctionPointerElemType(Function *F,
                                                SPIRVGlobalRegistry *GR) {
  FunctionType *FTy = F->getFunctionType();
  bool IsNewFTy = false;
  // ... (rebuild the function type with typed-pointer parameters)
}
bool SPIRVEmitIntrinsics::processFunctionPointers(Module &M) {
  SmallVector<Function *> Worklist;
  for (auto &F : M) {
    if (F.isIntrinsic())
      continue;
    if (F.isDeclaration()) {
      for (User *U : F.users()) {
        CallInst *CI = dyn_cast<CallInst>(U);
        if (!CI || CI->getCalledFunction() != &F) {
          Worklist.push_back(&F);
          break;
        }
      }
    } else {
      // ... (compute the function-pointer element type)
      for (User *U : F.users()) {
        IntrinsicInst *II = dyn_cast<IntrinsicInst>(U);
        if (!II || II->arg_size() != 3 || II->getOperand(0) != &F)
          continue;
        if (II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
            II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
          // ... (update the assigned type to the function-pointer type)
          break;
        }
      }
    }
  }
  if (Worklist.empty())
    return false;

  std::string ServiceFunName = SPIRV_BACKEND_SERVICE_FUN_NAME;
  if (!getVacantFunctionName(M, ServiceFunName))
    report_fatal_error(
        "cannot allocate a name for the internal service function");
  LLVMContext &Ctx = M.getContext();
  Function *SF =
      Function::Create(FunctionType::get(Type::getVoidTy(Ctx), {}, false),
                       GlobalValue::PrivateLinkage, ServiceFunName, M);
  BasicBlock *BB = BasicBlock::Create(Ctx, "entry", SF);
  IRBuilder<> IRB(BB);

  for (Function *F : Worklist) {
    SmallVector<Value *> Args;
    for (const auto &Arg : F->args())
      Args.push_back(getNormalizedPoisonValue(Arg.getType()));
    IRB.CreateCall(F, Args);
  }
  IRB.CreateRetVoid();
  return true;
}
void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(IRBuilder<> &B) {
  DenseMap<Function *, CallInst *> Ptrcasts;
  for (auto It : FDeclPtrTys) {
    Function *F = It.first;
    for (auto *U : F->users()) {
      CallInst *CI = dyn_cast<CallInst>(U);
      if (!CI || CI->getCalledFunction() != F)
        continue;
      for (auto [Idx, ElemTy] : It.second) {
        Value *Param = CI->getArgOperand(Idx);
        // ... (skip parameters whose pointee type is already known)
        if (Argument *Arg = dyn_cast<Argument>(Param)) {
          B.SetInsertPointPastAllocas(Arg->getParent());
          GR->buildAssignPtr(B, ElemTy, Arg);
        } else if (isa<GetElementPtrInst>(Param)) {
          replaceUsesOfWithSpvPtrcast(Param, normalizeType(ElemTy), CI,
                                      Ptrcasts);
        } else {
          // Otherwise record the pointee type at the function entry.
          B.SetInsertPoint(CI->getParent()
                               ->getParent()
                               ->getEntryBlock()
                               .getFirstNonPHIOrDbgOrAlloca());
          GR->buildAssignPtr(B, ElemTy, Param);
        }
      }
    }
  }
}
GetElementPtrInst *
SPIRVEmitIntrinsics::simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP) {
  // getelementptr [0 x T], ptr %x, i64 0, <idx> is equivalent to
  // getelementptr T, ptr %x, <idx>.
  assert(GEP);
  Type *SrcTy = GEP->getSourceElementType();
  SmallVector<Value *, 8> Indices(GEP->indices());
  auto *ArrTy = dyn_cast<ArrayType>(SrcTy);
  if (ArrTy && ArrTy->getNumElements() == 0 && match(Indices[0], m_Zero())) {
    Indices.erase(Indices.begin());
    SrcTy = ArrTy->getElementType();
    IRBuilder<> Builder(GEP);
    Value *NewGEP = Builder.CreateGEP(SrcTy, GEP->getPointerOperand(), Indices,
                                      "", GEP->getNoWrapFlags());
    return cast<GetElementPtrInst>(NewGEP);
  }
  return nullptr;
}
bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
  if (Func.isDeclaration())
    return false;

  const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(Func);
  GR = ST.getSPIRVGlobalRegistry();
  HaveFunPtrs =
      ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);

  CurrF = &Func;
  IRBuilder<> B(Func.getContext());
  AggrConsts.clear();
  AggrConstTypes.clear();
  AggrStores.clear();

  // Simplify zero-length-array GEPs and pre-compute GEP result types.
  SmallPtrSet<Instruction *, 4> DeadInsts;
  for (auto &I : instructions(Func)) {
    auto *Ref = dyn_cast<GetElementPtrInst>(&I);
    if (!Ref || GR->findDeducedElementType(Ref))
      continue;
    GetElementPtrInst *NewGEP = simplifyZeroLengthArrayGepInst(Ref);
    if (NewGEP) {
      Ref->replaceAllUsesWith(NewGEP);
      if (isInstructionTriviallyDead(Ref))
        DeadInsts.insert(Ref);
      Ref = NewGEP;
    }
    if (Type *GepTy = getGEPType(Ref))
      GR->addDeducedElementType(Ref, normalizeType(GepTy));
  }
  for (auto *I : DeadInsts) {
    assert(I->use_empty() && "Dead instruction should not have any uses left");
    I->eraseFromParent();
  }

  processParamTypesByFunHeader(CurrF, B);

  // Mark stores of aggregate types for replacement by spv_store.
  for (auto &I : instructions(Func)) {
    StoreInst *SI = dyn_cast<StoreInst>(&I);
    if (!SI)
      continue;
    Type *ElTy = SI->getValueOperand()->getType();
    if (ElTy->isAggregateType() || ElTy->isVectorTy())
      AggrStores.insert(&I);
  }

  B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
  for (auto &GV : Func.getParent()->globals())
    processGlobalValue(GV, B);

  preprocessUndefs(B);
  preprocessCompositeConstants(B);
  SmallVector<Instruction *> Worklist(
      make_pointer_range(instructions(Func)));

  applyDemangledPtrArgTypes(B);

  // Pass 1: assign types and emit type-auxiliary intrinsics.
  for (auto &I : Worklist) {
    // Don't emit intrinsics for convergence intrinsics.
    if (isConvergenceIntrinsic(I))
      continue;

    bool Postpone = insertAssignPtrTypeIntrs(I, B, false);
    // If Postpone is true, we cannot decide on the pointee type yet.
    insertAssignTypeIntrs(I, B);
    insertPtrCastOrAssignTypeInstr(I, B);
    insertSpirvDecorations(I, B);
    // If the instruction requires a pointee type, check whether it is known
    // by now; otherwise force it to i8.
    if (Postpone && !GR->findAssignPtrTypeInstr(I))
      insertAssignPtrTypeIntrs(I, B, true);

    if (auto *FPI = dyn_cast<ConstrainedFPIntrinsic>(I))
      useRoundingMode(FPI, B);
  }

  // Pass 2: deduce element types of pointer operands, bottom-up.
  SmallPtrSet<Instruction *, 4> IncompleteRets;
  for (BasicBlock &BB : reverse(Func))
    for (Instruction &I : reverse(BB))
      deduceOperandElementType(&I, &IncompleteRets);

  // PHI nodes are skipped above: deduction for their operands may depend on
  // instructions that come later.
  for (BasicBlock &BB : Func)
    for (PHINode &Phi : BB.phis())
      if (isPointerTy(Phi.getType()))
        deduceOperandElementType(&Phi, nullptr);

  // Pass 3: visit instructions, rewriting them into spv intrinsics.
  for (auto *I : Worklist) {
    TrackConstants = true;
    if (!I->getType()->isVoidTy() || isa<StoreInst>(I))
      setInsertPointAfterDef(B, I);
    // Visitors return the instruction to keep processing, or nullptr.
    I = visit(*I);
    if (!I)
      continue;

    // Don't emit intrinsics for convergence operations.
    if (isConvergenceIntrinsic(I))
      continue;

    processInstrAfterVisit(I, B);
  }

  return true;
}
bool SPIRVEmitIntrinsics::postprocessTypes(Module &M) {
  if (!GR || TodoTypeSz == 0)
    return false;

  unsigned SzTodo = TodoTypeSz;
  DenseMap<Value *, SmallPtrSet<Value *, 4>> ToProcess;
  for (auto [Op, Enabled] : TodoType) {
    if (!Enabled)
      continue;
    CallInst *AssignCI = GR->findAssignPtrTypeInstr(Op);
    Type *KnownTy = GR->findDeducedElementType(Op);
    if (!KnownTy || !AssignCI)
      continue;
    // Try to improve the deduced type now that all functions are processed.
    if (auto *CI = dyn_cast<Instruction>(Op)) {
      CurrF = CI->getParent()->getParent();
      std::unordered_set<Value *> Visited;
      if (Type *ElemTy = deduceElementTypeHelper(Op, Visited, false, true)) {
        if (ElemTy != KnownTy) {
          DenseSet<std::pair<Value *, Value *>> VisitedSubst;
          propagateElemType(CI, ElemTy, VisitedSubst);
          eraseTodoType(Op);
          continue;
        }
      }
    }
    if (Op->hasUseList()) {
      for (User *U : Op->users()) {
        Instruction *Inst = dyn_cast<Instruction>(U);
        if (Inst && !isa<IntrinsicInst>(Inst))
          ToProcess[Inst].insert(Op);
      }
    }
  }
  if (TodoTypeSz == 0)
    return true;

  for (auto &F : M) {
    CurrF = &F;
    SmallPtrSet<Instruction *, 4> IncompleteRets;
    for (auto &I : instructions(F)) {
      auto It = ToProcess.find(&I);
      if (It == ToProcess.end())
        continue;
      It->second.remove_if([this](Value *V) { return !isTodoType(V); });
      if (It->second.size() == 0)
        continue;
      deduceOperandElementType(&I, &IncompleteRets, &It->second, true);
      if (TodoTypeSz == 0)
        return true;
    }
  }

  return SzTodo > TodoTypeSz;
}
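// Note (inferred from postprocessTypes): the return value SzTodo > TodoTypeSz
// reports whether this final round actually resolved anything; runOnModule
// folds it into its Changed flag after disabling further TodoType insertions
// via CanTodoType = false.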
void SPIRVEmitIntrinsics::parseFunDeclarations(Module &M) {
  for (auto &F : M) {
    if (!F.isDeclaration() || F.isIntrinsic())
      continue;
    // Get the demangled name.
    std::string DemangledName = getOclOrSpirvBuiltinDemangledName(F.getName());
    if (DemangledName.empty())
      continue;
    // Allow only the OpGroupAsyncCopy use case at the moment.
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(F);
    auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
        DemangledName, ST.getPreferredInstructionSet());
    if (Opcode != SPIRV::OpGroupAsyncCopy)
      continue;
    // Find pointer arguments.
    SmallVector<unsigned> Idxs;
    for (unsigned OpIdx = 0; OpIdx < F.arg_size(); ++OpIdx) {
      Argument *Arg = F.getArg(OpIdx);
      if (isPointerTy(Arg->getType()) && !hasPointeeTypeAttr(Arg))
        Idxs.push_back(OpIdx);
    }
    if (!Idxs.size())
      continue;
    // Parse the function's type strings.
    LLVMContext &Ctx = F.getContext();
    SmallVector<StringRef, 10> TypeStrs;
    SPIRV::parseBuiltinTypeStr(TypeStrs, DemangledName, Ctx);
    if (!TypeStrs.size())
      continue;
    // Record type info for pointer arguments.
    for (unsigned Idx : Idxs) {
      if (Idx >= TypeStrs.size())
        continue;
      if (Type *ElemTy =
              SPIRV::parseBuiltinCallArgumentType(TypeStrs[Idx].trim(), Ctx))
        if (TypedPointerType::isValidElementType(ElemTy) &&
            !ElemTy->isTargetExtTy())
          FDeclPtrTys[&F].push_back(std::make_pair(Idx, ElemTy));
    }
  }
}
bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
  bool Changed = false;

  parseFunDeclarations(M);
  insertConstantsForFPFastMathDefault(M);

  for (auto &F : M)
    Changed |= runOnFunction(F);

  // Specify function parameter types after all functions were processed.
  for (auto &F : M) {
    if (!F.isDeclaration() && !F.isIntrinsic()) {
      CurrF = &F;
      IRBuilder<> B(F.getContext());
      processParamTypes(&F, B);
    }
  }

  CanTodoType = false;
  Changed |= postprocessTypes(M);

  if (HaveFunPtrs)
    Changed |= processFunctionPointers(M);

  return Changed;
}

ModulePass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
  return new SPIRVEmitIntrinsics(TM);
}