#define DEBUG_TYPE "frame-info"

STATISTIC(NumRedZoneFunctions, "Number of functions using red zone");
// AArch64PrologueEpilogueCommon::requiresGetVGCall (excerpt):
  return AFI->hasStreamingModeChanges() &&
         /* ... */;

// AArch64PrologueEpilogueCommon::isVGInstruction (excerpt):
  unsigned Opc = MBBI->getOpcode();
  if (Opc == AArch64::CNTD_XPiI)
    return true;

  // ...
  if (Opc == AArch64::BL)
    return matchLibcall(TLI, MBBI->getOperand(0), /* ... */);

  return Opc == TargetOpcode::COPY;
static bool isSVECalleeSave(MachineBasicBlock::iterator I) {
  switch (I->getOpcode()) {
  default:
    return false;
  case AArch64::PTRUE_C_B:
  case AArch64::LD1B_2Z_IMM:
  case AArch64::ST1B_2Z_IMM:
  case AArch64::STR_ZXI:
  case AArch64::STR_PXI:
  case AArch64::LDR_ZXI:
  case AArch64::LDR_PXI:
  case AArch64::PTRUE_B:
  case AArch64::CPY_ZPzI_B:
  case AArch64::CMPNE_PPzZI_B:
  // ...
  case AArch64::SEH_SavePReg:
  case AArch64::SEH_SaveZReg:
    return I->getFlag(MachineInstr::FrameSetup) ||
           I->getFlag(MachineInstr::FrameDestroy);
  }
}
// AArch64PrologueEpilogueCommon::convertCalleeSaveRestoreToSPPrePostIncDec
// (excerpt): rewrite the first/last callee-save spill or restore so it also
// pre/post-adjusts SP.
  unsigned NewOpc;

  // Skip over any instructions that compute VG before the first spill.
  if (AFL.requiresSaveVG(MF)) {
    auto &TLI = *Subtarget.getTargetLowering();
    while (isVGInstruction(MBBI, TLI))
      ++MBBI;
  }

  switch (MBBI->getOpcode()) {
  default:
    llvm_unreachable("Unexpected callee-save save/restore opcode!");
  case AArch64::STPXi:
    NewOpc = AArch64::STPXpre;
    break;
  case AArch64::STPDi:
    NewOpc = AArch64::STPDpre;
    break;
  case AArch64::STPQi:
    NewOpc = AArch64::STPQpre;
    break;
  case AArch64::STRXui:
    NewOpc = AArch64::STRXpre;
    break;
  case AArch64::STRDui:
    NewOpc = AArch64::STRDpre;
    break;
  case AArch64::STRQui:
    NewOpc = AArch64::STRQpre;
    break;
  case AArch64::LDPXi:
    NewOpc = AArch64::LDPXpost;
    break;
  case AArch64::LDPDi:
    NewOpc = AArch64::LDPDpost;
    break;
  case AArch64::LDPQi:
    NewOpc = AArch64::LDPQpost;
    break;
  case AArch64::LDRXui:
    NewOpc = AArch64::LDRXpost;
    break;
  case AArch64::LDRDui:
    NewOpc = AArch64::LDRDpost;
    break;
  case AArch64::LDRQui:
    NewOpc = AArch64::LDRQpost;
    break;
  }

  TypeSize Scale = TypeSize::getFixed(1), Width = TypeSize::getFixed(0);
  int64_t MinOffset, MaxOffset;
  bool Success = static_cast<const AArch64InstrInfo *>(TII)->getMemOpInfo(
      NewOpc, Scale, Width, MinOffset, MaxOffset);
  (void)Success;

  // If the first store isn't right where we want SP then we can't fold the
  // update in so create a normal arithmetic instruction instead.
  if (MBBI->getOperand(MBBI->getNumOperands() - 1).getImm() != 0 ||
      CSStackSizeInc < MinOffset * (int64_t)Scale.getFixedValue() ||
      CSStackSizeInc > MaxOffset * (int64_t)Scale.getFixedValue()) {
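// --- Illustrative sketch (not part of the original source): the test above
// folds CSStackSizeInc into a pre/post-increment only when the existing
// offset is zero and the byte increment, expressed in units of the access
// size ("Scale"), fits the signed scaled-immediate range [MinOffset,
// MaxOffset] reported by getMemOpInfo(). A self-contained model of the test:
static bool fitsPrePostIncImmediate(int64_t CSStackSizeInc, int64_t Scale,
                                    int64_t MinOffset, int64_t MaxOffset) {
  return CSStackSizeInc % Scale == 0 &&
         CSStackSizeInc >= MinOffset * Scale &&
         CSStackSizeInc <= MaxOffset * Scale;
}
// E.g. for STPXpre (Scale = 8, imm7 range [-64, 63]) an increment of -16
// bytes passes and encodes as imm = -2, while -520 bytes is out of range.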
    return std::prev(MBBI);
  }

  // Get rid of the SEH code associated with the old instruction.
  auto SEH = std::next(MBBI);
  if (AArch64InstrInfo::isSEHInstruction(*SEH))
    SEH->eraseFromParent();
  MachineInstrBuilder MIB = BuildMI(MBB, MBBI, DL, TII->get(NewOpc));

  // Copy all operands other than the immediate offset.
  unsigned OpndIdx = 0;
  for (unsigned OpndEnd = MBBI->getNumOperands() - 1; OpndIdx < OpndEnd;
       ++OpndIdx)
    MIB.add(MBBI->getOperand(OpndIdx));

  assert(MBBI->getOperand(OpndIdx).getImm() == 0 &&
         "Unexpected immediate offset in first/last callee-save save/restore "
         "instruction!");
  assert(MBBI->getOperand(OpndIdx - 1).getReg() == AArch64::SP &&
         "Unexpected base register in callee-save save/restore instruction!");
  assert(CSStackSizeInc % Scale == 0);
  MIB.addImm(CSStackSizeInc / (int)Scale);

  // ...
  AFL.insertSEH(*MIB, *TII, FrameFlag);

  // ...
  return std::prev(MBB.erase(MBBI));
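// --- Illustrative sketch (not from the original source): the addImm() call
// above encodes the byte increment in units of the access size; the assert
// just before it guarantees the division is exact.
static int encodedSPAdjustImm(int64_t CSStackSizeInc, int64_t Scale) {
  return static_cast<int>(CSStackSizeInc / Scale); // e.g. -16 / 8 == -2
}
// So an "stp x29, x30, [sp, #-16]!" created here carries a scaled imm7 of -2.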
static void fixupSEHOpcode(MachineBasicBlock::iterator MBBI,
                           unsigned LocalStackSize) {
  MachineOperand *ImmOpnd = nullptr;
  unsigned ImmIdx = MBBI->getNumOperands() - 1;
  switch (MBBI->getOpcode()) {
  default:
    llvm_unreachable("Fix the offset in the SEH instruction");
  case AArch64::SEH_SaveFPLR:
  case AArch64::SEH_SaveRegP:
  case AArch64::SEH_SaveReg:
  case AArch64::SEH_SaveFRegP:
  case AArch64::SEH_SaveFReg:
  case AArch64::SEH_SaveAnyRegQP:
  case AArch64::SEH_SaveAnyRegQPX:
    ImmOpnd = &MBBI->getOperand(ImmIdx);
    break;
  }
  if (ImmOpnd)
    ImmOpnd->setImm(ImmOpnd->getImm() + LocalStackSize);
}
void AArch64PrologueEpilogueCommon::fixupCalleeSaveRestoreStackOffset(
    MachineInstr &MI, uint64_t LocalStackSize) const {
  if (AArch64InstrInfo::isSEHInstruction(MI))
    return;

  unsigned Opc = MI.getOpcode();
  unsigned Scale;
  switch (Opc) {
  case AArch64::STPXi:
  case AArch64::STRXui:
  case AArch64::STPDi:
  case AArch64::STRDui:
  case AArch64::LDPXi:
  case AArch64::LDRXui:
  case AArch64::LDPDi:
  case AArch64::LDRDui:
    Scale = 8;
    break;
  case AArch64::STPQi:
  case AArch64::STRQui:
  case AArch64::LDPQi:
  case AArch64::LDRQui:
    Scale = 16;
    break;
  default:
    llvm_unreachable("Unexpected callee-save save/restore opcode!");
  }

  unsigned OffsetIdx = MI.getNumExplicitOperands() - 1;
  assert(MI.getOperand(OffsetIdx - 1).getReg() == AArch64::SP &&
         "Unexpected base register in callee-save save/restore instruction!");
  // Last operand is the immediate offset that needs fixing.
  MachineOperand &OffsetOpnd = MI.getOperand(OffsetIdx);
  // All generated opcodes have scaled offsets.
  assert(LocalStackSize % Scale == 0);
  OffsetOpnd.setImm(OffsetOpnd.getImm() + LocalStackSize / Scale);
}
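// --- Illustrative sketch (not from the original source): after the combined
// SP bump, each callee-save access sits LocalStackSize bytes further from
// SP, so its scaled immediate grows by LocalStackSize / Scale.
static int64_t fixedUpScaledOffset(int64_t OldImm, uint64_t LocalStackSize,
                                   unsigned Scale) {
  return OldImm + static_cast<int64_t>(LocalStackSize / Scale);
}
// E.g. an X-register store with encoded offset 2 (16 bytes, Scale = 8) and a
// 32-byte local area becomes offset 6 (48 bytes): 2 + 32/8.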
  assert(MBBI != MI.getParent()->end() && "Expecting a valid instruction");
  assert(AArch64InstrInfo::isSEHInstruction(*MBBI) &&
         "Expecting a SEH instruction");
bool AArch64PrologueEpilogueCommon::shouldCombineCSRLocalStackBump(
    uint64_t StackBumpBytes) const {
  if (AFL.homogeneousPrologEpilog(MF))
    return false;

  if (AFI->getLocalStackSize() == 0)
    return false;

  // For WinCFI, if optimizing for size, prefer to not combine the stack bump
  // (to force an stp with predecrement) to match the packed unwind format,
  // provided there actually are callee-saved registers to merge the
  // decrement with.
  if (AFL.needsWinCFI(MF) && AFI->getCalleeSavedStackSize() > 0 &&
      MF.getFunction().hasOptSize())
    return false;

  // 512 is the maximum immediate for stp/ldp that will be used for
  // callee-save save/restores.
  if (StackBumpBytes >= 512 ||
      AFL.windowsRequiresStackProbe(MF, StackBumpBytes))
    return false;

  if (MFI.hasVarSizedObjects())
    return false;

  // ...
  if (AFL.canUseRedZone(MF))
    return false;

  // When there is an SVE area on the stack, always allocate the callee-saves
  // and spills/locals separately.
  if (AFL.getSVEStackSize(MF))
    return false;

  return true;
}
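// --- Illustrative sketch (not from the original source): the policy above,
// restated as a pure predicate over the queried facts. Parameter names are
// illustrative, not LLVM APIs:
static bool combineSPBumpAllowed(uint64_t StackBumpBytes, bool HomogeneousPE,
                                 uint64_t LocalStackSize, bool WinCFIOptSize,
                                 bool NeedsStackProbe, bool HasVarSizedObjects,
                                 bool CanUseRedZone, bool HasSVEArea) {
  if (HomogeneousPE || LocalStackSize == 0 || WinCFIOptSize)
    return false;
  if (StackBumpBytes >= 512 || NeedsStackProbe) // 512: max stp/ldp reach
    return false;
  return !HasVarSizedObjects && !CanUseRedZone && !HasSVEArea;
}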
// AArch64PrologueEmitter constructor (excerpt):
  EmitAsyncCFI = AFI->needsAsyncDwarfUnwindInfo(MF);
  // ...
  collectBlockLiveins();
void AArch64PrologueEmitter::collectBlockLiveins() {
  // Collect the livein list of the prologue block.
  PrologueEndI = MBB.begin();
  while (PrologueEndI != MBB.end() &&
         PrologueEndI->getFlag(MachineInstr::FrameSetup))
    ++PrologueEndI;

  if (PrologueEndI != MBB.end()) {
    getLivePhysRegsUpTo(*PrologueEndI, *TRI, LiveRegs);
    // ...
  }
}
void AArch64PrologueEmitter::verifyPrologueClobbers() const {
  if (PrologueEndI == MBB.end())
    return;
  // Check whether any newly inserted prologue instruction clobbers a live
  // register.
  for (MachineInstr &MI :
       make_range(MBB.instr_begin(), PrologueEndI->getIterator())) {
    for (auto &Op : MI.operands())
      if (Op.isReg() && Op.isDef())
        assert(!LiveRegs.contains(Op.getReg()) &&
               "live register clobbered by inserted prologue instructions");
  }
}
void AArch64PrologueEmitter::determineLocalsStackSize(
    uint64_t StackSize, uint64_t PrologueSaveSize) {
  AFI->setLocalStackSize(StackSize - PrologueSaveSize);
  CombineSPBump = shouldCombineCSRLocalStackBump(StackSize);
}

static int64_t upperBound(StackOffset Size) {
  static const int64_t MAX_BYTES_PER_SCALABLE_BYTE = 16;
  return Size.getScalable() * MAX_BYTES_PER_SCALABLE_BYTE + Size.getFixed();
}
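// --- Illustrative sketch (not from the original source): upperBound() turns
// a mixed fixed+scalable StackOffset into a conservative byte count by
// assuming the architectural maximum vscale of 16 (a 2048-bit SVE vector):
static int64_t upperBoundBytes(int64_t ScalableBytes, int64_t FixedBytes) {
  const int64_t MaxBytesPerScalableByte = 16;
  return ScalableBytes * MaxBytesPerScalableByte + FixedBytes;
}
// E.g. 32 scalable + 64 fixed bytes is bounded by 32*16 + 64 = 576 bytes,
// which is what the probe-sizing logic below compares against ProbeSize.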
void AArch64PrologueEmitter::allocateStackSpace(
    MachineBasicBlock::iterator MBBI, int64_t RealignmentPadding,
    StackOffset AllocSize, bool EmitCFI, StackOffset InitialOffset,
    bool FollowupAllocs) {
  // ...
  const uint64_t AndMask = ~(MaxAlign - 1);

  if (!Subtarget.getTargetLowering()->hasInlineStackProbe(MF)) {
    Register TargetReg = RealignmentPadding
                             ? AFL.findScratchNonCalleeSaveRegister(&MBB)
                             : AArch64::SP;
    // SUB Xd/SP, SP, AllocSize
    // ...
    if (RealignmentPadding) {
      // AND SP, Xd, 0b11111...0000
      // ...
    }
    return;
  }

  // Stack probing allocation. A fixed-length allocation without realignment
  // can use the more efficient probed-allocation pseudo.
  if (AllocSize.getScalable() == 0 && RealignmentPadding == 0) {
    Register ScratchReg = AFL.findScratchNonCalleeSaveRegister(&MBB);
    assert(ScratchReg != AArch64::NoRegister);
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::PROBED_STACKALLOC))
        .addDef(ScratchReg)
        .addImm(AllocSize.getFixed())
        .addImm(InitialOffset.getFixed())
        .addImm(InitialOffset.getScalable());
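// --- Illustrative sketch (not from the original source): the AndMask above
// realigns a pointer downwards to a power-of-two boundary by clearing its
// low bits:
static uint64_t alignDown(uint64_t SP, uint64_t MaxAlign) {
  return SP & ~(MaxAlign - 1); // e.g. 0x1008 with MaxAlign = 32 -> 0x1000
}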
    // The fixed allocation may leave unprobed bytes at the top of the stack.
    // If there are follow-up allocations, issue an extra probe so they start
    // in a known state.
    if (FollowupAllocs) {
      // STR XZR, [SP]
      // ...
    }

    return;
  }

  // Variable-length allocation: if the (unknown) allocation size cannot
  // exceed the probe size, decrement the stack pointer right away.
  int64_t ProbeSize = AFI->getStackProbeSize();
  if (upperBound(AllocSize) + RealignmentPadding <= ProbeSize) {
    Register ScratchReg = RealignmentPadding
                              ? AFL.findScratchNonCalleeSaveRegister(&MBB)
                              : AArch64::SP;
    assert(ScratchReg != AArch64::NoRegister);
    // SUB Xd, SP, AllocSize
    // ...
    if (RealignmentPadding) {
      // AND SP, Xd, 0b11111...0000
      // ...
      AFI->setStackRealigned(true);
    }
    if (FollowupAllocs || upperBound(AllocSize) + RealignmentPadding >
                              AArch64::StackProbeMaxUnprobedStack) {
      // STR XZR, [SP]
      // ...
    }
    return;
  }

  // Emit a variable-length allocation probing loop.
  // ...
  assert(TargetReg != AArch64::NoRegister);
  // ...
  if (RealignmentPadding) {
    // ...
  }
  // ...
  if (EmitCFI) {
    // Set the CFA register back to SP.
    CFIInstBuilder(MBB, MBBI, MachineInstr::FrameSetup)
        .buildDefCFARegister(AArch64::SP);
  }
  if (RealignmentPadding)
    AFI->setStackRealigned(true);
}
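// --- Illustrative sketch (not from the original source): with inline stack
// probing, an allocation may stay unprobed only while its worst-case size
// fits within one probe interval; a trailing probe is required when more
// allocations follow or the leftover unprobed gap could exceed
// AArch64::StackProbeMaxUnprobedStack (1024 bytes on AArch64):
static bool needsTrailingProbe(bool FollowupAllocs, int64_t WorstCaseBytes,
                               int64_t MaxUnprobedStack) {
  return FollowupAllocs || WorstCaseBytes > MaxUnprobedStack;
}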
// AArch64PrologueEmitter::emitPrologue (excerpt):
  // In most cases the function doesn't use a red zone; assume false and flip
  // it below if we can.
  AFI->setHasRedZone(false);

  // ...
  if (AFI->getArgumentStackToRestore()) {
    // ...
  }

  if (AFI->shouldSignReturnAddress(MF)) {
    // ...
    if (!AFL.shouldSignReturnAddressEverywhere(MF)) {
      // ...
    }
  }

  if (AFI->needsShadowCallStackPrologueEpilogue(MF)) {
    emitShadowCallStackPrologue(PrologueBeginI, DL);
    // ...
  }

  // ...
  if (HasFP && AFI->hasSwiftAsyncContext())
    emitSwiftAsyncContextFramePointer(PrologueBeginI, DL);

  // Set the tagged base pointer to the requested stack slot. Ideally it
  // should match the SP value after the prologue.
  if (std::optional<int> TBPI = AFI->getTaggedBasePointerIndex())
    AFI->setTaggedBasePointerOffset(-MFI.getObjectOffset(*TBPI));
  else
    AFI->setTaggedBasePointerOffset(MFI.getStackSize());

  // ...
  if (!AFI->hasStackFrame() && !AFL.windowsRequiresStackProbe(MF, NumBytes))
    return emitEmptyStackFramePrologue(NumBytes, PrologueBeginI, DL);
  const Function &F = MF.getFunction();
  bool IsWin64 = Subtarget.isCallingConvWin64(F.getCallingConv(), F.isVarArg());
  // ...

  bool FPAfterSVECalleeSaves =
      Subtarget.isTargetWindows() && AFI->getSVECalleeSavedStackSize();

  if (FPAfterSVECalleeSaves && AFI->hasStackHazardSlotIndex())
    reportFatalUsageError("SME hazard padding is not supported on Windows");

  auto PrologueSaveSize = AFI->getCalleeSavedStackSize() + FixedObject;
  // All of the remaining stack allocations are for locals.
  determineLocalsStackSize(NumBytes, PrologueSaveSize);

  MachineBasicBlock::iterator FirstGPRSaveI = PrologueBeginI;
  if (FPAfterSVECalleeSaves) {
    // If we're doing SVE saves first, immediately allocate space for the
    // fixed objects, then the space for the SVE callee-saves.
    // ...
    allocateStackSpace(PrologueBeginI, 0, SaveSize, false, StackOffset{},
                       /*FollowupAllocs=*/true);
    NumBytes -= FixedObject;

    // Now allocate space for the GPR callee saves.
    // ...
    FirstGPRSaveI = convertCalleeSaveRestoreToSPPrePostIncDec(
        MBBI, DL, -AFI->getCalleeSavedStackSize(), EmitAsyncCFI);
    NumBytes -= AFI->getCalleeSavedStackSize();
  } else if (CombineSPBump) {
    assert(!AFL.getSVEStackSize(MF) && "Cannot combine SP bump with SVE");
    // ... (a single SP decrement covers callee-saves and locals)
    NumBytes = 0;
  } else if (HomPrologEpilog) {
    // Stack has been already adjusted.
    NumBytes -= PrologueSaveSize;
  } else if (PrologueSaveSize != 0) {
    FirstGPRSaveI = convertCalleeSaveRestoreToSPPrePostIncDec(
        PrologueBeginI, DL, -PrologueSaveSize, EmitAsyncCFI);
    NumBytes -= PrologueSaveSize;
  }
  assert(NumBytes >= 0 && "Negative stack allocation size!?");
  auto &TLI = *Subtarget.getTargetLowering();

  // Move past the saves of the callee-saved registers, fixing up the offsets
  // and pre-inc/post-dec instructions on the way.
  MachineBasicBlock::iterator AfterGPRSavesI = FirstGPRSaveI;
  while (AfterGPRSavesI != EndI &&
         AfterGPRSavesI->getFlag(MachineInstr::FrameSetup) &&
         !isSVECalleeSave(AfterGPRSavesI)) {
    if (CombineSPBump &&
        // Only fix up frame-setup load/store instructions.
        (!AFL.requiresSaveVG(MF) || !isVGInstruction(AfterGPRSavesI, TLI)))
      fixupCalleeSaveRestoreStackOffset(*AfterGPRSavesI,
                                        AFI->getLocalStackSize());
    ++AfterGPRSavesI;
  }

  // ...
  emitFramePointerSetup(AfterGPRSavesI, DL, FixedObject);

  // ...
  emitCalleeSavedGPRLocations(AfterGPRSavesI);

  const bool NeedsRealignment =
      NumBytes && !IsFunclet && RegInfo.hasStackRealignment(MF);
  const int64_t RealignmentPadding =
      (NeedsRealignment && MFI.getMaxAlign() > Align(16))
          ? MFI.getMaxAlign().value() - 16
          : 0;

  if (AFL.windowsRequiresStackProbe(MF, NumBytes + RealignmentPadding))
    emitWindowsStackProbe(AfterGPRSavesI, DL, NumBytes, RealignmentPadding);
  StackOffset SVECalleeSavesSize = {}, SVELocalsSize = SVEStackSize;
  MachineBasicBlock::iterator CalleeSavesBegin = AfterGPRSavesI,
                              CalleeSavesEnd = AfterGPRSavesI;

  // Process the SVE callee-saves to determine what space needs to be
  // allocated.
  if (int64_t CalleeSavedSize = AFI->getSVECalleeSavedStackSize()) {
    LLVM_DEBUG(dbgs() << "SVECalleeSavedStackSize = " << CalleeSavedSize
                      << "\n");
    SVECalleeSavesSize = StackOffset::getScalable(CalleeSavedSize);
    SVELocalsSize = SVEStackSize - SVECalleeSavesSize;
  }

  // Allocate space for the SVE callee saves (if any).
  // ...
  if (!FPAfterSVECalleeSaves) {
    // Find the end of the SVE callee-save spill sequence.
    while (isSVECalleeSave(AfterSVESavesI) &&
           AfterSVESavesI != MBB.getFirstTerminator())
      ++AfterSVESavesI;
    CalleeSavesEnd = AfterSVESavesI;

    // ...
    allocateStackSpace(CalleeSavesBegin, 0, SVECalleeSavesSize,
                       EmitAsyncCFI && !HasFP, CFAOffset,
                       MFI.hasVarSizedObjects() || LocalsSize);
  }
  CFAOffset += SVECalleeSavesSize;

  // ...
  emitCalleeSavedSVELocations(CalleeSavesEnd);
  // Allocate space for the rest of the frame, including SVE locals. Align
  // the stack as necessary.
  assert(!(AFL.canUseRedZone(MF) && NeedsRealignment) &&
         "Cannot use redzone with stack realignment");
  if (!AFL.canUseRedZone(MF)) {
    // FIXME: in the case of dynamic re-alignment, NumBytes doesn't have the
    // correct value here, as NumBytes also includes padding bytes, which
    // shouldn't be counted here.
    allocateStackSpace(CalleeSavesEnd, RealignmentPadding,
                       SVELocalsSize + StackOffset::getFixed(NumBytes),
                       EmitAsyncCFI && !HasFP, CFAOffset,
                       MFI.hasVarSizedObjects());
  }

  // ...
  // Funclets with an asynchronous EH personality receive the establisher
  // frame in X1.
  if (IsFunclet && F.hasPersonalityFn()) {
    EHPersonality Per = classifyEHPersonality(F.getPersonalityFn());
    if (isAsynchronousEHPersonality(Per)) {
      // ...
      MBB.addLiveIn(AArch64::X1);
    }
  }

  if (EmitCFI && !EmitAsyncCFI) {
    // ...
    emitDefineCFAWithFP(AfterSVESavesI, FixedObject);
    // ...
    emitCalleeSavedGPRLocations(AfterSVESavesI);
    emitCalleeSavedSVELocations(AfterSVESavesI);
  }
void AArch64PrologueEmitter::emitShadowCallStackPrologue(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  // Shadow call stack prolog: str x30, [x18], #8
  // ...

  // This instruction also makes x18 live-in to the entry block.
  MBB.addLiveIn(AArch64::X18);

  // ...
  if (EmitCFI) {
    // Emit a CFI instruction that causes 8 to be subtracted from the value
    // of x18 when unwinding past this frame.
    static const char CFIInst[] = {
        dwarf::DW_CFA_val_expression,
        18, // register
        2,  // length
        static_cast<char>(unsigned(dwarf::DW_OP_breg18)),
        static_cast<char>(-8) & 0x7f, // addend (sleb128)
    };
    CFIInstBuilder(MBB, MBBI, MachineInstr::FrameSetup)
        .buildEscape(StringRef(CFIInst, sizeof(CFIInst)));
  }
}
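// --- Illustrative sketch (not from the original source): the escape above
// emits DW_CFA_val_expression for x18 with the two-byte expression
// {DW_OP_breg18, <sleb128 -8>}. For values in [-64, 63] the SLEB128
// encoding is a single byte, which is why `static_cast<char>(-8) & 0x7f`
// works:
static unsigned char sleb128OneByte(signed char V) {
  // Keep the low 7 bits; the sign lands in bit 6, continuation bit stays 0.
  return static_cast<unsigned char>(V) & 0x7f;
}
// sleb128OneByte(-8) == 0x78, so the unwinder computes x18 = x18 - 8,
// undoing the "str x30, [x18], #8" push when unwinding past this frame.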
void AArch64PrologueEmitter::emitSwiftAsyncContextFramePointer(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  switch (MF.getTarget().Options.SwiftAsyncFramePointer) {
  case SwiftAsyncFramePointerMode::DeploymentBased:
    if (Subtarget.swiftAsyncContextIsDynamicallySet()) {
      // ...
    }
void AArch64PrologueEmitter::emitEmptyStackFramePrologue(
    int64_t NumBytes, MachineBasicBlock::iterator PrologueBeginI,
    const DebugLoc &DL) const {
  assert(!HasFP && "unexpected function without stack frame but with FP");
  assert(!AFL.getSVEStackSize(MF) &&
         "unexpected function without stack frame but with SVE objects");
  // All of the stack allocation is for locals.
  AFI->setLocalStackSize(NumBytes);
  // ...
  // REDZONE: If the stack size is less than 128 bytes, we don't need
  // to actually allocate.
  if (AFL.canUseRedZone(MF)) {
    AFI->setHasRedZone(true);
    ++NumRedZoneFunctions;
  } else {
    // ...
    MCSymbol *FrameLabel = MF.getContext().createTempSymbol();
    // Encode the stack size of the leaf function.
    CFIInstBuilder(MBB, PrologueBeginI, MachineInstr::FrameSetup)
        .buildDefCFAOffset(NumBytes, FrameLabel);
  }
}
void AArch64PrologueEmitter::emitFramePointerSetup(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL,
    unsigned FixedObject) {
  int64_t FPOffset = AFI->getCalleeSaveBaseToFrameRecordOffset();
  if (CombineSPBump)
    FPOffset += AFI->getLocalStackSize();

  if (AFI->hasSwiftAsyncContext()) {
    // Before we update the live FP we have to ensure there's a valid (or
    // null) asynchronous context in its slot just before FP in memory, so
    // store it now.
    const auto &Attrs = MF.getFunction().getAttributes();
    bool HaveInitialContext = Attrs.hasAttrSomewhere(Attribute::SwiftAsync);
    if (HaveInitialContext)
      MBB.addLiveIn(AArch64::X22);
    Register Reg = HaveInitialContext ? AArch64::X22 : AArch64::XZR;
    // ...
  }

  // ...
  emitDefineCFAWithFP(MBBI, FixedObject);
}
void AArch64PrologueEmitter::emitDefineCFAWithFP(
    MachineBasicBlock::iterator MBBI, unsigned FixedObject) const {
  const int OffsetToFirstCalleeSaveFromFP =
      AFI->getCalleeSaveBaseToFrameRecordOffset() -
      AFI->getCalleeSavedStackSize();
  CFIInstBuilder(MBB, MBBI, MachineInstr::FrameSetup)
      .buildDefCFA(FramePtr, FixedObject - OffsetToFirstCalleeSaveFromFP);
}
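// --- Illustrative sketch (not from the original source): the CFA (the
// incoming SP) is recovered from FP by stepping over the frame record and
// the rest of the callee-save area, plus any fixed-object area:
static int cfaOffsetFromFP(int FixedObject, int FrameRecordOffset,
                           int CalleeSavedStackSize) {
  return FixedObject - (FrameRecordOffset - CalleeSavedStackSize);
}
// E.g. FixedObject = 0, FrameRecordOffset = 16, CalleeSavedStackSize = 96
// gives CFA = FP + 80: FP points at the frame record, 16 bytes above the
// base of a 96-byte callee-save area whose top is the CFA.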
void AArch64PrologueEmitter::emitWindowsStackProbe(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL, int64_t NumBytes,
    int64_t RealignmentPadding) const {
  if (AFI->getSVECalleeSavedStackSize())
    report_fatal_error("SVE callee saves not yet supported with stack probing");

  // Find an available register to spill the value of X15 to, if X15 is
  // already in use.
  unsigned X15Scratch = AArch64::NoRegister;
  if (llvm::any_of(MBB.liveins(),
                   [this](const MachineBasicBlock::RegisterMaskPair &LiveIn) {
                     return RegInfo.isSuperOrSubRegisterEq(AArch64::X15,
                                                           LiveIn.PhysReg);
                   })) {
    X15Scratch = AFL.findScratchNonCalleeSaveRegister(&MBB, true);
    assert(X15Scratch != AArch64::NoRegister &&
           (X15Scratch < AArch64::X15 || X15Scratch > AArch64::X17));
    // ...
    LiveRegs.removeReg(AArch64::X15); // ignore X15 since we restore it
    // ...
  }

  uint64_t NumWords = (NumBytes + RealignmentPadding) >> 4;
  // ...
  if (NumBytes >= (1 << 28))
    report_fatal_error(/* message truncated in this excerpt */
                       "unwinding purposes");

  uint32_t LowNumWords = NumWords & 0xFFFF;
  BuildMI(MBB, MBBI, DL, TII->get(AArch64::MOVZXi), AArch64::X15)
      .addImm(LowNumWords)
      .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSL, 0))
      .setMIFlag(MachineInstr::FrameSetup);
  // ...
  if ((NumWords & 0xFFFF0000) != 0) {
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::MOVKXi), AArch64::X15)
        .addReg(AArch64::X15)
        .addImm((NumWords & 0xFFFF0000) >> 16)
        .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSL, 16))
        .setMIFlag(MachineInstr::FrameSetup);
    // ...
  }

  const char *ChkStk = Subtarget.getChkStkName();
  switch (MF.getTarget().getCodeModel()) {
  // ... (emit the __chkstk call as appropriate for the code model)
  }

  if (RealignmentPadding > 0) {
    if (RealignmentPadding >= 4096) {
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::MOVi64imm), AArch64::X16)
          .addImm(RealignmentPadding)
          .setMIFlags(MachineInstr::FrameSetup);
      // ...
    } else {
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::ADDXri), AArch64::X15)
          .addReg(AArch64::SP)
          .addImm(RealignmentPadding)
          .addImm(0)
          .setMIFlag(MachineInstr::FrameSetup);
    }

    uint64_t AndMask = ~(MFI.getMaxAlign().value() - 1);
    // ...
    AFI->setStackRealigned(true);
    // ...
  }
  if (X15Scratch != AArch64::NoRegister) {
    // ... (restore the original value of X15)
  }
}
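// --- Illustrative sketch (not from the original source): the probe passes
// the allocation to the chkstk helper as a count of 16-byte units in x15,
// materialized with a MOVZ/MOVK pair; NumBytes < 2^28 keeps the unit count
// below 2^24, so two 16-bit chunks always suffice:
static void splitProbeWords(uint64_t NumBytes, uint64_t RealignmentPadding,
                            uint32_t &MovzImm, uint32_t &MovkImm) {
  uint64_t NumWords = (NumBytes + RealignmentPadding) >> 4;
  MovzImm = NumWords & 0xFFFF;                    // MOVZ x15, #lo16
  MovkImm = (uint32_t)((NumWords >> 16) & 0xFFFF); // MOVK x15, #hi16, lsl #16
}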
void AArch64PrologueEmitter::emitCalleeSavedGPRLocations(
    MachineBasicBlock::iterator MBBI) {
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  if (CSI.empty())
    return;

  CFIInstBuilder CFIBuilder(MBB, MBBI, MachineInstr::FrameSetup);
  for (const auto &Info : CSI) {
    unsigned FrameIdx = Info.getFrameIdx();
    // ...
    assert(!Info.isSpilledToReg() && "Spilling to registers not implemented");
    int64_t Offset = MFI.getObjectOffset(FrameIdx) - AFL.getOffsetOfLocalArea();
    CFIBuilder.buildOffset(Info.getReg(), Offset);
  }
}
void AArch64PrologueEmitter::emitCalleeSavedSVELocations(
    MachineBasicBlock::iterator MBBI) {
  // Add callee-saved registers to the move list.
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  // ...

  std::optional<int64_t> IncomingVGOffsetFromDefCFA;
  if (AFL.requiresSaveVG(MF)) {
    auto IncomingVG = *find_if(
        reverse(CSI), [](auto &Info) { return Info.getReg() == AArch64::VG; });
    IncomingVGOffsetFromDefCFA = MFI.getObjectOffset(IncomingVG.getFrameIdx()) -
                                 AFL.getOffsetOfLocalArea();
  }

  for (const auto &Info : CSI) {
    // ...
    assert(!Info.isSpilledToReg() && "Spilling to registers not implemented");
    MCRegister Reg = Info.getReg();
    // ...
    CFIBuilder.insertCFIInst(
        createCFAOffset(RegInfo, Reg, Offset, IncomingVGOffsetFromDefCFA));
  }
}
static bool isFuncletReturnInstr(const MachineInstr &MI) {
  switch (MI.getOpcode()) {
  default:
    return false;
  case AArch64::CATCHRET:
  case AArch64::CLEANUPRET:
    return true;
  }
}

// AArch64EpilogueEmitter::emitEpilogue (excerpt):
  SEHEpilogueStartI = MBB.end();

  // ...
  if (MBB.end() != EpilogueEndI) {
    DL = EpilogueEndI->getDebugLoc();
    // ...
  }
  int64_t ArgumentStackToRestore = AFL.getArgumentStackToRestore(MF, MBB);
  bool IsWin64 = Subtarget.isCallingConvWin64(MF.getFunction().getCallingConv(),
                                              MF.getFunction().isVarArg());
  // ...

  int64_t AfterCSRPopSize = ArgumentStackToRestore;
  auto PrologueSaveSize = AFI->getCalleeSavedStackSize() + FixedObject;
  // We cannot rely on the local stack size set in emitPrologue if the
  // function has funclets, as funclets have different local stack size
  // requirements, and the current value set in emitPrologue may be that of
  // the containing function.
  if (MF.hasEHFunclets())
    AFI->setLocalStackSize(NumBytes - PrologueSaveSize);

  // ...
  auto FirstHomogenousEpilogI = MBB.getFirstTerminator();
  if (FirstHomogenousEpilogI != MBB.begin()) {
    auto HomogeneousEpilog = std::prev(FirstHomogenousEpilogI);
    if (HomogeneousEpilog->getOpcode() == AArch64::HOM_Epilog)
      FirstHomogenousEpilogI = HomogeneousEpilog;
  }
  // ...
  assert(AfterCSRPopSize == 0);
  // ...

  bool FPAfterSVECalleeSaves =
      Subtarget.isTargetWindows() && AFI->getSVECalleeSavedStackSize();

  bool CombineSPBump = shouldCombineCSRLocalStackBump(NumBytes);
  // Assume we can't combine the last pop with the SP restore.
  bool CombineAfterCSRBump = false;
  if (FPAfterSVECalleeSaves) {
    AfterCSRPopSize += FixedObject;
  } else if (!CombineSPBump && PrologueSaveSize != 0) {
    MachineBasicBlock::iterator Pop = std::prev(MBB.getFirstTerminator());
    while (Pop->getOpcode() == TargetOpcode::CFI_INSTRUCTION ||
           AArch64InstrInfo::isSEHInstruction(*Pop))
      Pop = std::prev(Pop);
    // Converting the last ldp to a post-index ldp is valid only if the last
    // ldp's offset is 0.
    const MachineOperand &OffsetOp = Pop->getOperand(Pop->getNumOperands() - 1);
    // If the offset is 0 and the AfterCSR pop is not actually trying to
    // allocate more stack for arguments (in space that an untimely interrupt
    // may clobber), convert it to a post-index ldp.
    if (OffsetOp.getImm() == 0 && AfterCSRPopSize >= 0) {
      // ...
    } else {
      // Otherwise bump SP for the callee-saves separately, after the pops.
      AfterCSRPopSize += PrologueSaveSize;
      CombineAfterCSRBump = true;
    }
  }
  // Move past the restores of the callee-saved registers.
  MachineBasicBlock::iterator FirstGPRRestoreI = MBB.getFirstTerminator();
  MachineBasicBlock::iterator Begin = MBB.begin();
  while (FirstGPRRestoreI != Begin) {
    --FirstGPRRestoreI;
    if (!FirstGPRRestoreI->getFlag(MachineInstr::FrameDestroy) ||
        (!FPAfterSVECalleeSaves && isSVECalleeSave(FirstGPRRestoreI))) {
      ++FirstGPRRestoreI;
      break;
    } else if (CombineSPBump)
      fixupCalleeSaveRestoreStackOffset(*FirstGPRRestoreI,
                                        AFI->getLocalStackSize());
  }

  if (NeedsWinCFI) {
    // ...
    BuildMI(MBB, FirstGPRRestoreI, DL, TII->get(AArch64::SEH_EpilogStart))
        .setMIFlag(MachineInstr::FrameDestroy);
    SEHEpilogueStartI = FirstGPRRestoreI;
    --SEHEpilogueStartI;
  }

  if (HasFP && AFI->hasSwiftAsyncContext())
    emitSwiftAsyncContextFramePointer(EpilogueEndI, DL);

  // If there is a single SP update, insert it before the ret and we're done.
  if (CombineSPBump) {
    assert(!SVEStackSize && "Cannot combine SP bump with SVE");
    // ...
    return;
  }

  NumBytes -= PrologueSaveSize;
  assert(NumBytes >= 0 && "Negative stack allocation size!?");
  // Process the SVE callee-saves to determine what space needs to be
  // deallocated.
  StackOffset DeallocateBefore = {}, DeallocateAfter = SVEStackSize;
  MachineBasicBlock::iterator RestoreBegin = FirstGPRRestoreI,
                              RestoreEnd = FirstGPRRestoreI;
  if (int64_t CalleeSavedSize = AFI->getSVECalleeSavedStackSize()) {
    if (FPAfterSVECalleeSaves)
      RestoreEnd = MBB.getFirstTerminator();

    RestoreBegin = std::prev(RestoreEnd);
    while (RestoreBegin != MBB.begin() &&
           isSVECalleeSave(std::prev(RestoreBegin)))
      --RestoreBegin;

    // ...
    StackOffset CalleeSavedSizeAsOffset =
        StackOffset::getScalable(CalleeSavedSize);
    DeallocateBefore = SVEStackSize - CalleeSavedSizeAsOffset;
    DeallocateAfter = CalleeSavedSizeAsOffset;
  }

  // Deallocate the SVE area.
  if (FPAfterSVECalleeSaves) {
    // ...
    if (!AFI->isStackRealigned() && !MFI.hasVarSizedObjects()) {
      // ...
    }
    // ...
  } else if (SVEStackSize) {
    int64_t SVECalleeSavedSize = AFI->getSVECalleeSavedStackSize();
    // ...
    Register BaseForSVEDealloc =
        (AFI->isStackRealigned() || MFI.hasVarSizedObjects()) ? AArch64::FP
                                                              : AArch64::SP;
    if (SVECalleeSavedSize && BaseForSVEDealloc == AArch64::FP) {
      Register CalleeSaveBase = AArch64::FP;
      if (int64_t CalleeSaveBaseOffset =
              AFI->getCalleeSaveBaseToFrameRecordOffset()) {
        // If the offset to the callee-save base is non-zero, compute the
        // base address in a temporary register first, to avoid briefly
        // deallocating the SVE callee-saves.
        CalleeSaveBase =
            MF.getRegInfo().createVirtualRegister(&AArch64::GPR64RegClass);
        // ...
      }
      // ...
    } else if (BaseForSVEDealloc == AArch64::SP) {
      if (SVECalleeSavedSize) {
        // ...
        emitFrameOffset(MBB, RestoreBegin, DL, AArch64::SP, AArch64::SP,
                        /* ... */);
      }
      // ...
    }
  }

  // ...
  emitCalleeSavedSVERestores(RestoreEnd);
  if (!HasFP) {
    bool RedZone = AFL.canUseRedZone(MF);
    // If this was a redzone leaf function, we don't need to restore the
    // stack pointer (but we may need to pop stack args for fastcc).
    if (RedZone && AfterCSRPopSize == 0)
      return;

    // Pop the local variables off the stack. If there are no callee-saved
    // registers, it means we are actually positioned at the terminator and
    // can combine the stack increment for the locals and the stack increment
    // for the callee-saved registers into a single instruction.
    bool NoCalleeSaveRestore = PrologueSaveSize == 0;
    int64_t StackRestoreBytes = RedZone ? 0 : NumBytes;
    if (NoCalleeSaveRestore)
      StackRestoreBytes += AfterCSRPopSize;

    emitFrameOffset(MBB, FirstGPRRestoreI, DL, AArch64::SP, AArch64::SP,
                    /* ... */);

    // If we were able to combine the local stack pop with the argument pop,
    // then we're done.
    if (NoCalleeSaveRestore || AfterCSRPopSize == 0)
      return;

    NumBytes = 0;
  }

  // Restore the original stack pointer.
  if (!IsFunclet && (MFI.hasVarSizedObjects() || AFI->isStackRealigned())) {
    emitFrameOffset(MBB, FirstGPRRestoreI, DL, AArch64::SP, AArch64::FP,
                    /* ... */);
  } else if (NumBytes) {
    // ...
  }

  // ...
  if (AfterCSRPopSize) {
    assert(AfterCSRPopSize > 0 && "attempting to reallocate arg stack that an "
                                  "interrupt may have clobbered");
    // ...
    emitFrameOffset(MBB, MBB.getFirstTerminator(), DL, AArch64::SP,
                    AArch64::SP, /* ... */);
  }
bool AArch64EpilogueEmitter::shouldCombineCSRLocalStackBump(
    uint64_t StackBumpBytes) const {
  // ...
  // Disable the combined SP bump if the last instruction is an MTE tag
  // store; it is almost always better to merge the SP adjustment into those
  // instructions.
  MachineBasicBlock::iterator LastI = MBB.getFirstTerminator();
  MachineBasicBlock::iterator Begin = MBB.begin();
  while (LastI != Begin) {
    --LastI;
    if (LastI->isTransient())
      continue;
    break;
  }
  switch (LastI->getOpcode()) {
  case AArch64::STGloop:
  case AArch64::STZGloop:
  case AArch64::STGi:
  case AArch64::STZGi:
  case AArch64::ST2Gi:
  case AArch64::STZ2Gi:
    return false;
  default:
    return true;
  }
}
void AArch64EpilogueEmitter::emitSwiftAsyncContextFramePointer(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  switch (MF.getTarget().Options.SwiftAsyncFramePointer) {
  // ...
  }
}

void AArch64EpilogueEmitter::emitShadowCallStackEpilogue(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  // Shadow call stack epilog: ldr x30, [x18, #-8]!
  // ...
  if (AFI->needsAsyncDwarfUnwindInfo(MF))
    CFIInstBuilder(MBB, MBBI, MachineInstr::FrameDestroy)
        .buildRestore(AArch64::X18);
}

void AArch64EpilogueEmitter::emitCalleeSavedRestores(
    MachineBasicBlock::iterator MBBI, bool SVE) const {
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  // ...
  for (const auto &Info : CSI) {
    // ...
    MCRegister Reg = Info.getReg();
    // ...
    CFIBuilder.buildRestore(Info.getReg());
  }
}

void AArch64EpilogueEmitter::finalizeEpilogue() const {
  if (AFI->needsShadowCallStackPrologueEpilogue(MF)) {
    emitShadowCallStackEpilogue(MBB.getFirstTerminator(), DL);
    // ...
  }
  // ...
  emitCalleeSavedGPRRestores(MBB.getFirstTerminator());
  if (AFI->shouldSignReturnAddress(MF)) {
    // ...
    if (!AFL.shouldSignReturnAddressEverywhere(MF)) {
      BuildMI(MBB, MBB.getFirstTerminator(), DL,
              TII->get(AArch64::PAUTH_EPILOGUE))
          .setMIFlag(MachineInstr::FrameDestroy);
    }
  }
  // ...
  if (HasWinCFI) {
    BuildMI(MBB, MBB.getFirstTerminator(), DL, TII->get(AArch64::SEH_EpilogEnd))
        .setMIFlag(MachineInstr::FrameDestroy);
    if (!MF.hasWinCFI())
      MF.setHasWinCFI(true);
  }
  // ...
  MBB.erase(SEHEpilogueStartI);
}