39#include "llvm/Config/llvm-config.h"
59#define DEBUG_TYPE "arm-cp-islands"
61#define ARM_CP_ISLANDS_OPT_NAME \
62 "ARM constant island placement and branch shortening pass"
64STATISTIC(NumSplit,
"Number of uncond branches inserted");
65STATISTIC(NumCBrFixed,
"Number of cond branches fixed");
66STATISTIC(NumUBrFixed,
"Number of uncond branches fixed");
67STATISTIC(NumTBs,
"Number of table branches generated");
68STATISTIC(NumT2CPShrunk,
"Number of Thumb2 constantpool instructions shrunk");
69STATISTIC(NumT2BrShrunk,
"Number of Thumb2 immediate branches shrunk");
71STATISTIC(NumJTMoved,
"Number of jump table destination blocks moved");
72STATISTIC(NumJTInserted,
"Number of jump table intermediate blocks inserted");
73STATISTIC(NumLEInserted,
"Number of LE backwards branches inserted");
77 cl::desc(
"Adjust basic block layout to better use TB[BH]"));
81 cl::desc(
"The max number of iteration for converge"));
85 cl::desc(
"Use compressed jump tables in Thumb-1 by synthesizing an "
86 "equivalent to the TBB/TBH instructions"));
102 std::unique_ptr<ARMBasicBlockUtils> BBUtils =
nullptr;
107 std::vector<MachineBasicBlock*> WaterList;
113 using water_iterator = std::vector<MachineBasicBlock *>::iterator;
134 bool KnownAlignment =
false;
137 bool neg,
bool soimm)
138 :
MI(mi), CPEMI(cpemi), MaxDisp(maxdisp), NegOk(
neg), IsSoImm(soimm) {
145 unsigned getMaxDisp()
const {
146 return (KnownAlignment ? MaxDisp : MaxDisp - 2) - 2;
152 std::vector<CPUser> CPUsers;
162 CPEntry(MachineInstr *cpemi,
unsigned cpi,
unsigned rc = 0)
163 : CPEMI(cpemi), CPI(cpi), RefCount(
rc) {}
175 std::vector<std::vector<CPEntry>> CPEntries;
179 DenseMap<int, int> JumpTableEntryIndices;
183 DenseMap<int, int> JumpTableUserIndices;
191 unsigned MaxDisp : 31;
196 ImmBranch(MachineInstr *mi,
unsigned maxdisp,
bool cond,
unsigned ubr)
197 : MI(mi), MaxDisp(maxdisp), isCond(cond), UncondBr(ubr) {}
201 std::vector<ImmBranch> ImmBranches;
204 SmallVector<MachineInstr*, 4> PushPopMIs;
207 SmallVector<MachineInstr*, 4> T2JumpTables;
210 MachineConstantPool *MCP;
211 const ARMBaseInstrInfo *TII;
212 const ARMSubtarget *STI;
213 ARMFunctionInfo *AFI;
214 MachineDominatorTree *DT =
nullptr;
218 bool isPositionIndependentOrROPI;
223 ARMConstantIslands() : MachineFunctionPass(ID) {}
225 bool runOnMachineFunction(MachineFunction &MF)
override;
227 void getAnalysisUsage(AnalysisUsage &AU)
const override {
232 MachineFunctionProperties getRequiredProperties()
const override {
233 return MachineFunctionProperties().setNoVRegs();
236 StringRef getPassName()
const override {
241 void doInitialConstPlacement(std::vector<MachineInstr *> &CPEMIs);
242 void doInitialJumpTablePlacement(std::vector<MachineInstr *> &CPEMIs);
244 CPEntry *findConstPoolEntry(
unsigned CPI,
const MachineInstr *CPEMI);
245 Align getCPEAlign(
const MachineInstr *CPEMI);
246 void scanFunctionJumpTables();
247 void initializeFunctionInfo(
const std::vector<MachineInstr*> &CPEMIs);
248 MachineBasicBlock *splitBlockBeforeInstr(MachineInstr *
MI);
249 void updateForInsertedWaterBlock(MachineBasicBlock *NewBB);
250 bool decrementCPEReferenceCount(
unsigned CPI, MachineInstr* CPEMI);
251 unsigned getCombinedIndex(
const MachineInstr *CPEMI);
252 int findInRangeCPEntry(CPUser& U,
unsigned UserOffset);
253 bool findAvailableWater(CPUser&U,
unsigned UserOffset,
254 water_iterator &WaterIter,
bool CloserWater);
255 void createNewWater(
unsigned CPUserIndex,
unsigned UserOffset,
256 MachineBasicBlock *&NewMBB);
257 bool handleConstantPoolUser(
unsigned CPUserIndex,
bool CloserWater);
258 void removeDeadCPEMI(MachineInstr *CPEMI);
259 bool removeUnusedCPEntries();
260 bool isCPEntryInRange(MachineInstr *
MI,
unsigned UserOffset,
261 MachineInstr *CPEMI,
unsigned Disp,
bool NegOk,
262 bool DoDump =
false);
263 bool isWaterInRange(
unsigned UserOffset, MachineBasicBlock *Water,
264 CPUser &U,
unsigned &Growth);
265 bool fixupImmediateBr(ImmBranch &Br);
266 bool fixupConditionalBr(ImmBranch &Br);
267 bool fixupUnconditionalBr(ImmBranch &Br);
268 bool optimizeThumb2Instructions();
269 bool optimizeThumb2Branches();
270 bool reorderThumb2JumpTables();
271 bool preserveBaseRegister(MachineInstr *JumpMI, MachineInstr *LEAMI,
272 unsigned &DeadSize,
bool &CanDeleteLEA,
274 bool optimizeThumb2JumpTables();
275 MachineBasicBlock *adjustJTTargetBlockForward(
unsigned JTI,
276 MachineBasicBlock *BB,
277 MachineBasicBlock *JTBB);
279 unsigned getUserOffset(CPUser&)
const;
283 bool isOffsetInRange(
unsigned UserOffset,
unsigned TrialOffset,
284 unsigned Disp,
bool NegativeOK,
bool IsSoImm =
false);
285 bool isOffsetInRange(
unsigned UserOffset,
unsigned TrialOffset,
287 return isOffsetInRange(UserOffset, TrialOffset,
288 U.getMaxDisp(),
U.NegOk,
U.IsSoImm);
294char ARMConstantIslands::ID = 0;
297void ARMConstantIslands::verify() {
301 const MachineBasicBlock &
RHS) {
302 return BBInfo[
LHS.getNumber()].postOffset() <
303 BBInfo[
RHS.getNumber()].postOffset();
305 LLVM_DEBUG(
dbgs() <<
"Verifying " << CPUsers.size() <<
" CP users.\n");
306 for (CPUser &U : CPUsers) {
307 unsigned UserOffset = getUserOffset(U);
310 if (isCPEntryInRange(
U.MI, UserOffset,
U.CPEMI,
U.getMaxDisp()+2,
U.NegOk,
323#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
328 for (
unsigned J = 0,
E = BBInfo.
size(); J !=
E; ++J) {
329 const BasicBlockInfo &BBI = BBInfo[J];
348 const Align Alignment = TLI->getPrefLoopAlignment();
353 bool PrevCanFallthough =
true;
354 for (
auto &
MBB : *MF) {
355 if (!PrevCanFallthough) {
357 MBB.setAlignment(Alignment);
360 PrevCanFallthough =
MBB.canFallThrough();
366 if (
MI.getOpcode() == ARM::t2B &&
367 MI.getOperand(0).getMBB() ==
MBB.getNextNode())
370 MI.getOpcode() == ARM::t2LoopEndDec) {
371 PrevCanFallthough =
true;
383bool ARMConstantIslands::runOnMachineFunction(MachineFunction &mf) {
386 BBUtils = std::make_unique<ARMBasicBlockUtils>(mf);
389 << MCP->
getConstants().size() <<
" CP entries, aligned to "
394 isPositionIndependentOrROPI =
396 AFI = MF->
getInfo<ARMFunctionInfo>();
397 DT = &getAnalysis<MachineDominatorTreeWrapperPass>().getDomTree();
406 if (STI->hardenSlsRetBr())
412 DT->updateBlockNumbers();
416 bool MadeChange =
false;
418 scanFunctionJumpTables();
419 MadeChange |= reorderThumb2JumpTables();
421 T2JumpTables.
clear();
424 DT->updateBlockNumbers();
432 std::vector<MachineInstr*> CPEMIs;
434 doInitialConstPlacement(CPEMIs);
437 doInitialJumpTablePlacement(CPEMIs);
445 initializeFunctionInfo(CPEMIs);
451 if (!T2JumpTables.
empty())
455 MadeChange |= removeUnusedCPEntries();
459 unsigned NoCPIters = 0, NoBRIters = 0;
461 LLVM_DEBUG(
dbgs() <<
"Beginning CP iteration #" << NoCPIters <<
'\n');
462 bool CPChange =
false;
463 for (
unsigned i = 0, e = CPUsers.size(); i != e; ++i)
467 CPChange |= handleConstantPoolUser(i, NoCPIters >=
CPMaxIteration / 2);
474 NewWaterList.
clear();
476 LLVM_DEBUG(
dbgs() <<
"Beginning BR iteration #" << NoBRIters <<
'\n');
477 bool BRChange =
false;
478 for (
unsigned i = 0, e = ImmBranches.size(); i != e; ++i) {
480 BRChange |= fixupImmediateBr(ImmBranches[i]);
482 if (BRChange && ++NoBRIters > 30)
486 if (!CPChange && !BRChange)
492 if (isThumb2 && !STI->prefers32BitThumb())
493 MadeChange |= optimizeThumb2Instructions();
496 if (
isThumb && STI->hasV8MBaselineOps())
497 MadeChange |= optimizeThumb2Branches();
500 if (GenerateTBB && !STI->genExecuteOnly())
501 MadeChange |= optimizeThumb2JumpTables();
507 for (
unsigned i = 0, e = CPEntries.size(); i != e; ++i) {
508 for (
unsigned j = 0, je = CPEntries[i].
size();
j != je; ++
j) {
509 const CPEntry & CPE = CPEntries[i][
j];
510 if (CPE.CPEMI && CPE.CPEMI->getOperand(1).isCPI())
521 JumpTableEntryIndices.
clear();
522 JumpTableUserIndices.
clear();
525 T2JumpTables.
clear();
533ARMConstantIslands::doInitialConstPlacement(std::vector<MachineInstr*> &CPEMIs) {
540 const unsigned MaxLogAlign =
Log2(MaxAlign);
548 Align FuncAlign = MaxAlign;
550 FuncAlign =
Align(4);
562 const std::vector<MachineConstantPoolEntry> &CPs = MCP->
getConstants();
565 for (
unsigned i = 0, e = CPs.size(); i != e; ++i) {
566 unsigned Size = CPs[i].getSizeInBytes(TD);
567 Align Alignment = CPs[i].getAlign();
573 unsigned LogAlign =
Log2(Alignment);
575 MachineInstr *CPEMI =
578 CPEMIs.push_back(CPEMI);
582 for (
unsigned a = LogAlign + 1; a <= MaxLogAlign; ++a)
583 if (InsPoint[a] == InsAt)
587 CPEntries.emplace_back(1, CPEntry(CPEMI, i));
589 LLVM_DEBUG(
dbgs() <<
"Moved CPI#" << i <<
" to end of function, size = "
590 <<
Size <<
", align = " << Alignment.
value() <<
'\n');
600void ARMConstantIslands::doInitialJumpTablePlacement(
601 std::vector<MachineInstr *> &CPEMIs) {
602 unsigned i = CPEntries.size();
604 const std::vector<MachineJumpTableEntry> &
JT = MJTI->getJumpTables();
610 MachineBasicBlock *LastCorrectlyNumberedBB =
nullptr;
611 for (MachineBasicBlock &
MBB : *MF) {
623 switch (
MI->getOpcode()) {
629 case ARM::BR_JTm_i12:
634 assert(!MF->getInfo<ARMFunctionInfo>()->branchTargetEnforcement() &&
635 "Branch protection must not be enabled for Arm or Thumb1 modes");
636 JTOpcode = ARM::JUMPTABLE_ADDRS;
639 JTOpcode = ARM::JUMPTABLE_INSTS;
643 JTOpcode = ARM::JUMPTABLE_TBB;
647 JTOpcode = ARM::JUMPTABLE_TBH;
651 unsigned NumOps =
MI->getDesc().getNumOperands();
652 MachineOperand JTOp =
653 MI->getOperand(
NumOps - (
MI->isPredicable() ? 2 : 1));
655 unsigned Size =
JT[JTI].MBBs.size() *
sizeof(uint32_t);
656 MachineBasicBlock *JumpTableBB = MF->CreateMachineBasicBlock();
658 MachineInstr *CPEMI =
BuildMI(*JumpTableBB, JumpTableBB->
begin(),
663 CPEMIs.push_back(CPEMI);
664 CPEntries.emplace_back(1, CPEntry(CPEMI, JTI));
665 JumpTableEntryIndices.
insert(std::make_pair(JTI, CPEntries.size() - 1));
666 if (!LastCorrectlyNumberedBB)
667 LastCorrectlyNumberedBB = &
MBB;
671 if (LastCorrectlyNumberedBB) {
672 MF->RenumberBlocks(LastCorrectlyNumberedBB);
673 DT->updateBlockNumbers();
679bool ARMConstantIslands::BBHasFallthrough(MachineBasicBlock *
MBB) {
686 MachineBasicBlock *NextBB = &*std::next(
MBBI);
692 MachineBasicBlock *
TBB, *FBB;
695 return TooDifficult || FBB ==
nullptr;
700ARMConstantIslands::CPEntry *
701ARMConstantIslands::findConstPoolEntry(
unsigned CPI,
702 const MachineInstr *CPEMI) {
703 std::vector<CPEntry> &CPEs = CPEntries[CPI];
706 for (CPEntry &CPE : CPEs)
707 if (CPE.CPEMI == CPEMI)
714Align ARMConstantIslands::getCPEAlign(
const MachineInstr *CPEMI) {
716 case ARM::CONSTPOOL_ENTRY:
718 case ARM::JUMPTABLE_TBB:
720 case ARM::JUMPTABLE_TBH:
722 case ARM::JUMPTABLE_INSTS:
724 case ARM::JUMPTABLE_ADDRS:
730 unsigned CPI = getCombinedIndex(CPEMI);
731 assert(CPI < MCP->getConstants().
size() &&
"Invalid constant pool index.");
738void ARMConstantIslands::scanFunctionJumpTables() {
739 for (MachineBasicBlock &
MBB : *MF) {
740 for (MachineInstr &
I :
MBB)
742 (
I.getOpcode() == ARM::t2BR_JT ||
I.getOpcode() == ARM::tBR_JTr))
750void ARMConstantIslands::
751initializeFunctionInfo(
const std::vector<MachineInstr*> &CPEMIs) {
753 BBUtils->computeAllBlockSizes();
757 BBInfo.
front().KnownBits =
Log2(MF->getAlignment());
760 BBUtils->adjustBBOffsetsAfter(&MF->front());
763 MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
764 bool InlineJumpTables =
768 for (MachineBasicBlock &
MBB : *MF) {
772 WaterList.push_back(&
MBB);
774 for (MachineInstr &
I :
MBB) {
775 if (
I.isDebugInstr())
778 unsigned Opc =
I.getOpcode();
789 if (InlineJumpTables)
823 unsigned MaxOffs = ((1 << (
Bits-1))-1) * Scale;
824 ImmBranches.push_back(ImmBranch(&
I, MaxOffs, isCond, UOpc));
827 if (
Opc == ARM::tPUSH ||
Opc == ARM::tPOP_RET)
830 if (
Opc == ARM::CONSTPOOL_ENTRY ||
Opc == ARM::JUMPTABLE_ADDRS ||
831 Opc == ARM::JUMPTABLE_INSTS ||
Opc == ARM::JUMPTABLE_TBB ||
832 Opc == ARM::JUMPTABLE_TBH)
836 for (
unsigned op = 0, e =
I.getNumOperands();
op != e; ++
op)
837 if (
I.getOperand(
op).isCPI() ||
838 (
I.getOperand(
op).isJTI() && InlineJumpTables)) {
846 bool IsSoImm =
false;
854 case ARM::LEApcrelJT: {
862 unsigned CPI =
I.getOperand(
op).getIndex();
863 assert(CPI < CPEMIs.size());
864 MachineInstr *CPEMI = CPEMIs[CPI];
865 const Align CPEAlign = getCPEAlign(CPEMI);
866 const unsigned LogCPEAlign =
Log2(CPEAlign);
867 if (LogCPEAlign >= 2)
875 case ARM::t2LEApcrel:
876 case ARM::t2LEApcrelJT:
881 case ARM::tLEApcrelJT:
891 case ARM::t2LDRSHpci:
893 case ARM::t2LDRSBpci:
917 unsigned CPI =
I.getOperand(
op).getIndex();
918 if (
I.getOperand(
op).isJTI()) {
919 JumpTableUserIndices.
insert(std::make_pair(CPI, CPUsers.size()));
920 CPI = JumpTableEntryIndices[CPI];
923 MachineInstr *CPEMI = CPEMIs[CPI];
924 unsigned MaxOffs = ((1 <<
Bits)-1) * Scale;
925 CPUsers.push_back(CPUser(&
I, CPEMI, MaxOffs, NegOk, IsSoImm));
928 CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
929 assert(CPE &&
"Cannot find a corresponding CPEntry!");
944 return LHS->getNumber() <
RHS->getNumber();
950void ARMConstantIslands::updateForInsertedWaterBlock(MachineBasicBlock *NewBB) {
953 DT->updateBlockNumbers();
957 BBUtils->insert(NewBB->
getNumber(), BasicBlockInfo());
962 WaterList.insert(IP, NewBB);
968MachineBasicBlock *ARMConstantIslands::splitBlockBeforeInstr(MachineInstr *
MI) {
969 MachineBasicBlock *OrigBB =
MI->getParent();
972 LivePhysRegs LRs(*MF->getSubtarget().getRegisterInfo());
973 LRs.addLiveOuts(*OrigBB);
976 LRs.stepBackward(LiveMI);
979 MachineBasicBlock *NewBB =
982 MF->insert(
MBBI, NewBB);
991 unsigned Opc =
isThumb ? (isThumb2 ? ARM::t2B : ARM::tB) :
ARM::
B;
1007 MachineRegisterInfo &
MRI = MF->getRegInfo();
1009 if (!
MRI.isReserved(L))
1015 MF->RenumberBlocks(NewBB);
1016 DT->updateBlockNumbers();
1020 BBUtils->insert(NewBB->
getNumber(), BasicBlockInfo());
1027 MachineBasicBlock* WaterBB = *IP;
1028 if (WaterBB == OrigBB)
1029 WaterList.insert(std::next(IP), NewBB);
1031 WaterList.insert(IP, OrigBB);
1032 NewWaterList.
insert(OrigBB);
1039 BBUtils->computeBlockSize(OrigBB);
1043 BBUtils->computeBlockSize(NewBB);
1046 BBUtils->adjustBBOffsetsAfter(OrigBB);
1054unsigned ARMConstantIslands::getUserOffset(CPUser &U)
const {
1055 unsigned UserOffset = BBUtils->getOffsetOf(
U.MI);
1057 SmallVectorImpl<BasicBlockInfo> &BBInfo = BBUtils->getBBInfo();
1058 const BasicBlockInfo &BBI = BBInfo[
U.MI->getParent()->getNumber()];
1062 UserOffset += (
isThumb ? 4 : 8);
1066 U.KnownAlignment = (KnownBits >= 2);
1083bool ARMConstantIslands::isOffsetInRange(
unsigned UserOffset,
1084 unsigned TrialOffset,
unsigned MaxDisp,
1085 bool NegativeOK,
bool IsSoImm) {
1086 if (UserOffset <= TrialOffset) {
1088 if (TrialOffset - UserOffset <= MaxDisp)
1091 }
else if (NegativeOK) {
1092 if (UserOffset - TrialOffset <= MaxDisp)
1103bool ARMConstantIslands::isWaterInRange(
unsigned UserOffset,
1104 MachineBasicBlock* Water, CPUser &U,
1107 const Align CPEAlign = getCPEAlign(
U.CPEMI);
1108 const unsigned CPEOffset = BBInfo[Water->
getNumber()].postOffset(CPEAlign);
1109 unsigned NextBlockOffset;
1110 Align NextBlockAlignment;
1112 if (++NextBlock == MF->end()) {
1113 NextBlockOffset = BBInfo[Water->
getNumber()].postOffset();
1115 NextBlockOffset = BBInfo[NextBlock->getNumber()].Offset;
1116 NextBlockAlignment = NextBlock->getAlignment();
1118 unsigned Size =
U.CPEMI->getOperand(2).getImm();
1119 unsigned CPEEnd = CPEOffset +
Size;
1124 if (CPEEnd > NextBlockOffset) {
1125 Growth = CPEEnd - NextBlockOffset;
1133 if (CPEOffset < UserOffset)
1139 return isOffsetInRange(UserOffset, CPEOffset, U);
1144bool ARMConstantIslands::isCPEntryInRange(MachineInstr *
MI,
unsigned UserOffset,
1145 MachineInstr *CPEMI,
unsigned MaxDisp,
1146 bool NegOk,
bool DoDump) {
1147 unsigned CPEOffset = BBUtils->getOffsetOf(CPEMI);
1152 unsigned Block =
MI->getParent()->getNumber();
1153 const BasicBlockInfo &BBI = BBInfo[
Block];
1155 <<
" max delta=" << MaxDisp
1156 <<
format(
" insn address=%#x", UserOffset) <<
" in "
1159 <<
format(
"CPE address=%#x offset=%+d: ", CPEOffset,
1160 int(CPEOffset - UserOffset));
1164 return isOffsetInRange(UserOffset, CPEOffset, MaxDisp, NegOk);
1171 if (
MBB->pred_size() != 1 ||
MBB->succ_size() != 1)
1188bool ARMConstantIslands::decrementCPEReferenceCount(
unsigned CPI,
1189 MachineInstr *CPEMI) {
1191 CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
1192 assert(CPE &&
"Unexpected!");
1193 if (--CPE->RefCount == 0) {
1194 removeDeadCPEMI(CPEMI);
1195 CPE->CPEMI =
nullptr;
1202unsigned ARMConstantIslands::getCombinedIndex(
const MachineInstr *CPEMI) {
1215int ARMConstantIslands::findInRangeCPEntry(CPUser& U,
unsigned UserOffset) {
1216 MachineInstr *UserMI =
U.MI;
1217 MachineInstr *CPEMI =
U.CPEMI;
1220 if (isCPEntryInRange(UserMI, UserOffset, CPEMI,
U.getMaxDisp(),
U.NegOk,
1227 unsigned CPI = getCombinedIndex(CPEMI);
1228 std::vector<CPEntry> &CPEs = CPEntries[CPI];
1229 for (CPEntry &CPE : CPEs) {
1231 if (CPE.CPEMI == CPEMI)
1234 if (CPE.CPEMI ==
nullptr)
1236 if (isCPEntryInRange(UserMI, UserOffset, CPE.CPEMI,
U.getMaxDisp(),
1238 LLVM_DEBUG(
dbgs() <<
"Replacing CPE#" << CPI <<
" with CPE#" << CPE.CPI
1241 U.CPEMI = CPE.CPEMI;
1243 for (MachineOperand &MO : UserMI->
operands())
1245 MO.setIndex(CPE.CPI);
1252 return decrementCPEReferenceCount(CPI, CPEMI) ? 2 : 1;
1263 return ((1<<10)-1)*2;
1265 return ((1<<23)-1)*2;
1270 return ((1<<23)-1)*4;
1281bool ARMConstantIslands::findAvailableWater(CPUser &U,
unsigned UserOffset,
1282 water_iterator &WaterIter,
1284 if (WaterList.empty())
1287 unsigned BestGrowth = ~0
u;
1298 MachineBasicBlock *UserBB =
U.MI->getParent();
1300 const Align CPEAlign = getCPEAlign(
U.CPEMI);
1301 unsigned MinNoSplitDisp = BBInfo[UserBB->
getNumber()].postOffset(CPEAlign);
1302 if (CloserWater && MinNoSplitDisp >
U.getMaxDisp() / 2)
1304 for (water_iterator IP = std::prev(WaterList.end()),
B = WaterList.begin();;
1306 MachineBasicBlock* WaterBB = *IP;
1318 if (isWaterInRange(UserOffset, WaterBB, U, Growth) &&
1319 (WaterBB->
getNumber() <
U.HighWaterMark->getNumber() ||
1320 NewWaterList.
count(WaterBB) || WaterBB ==
U.MI->getParent()) &&
1321 Growth < BestGrowth) {
1323 BestGrowth = Growth;
1326 <<
" Growth=" << Growth <<
'\n');
1328 if (CloserWater && WaterBB ==
U.MI->getParent())
1332 if (!CloserWater && BestGrowth == 0)
1338 return BestGrowth != ~0
u;
1348void ARMConstantIslands::createNewWater(
unsigned CPUserIndex,
1349 unsigned UserOffset,
1350 MachineBasicBlock *&NewMBB) {
1351 CPUser &
U = CPUsers[CPUserIndex];
1352 MachineInstr *UserMI =
U.MI;
1353 MachineInstr *CPEMI =
U.CPEMI;
1354 const Align CPEAlign = getCPEAlign(CPEMI);
1355 MachineBasicBlock *UserMBB = UserMI->
getParent();
1357 const BasicBlockInfo &UserBBI = BBInfo[UserMBB->
getNumber()];
1365 unsigned Delta = isThumb1 ? 2 : 4;
1367 unsigned CPEOffset = UserBBI.
postOffset(CPEAlign) + Delta;
1369 if (isOffsetInRange(UserOffset, CPEOffset, U)) {
1371 <<
format(
", expected CPE offset %#x\n", CPEOffset));
1378 int UncondBr =
isThumb ? ((isThumb2) ? ARM::t2B : ARM::tB) :
ARM::
B;
1386 ImmBranches.push_back(ImmBranch(&UserMBB->
back(),
1387 MaxDisp,
false, UncondBr));
1388 BBUtils->computeBlockSize(UserMBB);
1389 BBUtils->adjustBBOffsetsAfter(UserMBB);
1410 assert(Align >= CPEAlign &&
"Over-aligned constant pool entry");
1413 unsigned BaseInsertOffset = UserOffset +
U.getMaxDisp() - UPad;
1420 BaseInsertOffset -= 4;
1423 <<
" la=" <<
Log2(Align) <<
" kb=" << KnownBits
1424 <<
" up=" << UPad <<
'\n');
1430 if (BaseInsertOffset + 8 >= UserBBI.
postOffset()) {
1436 UserOffset +
TII->getInstSizeInBytes(*UserMI) + 1);
1451 for (
unsigned Offset = UserOffset +
TII->getInstSizeInBytes(*UserMI);
1452 I->getOpcode() != ARM::t2IT &&
1454 Offset +=
TII->getInstSizeInBytes(*
I),
I = std::next(
I)) {
1456 std::max(BaseInsertOffset,
Offset +
TII->getInstSizeInBytes(*
I) + 1);
1457 assert(
I != UserMBB->
end() &&
"Fell off end of block");
1461 unsigned EndInsertOffset = BaseInsertOffset + 4 + UPad +
1465 unsigned CPUIndex = CPUserIndex+1;
1466 unsigned NumCPUsers = CPUsers.size();
1467 MachineInstr *LastIT =
nullptr;
1468 for (
unsigned Offset = UserOffset +
TII->getInstSizeInBytes(*UserMI);
1469 Offset < BaseInsertOffset;
1471 assert(
MI != UserMBB->
end() &&
"Fell off end of block");
1472 if (CPUIndex < NumCPUsers && CPUsers[CPUIndex].
MI == &*
MI) {
1473 CPUser &
U = CPUsers[CPUIndex];
1474 if (!isOffsetInRange(
Offset, EndInsertOffset, U)) {
1476 BaseInsertOffset -=
Align.value();
1477 EndInsertOffset -=
Align.value();
1483 EndInsertOffset +=
U.CPEMI->getOperand(2).getImm();
1488 if (
MI->getOpcode() == ARM::t2IT)
1513 assert(
MI->getOpcode() == ARM::t2MOVi16 &&
1523 NewMBB = splitBlockBeforeInstr(&*
MI);
1530bool ARMConstantIslands::handleConstantPoolUser(
unsigned CPUserIndex,
1532 CPUser &
U = CPUsers[CPUserIndex];
1533 MachineInstr *UserMI =
U.MI;
1534 MachineInstr *CPEMI =
U.CPEMI;
1535 unsigned CPI = getCombinedIndex(CPEMI);
1538 unsigned UserOffset = getUserOffset(U);
1542 int result = findInRangeCPEntry(U, UserOffset);
1543 if (result==1)
return false;
1544 else if (result==2)
return true;
1551 MachineBasicBlock *NewIsland = MF->CreateMachineBasicBlock();
1552 MachineBasicBlock *NewMBB;
1554 if (findAvailableWater(U, UserOffset, IP, CloserWater)) {
1556 MachineBasicBlock *WaterBB = *IP;
1561 if (NewWaterList.
erase(WaterBB))
1562 NewWaterList.
insert(NewIsland);
1569 createNewWater(CPUserIndex, UserOffset, NewMBB);
1576 MachineBasicBlock *WaterBB = &*--NewMBB->
getIterator();
1577 IP =
find(WaterList, WaterBB);
1578 if (IP != WaterList.end())
1579 NewWaterList.
erase(WaterBB);
1582 NewWaterList.
insert(NewIsland);
1595 if (IP != WaterList.end())
1596 WaterList.erase(IP);
1602 updateForInsertedWaterBlock(NewIsland);
1606 U.HighWaterMark = NewIsland;
1611 CPEntries[CPI].push_back(CPEntry(
U.CPEMI,
ID, 1));
1615 decrementCPEReferenceCount(CPI, CPEMI);
1621 BBUtils->adjustBBSize(NewIsland,
Size);
1622 BBUtils->adjustBBOffsetsAfter(&*--NewIsland->
getIterator());
1625 for (MachineOperand &MO : UserMI->
operands())
1632 dbgs() <<
" Moved CPE to #" <<
ID <<
" CPI=" << CPI
1633 <<
format(
" offset=%#x\n",
1634 BBUtils->getBBInfo()[NewIsland->
getNumber()].Offset));
1641void ARMConstantIslands::removeDeadCPEMI(MachineInstr *CPEMI) {
1642 MachineBasicBlock *CPEBB = CPEMI->
getParent();
1646 BBUtils->adjustBBSize(CPEBB, -
Size);
1648 if (CPEBB->
empty()) {
1658 BBUtils->adjustBBOffsetsAfter(CPEBB);
1668bool ARMConstantIslands::removeUnusedCPEntries() {
1669 unsigned MadeChange =
false;
1670 for (std::vector<CPEntry> &CPEs : CPEntries) {
1671 for (CPEntry &CPE : CPEs) {
1672 if (CPE.RefCount == 0 && CPE.CPEMI) {
1673 removeDeadCPEMI(CPE.CPEMI);
1674 CPE.CPEMI =
nullptr;
1685bool ARMConstantIslands::fixupImmediateBr(ImmBranch &Br) {
1686 MachineInstr *
MI = Br.MI;
1687 MachineBasicBlock *DestBB =
MI->getOperand(0).getMBB();
1690 if (BBUtils->isBBInRange(
MI, DestBB, Br.MaxDisp))
1694 return fixupUnconditionalBr(Br);
1695 return fixupConditionalBr(Br);
1703ARMConstantIslands::fixupUnconditionalBr(ImmBranch &Br) {
1704 MachineInstr *
MI = Br.MI;
1705 MachineBasicBlock *
MBB =
MI->getParent();
1713 Br.MaxDisp = (1 << 21) * 2;
1714 MI->setDesc(
TII->get(ARM::tBfar));
1717 BBUtils->adjustBBOffsetsAfter(
MBB);
1729ARMConstantIslands::fixupConditionalBr(ImmBranch &Br) {
1730 MachineInstr *
MI = Br.MI;
1731 MachineBasicBlock *DestBB =
MI->getOperand(0).getMBB();
1747 MachineBasicBlock *
MBB =
MI->getParent();
1748 MachineInstr *BMI = &
MBB->
back();
1763 if (BBUtils->isBBInRange(
MI, NewDest, Br.MaxDisp)) {
1765 dbgs() <<
" Invert Bcc condition and swap its destination with "
1768 MI->getOperand(0).setMBB(NewDest);
1769 MI->getOperand(1).setImm(CC);
1776 splitBlockBeforeInstr(
MI);
1779 int delta =
TII->getInstSizeInBytes(
MBB->
back());
1780 BBUtils->adjustBBSize(
MBB, -delta);
1793 <<
" also invert condition and change dest. to "
1801 BBUtils->adjustBBSize(
MBB,
TII->getInstSizeInBytes(
MBB->
back()));
1808 BBUtils->adjustBBSize(
MBB,
TII->getInstSizeInBytes(
MBB->
back()));
1810 ImmBranches.push_back(ImmBranch(&
MBB->
back(), MaxDisp,
false, Br.UncondBr));
1813 BBUtils->adjustBBSize(
MI->getParent(), -
TII->getInstSizeInBytes(*
MI));
1814 MI->eraseFromParent();
1815 BBUtils->adjustBBOffsetsAfter(
MBB);
1819bool ARMConstantIslands::optimizeThumb2Instructions() {
1820 bool MadeChange =
false;
1823 for (CPUser &U : CPUsers) {
1824 unsigned Opcode =
U.MI->getOpcode();
1825 unsigned NewOpc = 0;
1830 case ARM::t2LEApcrel:
1832 NewOpc = ARM::tLEApcrel;
1839 NewOpc = ARM::tLDRpci;
1849 unsigned UserOffset = getUserOffset(U);
1850 unsigned MaxOffs = ((1 <<
Bits) - 1) * Scale;
1853 if (!
U.KnownAlignment)
1857 if (isCPEntryInRange(
U.MI, UserOffset,
U.CPEMI, MaxOffs,
false,
true)) {
1859 U.MI->setDesc(
TII->get(NewOpc));
1860 MachineBasicBlock *
MBB =
U.MI->getParent();
1861 BBUtils->adjustBBSize(
MBB, -2);
1862 BBUtils->adjustBBOffsetsAfter(
MBB);
1872bool ARMConstantIslands::optimizeThumb2Branches() {
1874 auto TryShrinkBranch = [
this](ImmBranch &Br) {
1875 unsigned Opcode = Br.MI->getOpcode();
1876 unsigned NewOpc = 0;
1893 unsigned MaxOffs = ((1 << (
Bits-1))-1) * Scale;
1894 MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
1895 if (BBUtils->isBBInRange(Br.MI, DestBB, MaxOffs)) {
1897 Br.MI->setDesc(
TII->get(NewOpc));
1898 MachineBasicBlock *
MBB = Br.MI->getParent();
1899 BBUtils->adjustBBSize(
MBB, -2);
1900 BBUtils->adjustBBOffsetsAfter(
MBB);
1909 MachineInstr*
MI =
nullptr;
1910 unsigned NewOpc = 0;
1913 auto FindCmpForCBZ = [
this](ImmBranch &Br, ImmCompare &ImmCmp,
1914 MachineBasicBlock *DestBB) {
1915 ImmCmp.MI =
nullptr;
1920 if (!Br.MI->killsRegister(ARM::CPSR,
nullptr))
1924 unsigned NewOpc = 0;
1929 NewOpc = ARM::tCBNZ;
1935 unsigned BrOffset = BBUtils->getOffsetOf(Br.MI) + 4 - 2;
1937 unsigned DestOffset = BBInfo[DestBB->
getNumber()].Offset;
1938 if (BrOffset >= DestOffset || (DestOffset - BrOffset) > 126)
1944 if (!CmpMI || CmpMI->
getOpcode() != ARM::tCMPi8)
1948 ImmCmp.NewOpc = NewOpc;
1952 auto TryConvertToLE = [
this](ImmBranch &Br, ImmCompare &
Cmp) {
1953 if (Br.MI->getOpcode() != ARM::t2Bcc || !STI->hasLOB() ||
1957 MachineBasicBlock *
MBB = Br.MI->getParent();
1958 MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
1959 if (BBUtils->getOffsetOf(
MBB) < BBUtils->getOffsetOf(DestBB) ||
1960 !BBUtils->isBBInRange(Br.MI, DestBB, 4094))
1963 if (!DT->dominates(DestBB,
MBB))
1968 Cmp.NewOpc =
Cmp.NewOpc == ARM::tCBZ ? ARM::tCBNZ : ARM::tCBZ;
1970 MachineInstrBuilder MIB =
BuildMI(*
MBB, Br.MI, Br.MI->getDebugLoc(),
1971 TII->get(ARM::t2LE));
1973 MIB.
add(Br.MI->getOperand(0));
1974 Br.MI->eraseFromParent();
1980 bool MadeChange =
false;
1987 for (ImmBranch &Br :
reverse(ImmBranches)) {
1988 MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
1989 MachineBasicBlock *
MBB = Br.MI->getParent();
1990 MachineBasicBlock *ExitBB = &
MBB->
back() == Br.MI ?
1995 if (FindCmpForCBZ(Br, Cmp, ExitBB) && TryConvertToLE(Br, Cmp)) {
1999 FindCmpForCBZ(Br, Cmp, DestBB);
2000 MadeChange |= TryShrinkBranch(Br);
2003 unsigned Opcode = Br.MI->getOpcode();
2004 if ((Opcode != ARM::tBcc && Opcode != ARM::t2LE) || !
Cmp.NewOpc)
2013 bool RegKilled =
false;
2016 if (KillMI->killsRegister(
Reg,
TRI)) {
2017 KillMI->clearRegisterKills(
Reg,
TRI);
2021 }
while (KillMI !=
Cmp.MI);
2025 MachineInstr *NewBR =
2029 .
addMBB(DestBB, Br.MI->getOperand(0).getTargetFlags());
2031 Cmp.MI->eraseFromParent();
2033 if (Br.MI->getOpcode() == ARM::tBcc) {
2034 Br.MI->eraseFromParent();
2036 BBUtils->adjustBBSize(
MBB, -2);
2043 MachineInstr *LastMI = &
MBB->
back();
2047 BBUtils->adjustBBOffsetsAfter(
MBB);
2057 if (
I.getOpcode() != ARM::t2ADDrs)
2060 if (
I.getOperand(0).getReg() != EntryReg)
2063 if (
I.getOperand(1).getReg() != BaseReg)
2076bool ARMConstantIslands::preserveBaseRegister(MachineInstr *JumpMI,
2077 MachineInstr *LEAMI,
2080 bool &BaseRegKill) {
2103 CanDeleteLEA =
true;
2104 BaseRegKill =
false;
2105 MachineInstr *RemovableAdd =
nullptr;
2107 for (++
I; &*
I != JumpMI; ++
I) {
2113 for (
const MachineOperand &MO :
I->operands()) {
2114 if (!MO.isReg() || !MO.getReg())
2116 if (MO.isDef() && MO.getReg() == BaseReg)
2118 if (MO.isUse() && MO.getReg() == BaseReg) {
2119 BaseRegKill = BaseRegKill || MO.isKill();
2120 CanDeleteLEA =
false;
2130 for (++
I; &*
I != JumpMI; ++
I) {
2131 for (
const MachineOperand &MO :
I->operands()) {
2132 if (!MO.isReg() || !MO.getReg())
2134 if (MO.isDef() && MO.getReg() == BaseReg)
2136 if (MO.isUse() && MO.getReg() == EntryReg)
2137 RemovableAdd =
nullptr;
2143 DeadSize += isThumb2 ? 4 : 2;
2144 }
else if (BaseReg == EntryReg) {
2165 return MBB != MF->
end() && !
MBB->empty() && &*
MBB->begin() == CPEMI;
2170 unsigned &DeadSize) {
2179 for (++
I; &*
I != JumpMI; ++
I) {
2180 if (
I->getOpcode() == ARM::t2ADDrs &&
I->getOperand(0).getReg() == EntryReg)
2189 for (++J; &*J != JumpMI; ++J) {
2191 if (!MO.isReg() || !MO.getReg())
2193 if (MO.isDef() && MO.getReg() == EntryReg)
2195 if (MO.isUse() && MO.getReg() == EntryReg)
2207bool ARMConstantIslands::optimizeThumb2JumpTables() {
2208 bool MadeChange =
false;
2212 MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
2213 if (!MJTI)
return false;
2216 for (MachineInstr *
MI : T2JumpTables) {
2217 const MCInstrDesc &MCID =
MI->getDesc();
2219 unsigned JTOpIdx =
NumOps - (
MI->isPredicable() ? 2 : 1);
2220 MachineOperand JTOP =
MI->getOperand(JTOpIdx);
2225 bool HalfWordOk =
true;
2226 unsigned JTOffset = BBUtils->getOffsetOf(
MI) + 4;
2227 const std::vector<MachineBasicBlock*> &JTBBs =
JT[JTI].MBBs;
2229 for (MachineBasicBlock *
MBB : JTBBs) {
2233 if (ByteOk && (DstOffset - JTOffset) > ((1<<8)-1)*2)
2235 unsigned TBHLimit = ((1<<16)-1)*2;
2236 if (HalfWordOk && (DstOffset - JTOffset) > TBHLimit)
2238 if (!ByteOk && !HalfWordOk)
2242 if (!ByteOk && !HalfWordOk)
2245 CPUser &
User = CPUsers[JumpTableUserIndices[JTI]];
2246 MachineBasicBlock *
MBB =
MI->getParent();
2247 if (!
MI->getOperand(0).isKill())
2250 unsigned DeadSize = 0;
2251 bool CanDeleteLEA =
false;
2252 bool BaseRegKill =
false;
2254 unsigned IdxReg = ~0
U;
2255 bool IdxRegKill =
true;
2257 IdxReg =
MI->getOperand(1).getReg();
2258 IdxRegKill =
MI->getOperand(1).isKill();
2260 bool PreservedBaseReg =
2261 preserveBaseRegister(
MI,
User.MI, DeadSize, CanDeleteLEA, BaseRegKill);
2271 MachineBasicBlock *UserMBB =
User.MI->getParent();
2273 if (Shift == UserMBB->
begin())
2277 if (Shift->getOpcode() != ARM::tLSLri ||
2278 Shift->getOperand(3).getImm() != 2 ||
2279 !Shift->getOperand(2).isKill())
2281 IdxReg = Shift->getOperand(2).getReg();
2282 Register ShiftedIdxReg = Shift->getOperand(0).getReg();
2290 MachineInstr *
Load =
User.MI->getNextNode();
2291 if (
Load->getOpcode() != ARM::tLDRr)
2293 if (
Load->getOperand(1).getReg() != BaseReg ||
2294 Load->getOperand(2).getReg() != ShiftedIdxReg ||
2295 !
Load->getOperand(2).isKill())
2309 if (isPositionIndependentOrROPI) {
2310 MachineInstr *
Add =
Load->getNextNode();
2311 if (
Add->getOpcode() != ARM::tADDrr ||
2312 Add->getOperand(2).getReg() != BaseReg ||
2313 Add->getOperand(3).getReg() !=
Load->getOperand(0).getReg() ||
2314 !
Add->getOperand(3).isKill())
2316 if (
Add->getOperand(0).getReg() !=
MI->getOperand(0).getReg())
2321 Add->eraseFromParent();
2324 if (
Load->getOperand(0).getReg() !=
MI->getOperand(0).getReg())
2332 CanDeleteLEA =
true;
2333 Shift->eraseFromParent();
2334 Load->eraseFromParent();
2339 MachineInstr *CPEMI =
User.CPEMI;
2340 unsigned Opc = ByteOk ? ARM::t2TBB_JT : ARM::t2TBH_JT;
2342 Opc = ByteOk ? ARM::tTBB_JT : ARM::tTBH_JT;
2345 MachineInstr *NewJTMI =
2354 unsigned JTOpc = ByteOk ? ARM::JUMPTABLE_TBB : ARM::JUMPTABLE_TBH;
2365 User.MI->eraseFromParent();
2366 DeadSize += isThumb2 ? 4 : 2;
2373 User.IsSoImm =
false;
2374 User.KnownAlignment =
false;
2378 int CPEntryIdx = JumpTableEntryIndices[JTI];
2379 auto &CPEs = CPEntries[CPEntryIdx];
2381 find_if(CPEs, [&](CPEntry &
E) {
return E.CPEMI ==
User.CPEMI; });
2383 CPUsers.emplace_back(CPUser(NewJTMI,
User.CPEMI, 4,
false,
false));
2387 unsigned NewSize =
TII->getInstSizeInBytes(*NewJTMI);
2388 unsigned OrigSize =
TII->getInstSizeInBytes(*
MI);
2389 MI->eraseFromParent();
2391 int Delta = OrigSize - NewSize + DeadSize;
2393 BBUtils->adjustBBOffsetsAfter(
MBB);
2404bool ARMConstantIslands::reorderThumb2JumpTables() {
2405 bool MadeChange =
false;
2407 MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
2408 if (!MJTI)
return false;
2411 for (MachineInstr *
MI : T2JumpTables) {
2412 const MCInstrDesc &MCID =
MI->getDesc();
2414 unsigned JTOpIdx =
NumOps - (
MI->isPredicable() ? 2 : 1);
2415 MachineOperand JTOP =
MI->getOperand(JTOpIdx);
2422 int JTNumber =
MI->getParent()->getNumber();
2423 const std::vector<MachineBasicBlock*> &JTBBs =
JT[JTI].MBBs;
2424 for (MachineBasicBlock *
MBB : JTBBs) {
2427 if (DTNumber < JTNumber) {
2430 MachineBasicBlock *NewBB =
2431 adjustJTTargetBlockForward(JTI,
MBB,
MI->getParent());
2442MachineBasicBlock *ARMConstantIslands::adjustJTTargetBlockForward(
2443 unsigned JTI, MachineBasicBlock *BB, MachineBasicBlock *JTBB) {
2448 MachineBasicBlock *
TBB =
nullptr, *FBB =
nullptr;
2461 if (!
B &&
Cond.empty() && BB != &MF->front() &&
2464 OldPrior->updateTerminator(BB);
2467 MF->RenumberBlocks();
2468 DT->updateBlockNumbers();
2474 MachineBasicBlock *NewBB =
2477 MF->insert(
MBBI, NewBB);
2480 for (
const MachineBasicBlock::RegisterMaskPair &RegMaskPair : BB->
liveins())
2496 MF->RenumberBlocks(NewBB);
2497 DT->updateBlockNumbers();
2510 return new ARMConstantIslands();
unsigned const MachineRegisterInfo * MRI
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
static bool isThumb(const MCSubtargetInfo &STI)
static cl::opt< unsigned > CPMaxIteration("arm-constant-island-max-iteration", cl::Hidden, cl::init(30), cl::desc("The max number of iteration for converge"))
static bool isSimpleIndexCalc(MachineInstr &I, unsigned EntryReg, unsigned BaseReg)
static bool jumpTableFollowsTB(MachineInstr *JTMI, MachineInstr *CPEMI)
Returns whether CPEMI is the first instruction in the block immediately following JTMI (assumed to be a TBB or TBH terminator).
static bool CompareMBBNumbers(const MachineBasicBlock *LHS, const MachineBasicBlock *RHS)
CompareMBBNumbers - Little predicate function to sort the WaterList by MBB ID.
static unsigned getUnconditionalBrDisp(int Opc)
getUnconditionalBrDisp - Returns the maximum displacement that can fit in the specific unconditional ...
static void RemoveDeadAddBetweenLEAAndJT(MachineInstr *LEAMI, MachineInstr *JumpMI, unsigned &DeadSize)
static bool AlignBlocks(MachineFunction *MF, const ARMSubtarget *STI)
static cl::opt< bool > SynthesizeThumb1TBB("arm-synthesize-thumb-1-tbb", cl::Hidden, cl::init(true), cl::desc("Use compressed jump tables in Thumb-1 by synthesizing an " "equivalent to the TBB/TBH instructions"))
static cl::opt< bool > AdjustJumpTableBlocks("arm-adjust-jump-tables", cl::Hidden, cl::init(true), cl::desc("Adjust basic block layout to better use TB[BH]"))
#define ARM_CP_ISLANDS_OPT_NAME
static bool BBIsJumpedOver(MachineBasicBlock *MBB)
BBIsJumpedOver - Return true if the specified basic block's only predecessor unconditionally branches to its only successor.
MachineBasicBlock MachineBasicBlock::iterator MBBI
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
#define LLVM_PREFERRED_TYPE(T)
\macro LLVM_PREFERRED_TYPE Adjust type of bit-field in debug info.
#define LLVM_DUMP_METHOD
Mark debug helper function definitions like dump() that should not be stripped from debug builds.
This file defines the DenseMap class.
const HexagonInstrInfo * TII
const size_t AbstractManglingParser< Derived, Alloc >::NumOps
This file implements the LivePhysRegs utility for tracking liveness of physical registers.
This file declares the MachineConstantPool class which is an abstract constant pool to keep track of ...
Register const TargetRegisterInfo * TRI
Promote Memory to Register
static bool BBHasFallthrough(MachineBasicBlock *MBB)
BBHasFallthrough - Return true if the specified basic block can fallthrough into the block immediatel...
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
const SmallVectorImpl< MachineOperand > MachineBasicBlock * TBB
const SmallVectorImpl< MachineOperand > & Cond
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
bool isThumb2Function() const
void initPICLabelUId(unsigned UId)
unsigned createPICLabelUId()
bool isThumb1OnlyFunction() const
bool isThumbFunction() const
void recordCPEClone(unsigned CPIdx, unsigned CPCloneIdx)
const ARMBaseInstrInfo * getInstrInfo() const override
bool isTargetWindows() const
const ARMTargetLowering * getTargetLowering() const override
const ARMBaseRegisterInfo * getRegisterInfo() const override
AnalysisUsage & addRequired()
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
FunctionPass class - This class is used to implement most global optimizations.
bool hasOptSize() const
Optimize this function for size (-Os) or minimum size (-Oz).
bool analyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB, MachineBasicBlock *&FBB, SmallVectorImpl< MachineOperand > &Cond, bool AllowModify) const override
Analyze the branching code at the end of MBB, returning true if it cannot be understood (e....
unsigned getNumOperands() const
Return the number of declared MachineOperands for this MachineInstruction.
unsigned getSize() const
Return the number of bytes in the encoding of this instruction, or zero if the encoding size cannot b...
LLVM_ABI void replaceSuccessor(MachineBasicBlock *Old, MachineBasicBlock *New)
Replace successor OLD with NEW and update probability info.
LLVM_ABI MachineBasicBlock * getFallThrough(bool JumpToFallThrough=true)
Return the fallthrough block if the block can implicitly transfer control to the block after it by fa...
LLVM_ABI void transferSuccessors(MachineBasicBlock *FromMBB)
Transfers all the successors from MBB to this machine basic block (i.e., copies all the successors Fr...
iterator_range< livein_iterator > liveins() const
int getNumber() const
MachineBasicBlocks are uniquely numbered at the function level, unless they're not in a MachineFuncti...
const BasicBlock * getBasicBlock() const
Return the LLVM basic block that this instance corresponded to originally.
LLVM_ABI void updateTerminator(MachineBasicBlock *PreviousLayoutSuccessor)
Update the terminator instructions in block to account for changes to block layout which may have bee...
void setAlignment(Align A)
Set alignment of the basic block.
LLVM_ABI void dump() const
LLVM_ABI void addSuccessor(MachineBasicBlock *Succ, BranchProbability Prob=BranchProbability::getUnknown())
Add Succ as a successor of this MachineBasicBlock.
LLVM_ABI iterator getLastNonDebugInstr(bool SkipPseudoOp=true)
Returns an iterator to the last non-debug instruction in the basic block, or end().
void addLiveIn(MCRegister PhysReg, LaneBitmask LaneMask=LaneBitmask::getAll())
Adds the specified register as a live in.
const MachineFunction * getParent() const
Return the MachineFunction containing this basic block.
reverse_iterator rbegin()
LLVM_ABI bool isSuccessor(const MachineBasicBlock *MBB) const
Return true if the specified MBB is a successor of this block.
void splice(iterator Where, MachineBasicBlock *Other, iterator From)
Take an instruction from MBB 'Other' at the position From, and insert it into this MBB right before '...
Align getAlignment() const
Return alignment of the basic block.
MachineInstrBundleIterator< MachineInstr > iterator
LLVM_ABI void moveAfter(MachineBasicBlock *NewBefore)
Align getConstantPoolAlign() const
Return the alignment required by the whole constant pool, of which the first element must be aligned.
const std::vector< MachineConstantPoolEntry > & getConstants() const
bool isEmpty() const
isEmpty - Return true if this constant pool contains no constants.
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - Subclasses that override getAnalysisUsage must call this.
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
void dump() const
dump - Print the current MachineFunction to cerr, useful for debugger use.
void ensureAlignment(Align A)
ensureAlignment - Make sure the function is at least A bytes aligned.
void push_back(MachineBasicBlock *MBB)
const DataLayout & getDataLayout() const
Return the DataLayout attached to the Module associated to this MF.
Function & getFunction()
Return the LLVM function that this machine code represents.
BasicBlockListType::iterator iterator
Ty * getInfo()
getInfo - Keep track of various per-function pieces of information for backends that would like to do...
MachineConstantPool * getConstantPool()
getConstantPool - Return the constant pool object for the current function.
void RenumberBlocks(MachineBasicBlock *MBBFrom=nullptr)
RenumberBlocks - This discards all of the MachineBasicBlock numbers and recomputes them.
const MachineJumpTableInfo * getJumpTableInfo() const
getJumpTableInfo - Return the jump table info object for the current function.
MachineBasicBlock * CreateMachineBasicBlock(const BasicBlock *BB=nullptr, std::optional< UniqueBBID > BBID=std::nullopt)
CreateMachineBasicBlock - Allocate a new MachineBasicBlock.
const TargetMachine & getTarget() const
getTarget - Return the target machine this machine code is compiled with
BasicBlockListType::const_iterator const_iterator
const MachineInstrBuilder & addImm(int64_t Val) const
Add a new immediate operand.
const MachineInstrBuilder & add(const MachineOperand &MO) const
const MachineInstrBuilder & addConstantPoolIndex(unsigned Idx, int Offset=0, unsigned TargetFlags=0) const
const MachineInstrBuilder & addReg(Register RegNo, unsigned flags=0, unsigned SubReg=0) const
Add a new virtual register operand.
const MachineInstrBuilder & addJumpTableIndex(unsigned Idx, unsigned TargetFlags=0) const
const MachineInstrBuilder & addMBB(MachineBasicBlock *MBB, unsigned TargetFlags=0) const
reverse_iterator getReverse() const
Get a reverse iterator to the same node.
Representation of each machine instruction.
unsigned getOpcode() const
Returns the opcode of this MachineInstr.
const MachineBasicBlock * getParent() const
const MCInstrDesc & getDesc() const
Returns the target instruction descriptor of this MachineInstr.
LLVM_ABI void setDesc(const MCInstrDesc &TID)
Replace the instruction descriptor (thus opcode) of the current instruction with a new one.
LLVM_ABI void eraseFromParent()
Unlink 'this' from the containing basic block and delete it.
const MachineOperand & getOperand(unsigned i) const
LLVM_ABI bool ReplaceMBBInJumpTable(unsigned Idx, MachineBasicBlock *Old, MachineBasicBlock *New)
ReplaceMBBInJumpTable - If Old is a target of the jump tables, update the jump table to branch to New...
@ EK_Inline
EK_Inline - Jump table entries are emitted inline at their point of use.
JTEntryKind getEntryKind() const
const std::vector< MachineJumpTableEntry > & getJumpTables() const
MachineOperand class - Representation of each machine instruction operand.
MachineBasicBlock * getMBB() const
bool isCPI() const
isCPI - Tests if this is a MO_ConstantPoolIndex operand.
LLVM_ABI void setReg(Register Reg)
Change the register this operand corresponds to.
void setIsKill(bool Val=true)
void setMBB(MachineBasicBlock *MBB)
unsigned getTargetFlags() const
Register getReg() const
getReg - Returns the register number.
Wrapper class representing virtual and physical registers.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
void push_back(const T &Elt)
bool isPositionIndependent() const
CodeGenOptLevel getOptLevel() const
Returns the optimization level: None, Less, Default, or Aggressive.
self_iterator getIterator()
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
static CondCodes getOppositeCondition(CondCodes CC)
@ MO_OPTION_MASK
MO_OPTION_MASK - Most flags are mutually exclusive; this mask selects just that part of the flag set.
@ MO_LO16
MO_LO16 - On a symbol operand, this represents a relocation containing lower 16 bit of the address.
@ MO_HI16
MO_HI16 - On a symbol operand, this represents a relocation containing higher 16 bit of the address.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
initializer< Ty > init(const Ty &Val)
@ User
could "use" a pointer
BaseReg
Stack frame base register. Bit 0 of FREInfo.Info.
This is an optimization pass for GlobalISel generic memory operations.
MachineInstr * findCMPToFoldIntoCBZ(MachineInstr *Br, const TargetRegisterInfo *TRI)
Search backwards from a tBcc to find a tCMPi8 against 0, meaning we can convert them to a tCBZ or tCB...
auto find(R &&Range, const T &Val)
Provide wrappers to std::find which take ranges instead of having to pass begin/end explicitly.
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
MachineInstrBuilder BuildMI(MachineFunction &MF, const MIMetadata &MIMD, const MCInstrDesc &MCID)
Builder interface. Specify how to create the initial instruction itself.
static bool isARMLowRegister(MCRegister Reg)
isARMLowRegister - Returns true if the register is a low register (r0-r7).
bool isAligned(Align Lhs, uint64_t SizeInBytes)
Checks that SizeInBytes is a multiple of the alignment.
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
bool registerDefinedBetween(unsigned Reg, MachineBasicBlock::iterator From, MachineBasicBlock::iterator To, const TargetRegisterInfo *TRI)
Return true if Reg is defined between From and To.
static std::array< MachineOperand, 2 > predOps(ARMCC::CondCodes Pred, unsigned PredReg=0)
Get the operands corresponding to the given Pred value.
ARMCC::CondCodes getITInstrPredicate(const MachineInstr &MI, Register &PredReg)
getITInstrPredicate - Valid only in Thumb2 mode.
auto reverse(ContainerTy &&C)
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
LLVM_ABI void report_fatal_error(Error Err, bool gen_crash_diag=true)
static bool isLoopStart(const MachineInstr &MI)
bool is_sorted(R &&Range, Compare C)
Wrapper function around std::is_sorted to check if elements in a range R are sorted with respect to a...
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
format_object< Ts... > format(const char *Fmt, const Ts &... Vals)
These are helper functions used to produce formatted output.
uint64_t offsetToAlignment(uint64_t Value, Align Alignment)
Returns the offset to the next integer (mod 2**64) that is greater than or equal to Value and is a mu...
unsigned getRegState(const MachineOperand &RegOp)
Get all register state flags from machine operand RegOp.
auto lower_bound(R &&Range, T &&Value)
Provide wrappers to std::lower_bound which take ranges instead of having to pass begin/end explicitly...
unsigned getKillRegState(bool B)
uint16_t MCPhysReg
An unsigned integer type large enough to represent all physical registers, but not necessarily virtua...
ARMCC::CondCodes getInstrPredicate(const MachineInstr &MI, Register &PredReg)
getInstrPredicate - If instruction is predicated, returns its predicate condition,...
FunctionPass * createARMConstantIslandPass()
createARMConstantIslandPass - returns an instance of the constpool island pass.
auto find_if(R &&Range, UnaryPredicate P)
Provide wrappers to std::find_if which take ranges instead of having to pass begin/end explicitly.
unsigned UnknownPadding(Align Alignment, unsigned KnownBits)
UnknownPadding - Return the worst case padding that could result from unknown offset bits.
APFloat neg(APFloat X)
Returns the negated value of the argument.
SmallVectorImpl< BasicBlockInfo > BBInfoVector
unsigned Log2(Align A)
Returns the log2 of the alignment.
static bool isSpeculationBarrierEndBBOpcode(int Opc)
IterT prev_nodbg(IterT It, IterT Begin, bool SkipPseudoOp=true)
Decrement It, then continue decrementing it while it points to a debug instruction.
LLVM_ABI Printable printMBBReference(const MachineBasicBlock &MBB)
Prints a machine basic block reference.
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Align PostAlign
PostAlign - When > 1, the block terminator contains a .align directive, so the end of the block is al...
uint8_t KnownBits
KnownBits - The number of low bits in Offset that are known to be exact.
unsigned internalKnownBits() const
Compute the number of known offset bits internally to this block.
unsigned postOffset(Align Alignment=Align(1)) const
Compute the offset immediately following this block.
uint8_t Unalign
Unalign - When non-zero, the block contains instructions (inline asm) of unknown size.
unsigned Offset
Offset - Distance from the beginning of the function to the beginning of this basic block.