// Human-readable pass name shown by the pass manager (e.g. -debug-pass output).
44#define AARCH64_EXPAND_PSEUDO_NAME "AArch64 pseudo instruction expansion pass"
68 unsigned ContiguousOpc,
unsigned StridedOpc);
79 unsigned LdarOp,
unsigned StlrOp,
unsigned CmpOp,
80 unsigned ExtendImm,
unsigned ZeroReg,
98 struct ConditionalBlocks {
// Static pass-identification token; its address uniquely identifies this pass
// to LLVM's pass registry (value itself is unused).
116char AArch64ExpandPseudo::ID = 0;
128 assert(MO.isReg() && MO.getReg());
145 uint64_t
Imm =
MI.getOperand(1).getImm();
147 if (DstReg == AArch64::XZR || DstReg == AArch64::WZR) {
150 MI.eraseFromParent();
158 SmallVector<MachineInstrBuilder, 4> MIBS;
160 bool LastItem = std::next(
I) ==
E;
165 case AArch64::ORRWri:
166 case AArch64::ORRXri:
169 .
add(
MI.getOperand(0))
170 .
addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
174 bool DstIsDead =
MI.getOperand(0).isDead();
177 .
addReg(DstReg, RegState::Define |
184 case AArch64::EONXrs:
185 case AArch64::EORXrs:
186 case AArch64::ORRWrs:
187 case AArch64::ORRXrs: {
189 bool DstIsDead =
MI.getOperand(0).isDead();
192 .
addReg(DstReg, RegState::Define |
199 case AArch64::ANDXri:
200 case AArch64::EORXri:
203 .
add(
MI.getOperand(0))
204 .
addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
208 bool DstIsDead =
MI.getOperand(0).isDead();
211 .
addReg(DstReg, RegState::Define |
218 case AArch64::MOVNWi:
219 case AArch64::MOVNXi:
220 case AArch64::MOVZWi:
221 case AArch64::MOVZXi: {
222 bool DstIsDead =
MI.getOperand(0).isDead();
224 .
addReg(DstReg, RegState::Define |
230 case AArch64::MOVKWi:
231 case AArch64::MOVKXi: {
233 bool DstIsDead =
MI.getOperand(0).isDead();
246 MI.eraseFromParent();
250bool AArch64ExpandPseudo::expandCMP_SWAP(
252 unsigned StlrOp,
unsigned CmpOp,
unsigned ExtendImm,
unsigned ZeroReg,
256 const MachineOperand &Dest =
MI.getOperand(0);
257 Register StatusReg =
MI.getOperand(1).getReg();
258 bool StatusDead =
MI.getOperand(1).isDead();
261 assert(!
MI.getOperand(2).isUndef() &&
"cannot handle undef");
263 Register DesiredReg =
MI.getOperand(3).getReg();
272 MF->
insert(++LoadCmpBB->getIterator(), StoreBB);
273 MF->
insert(++StoreBB->getIterator(), DoneBB);
281 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::MOVZWi), StatusReg)
285 BuildMI(LoadCmpBB, MIMD,
TII->get(CmpOp), ZeroReg)
289 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::Bcc))
292 .
addReg(AArch64::NZCV, RegState::Implicit | RegState::Kill);
293 LoadCmpBB->addSuccessor(DoneBB);
294 LoadCmpBB->addSuccessor(StoreBB);
299 BuildMI(StoreBB, MIMD,
TII->get(StlrOp), StatusReg)
302 BuildMI(StoreBB, MIMD,
TII->get(AArch64::CBNZW))
305 StoreBB->addSuccessor(LoadCmpBB);
306 StoreBB->addSuccessor(DoneBB);
308 DoneBB->splice(DoneBB->end(), &
MBB,
MI,
MBB.
end());
309 DoneBB->transferSuccessors(&
MBB);
314 MI.eraseFromParent();
317 LivePhysRegs LiveRegs;
322 StoreBB->clearLiveIns();
324 LoadCmpBB->clearLiveIns();
330bool AArch64ExpandPseudo::expandCMP_SWAP_128(
335 MachineOperand &DestLo =
MI.getOperand(0);
336 MachineOperand &DestHi =
MI.getOperand(1);
337 Register StatusReg =
MI.getOperand(2).getReg();
338 bool StatusDead =
MI.getOperand(2).isDead();
341 assert(!
MI.getOperand(3).isUndef() &&
"cannot handle undef");
343 Register DesiredLoReg =
MI.getOperand(4).getReg();
344 Register DesiredHiReg =
MI.getOperand(5).getReg();
345 Register NewLoReg =
MI.getOperand(6).getReg();
346 Register NewHiReg =
MI.getOperand(7).getReg();
348 unsigned LdxpOp, StxpOp;
350 switch (
MI.getOpcode()) {
351 case AArch64::CMP_SWAP_128_MONOTONIC:
352 LdxpOp = AArch64::LDXPX;
353 StxpOp = AArch64::STXPX;
355 case AArch64::CMP_SWAP_128_RELEASE:
356 LdxpOp = AArch64::LDXPX;
357 StxpOp = AArch64::STLXPX;
359 case AArch64::CMP_SWAP_128_ACQUIRE:
360 LdxpOp = AArch64::LDAXPX;
361 StxpOp = AArch64::STXPX;
363 case AArch64::CMP_SWAP_128:
364 LdxpOp = AArch64::LDAXPX;
365 StxpOp = AArch64::STLXPX;
378 MF->
insert(++LoadCmpBB->getIterator(), StoreBB);
379 MF->
insert(++StoreBB->getIterator(), FailBB);
380 MF->
insert(++FailBB->getIterator(), DoneBB);
391 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::SUBSXrs), AArch64::XZR)
395 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::CSINCWr), StatusReg)
399 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::SUBSXrs), AArch64::XZR)
403 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::CSINCWr), StatusReg)
404 .
addUse(StatusReg, RegState::Kill)
405 .
addUse(StatusReg, RegState::Kill)
407 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::CBNZW))
410 LoadCmpBB->addSuccessor(FailBB);
411 LoadCmpBB->addSuccessor(StoreBB);
416 BuildMI(StoreBB, MIMD,
TII->get(StxpOp), StatusReg)
420 BuildMI(StoreBB, MIMD,
TII->get(AArch64::CBNZW))
424 StoreBB->addSuccessor(LoadCmpBB);
425 StoreBB->addSuccessor(DoneBB);
430 BuildMI(FailBB, MIMD,
TII->get(StxpOp), StatusReg)
434 BuildMI(FailBB, MIMD,
TII->get(AArch64::CBNZW))
437 FailBB->addSuccessor(LoadCmpBB);
438 FailBB->addSuccessor(DoneBB);
440 DoneBB->splice(DoneBB->end(), &
MBB,
MI,
MBB.
end());
441 DoneBB->transferSuccessors(&
MBB);
446 MI.eraseFromParent();
449 LivePhysRegs LiveRegs;
456 FailBB->clearLiveIns();
458 StoreBB->clearLiveIns();
460 LoadCmpBB->clearLiveIns();
504bool AArch64ExpandPseudo::expand_DestructiveOp(
506 MachineBasicBlock &
MBB,
513 bool DstIsDead =
MI.getOperand(0).isDead();
515 unsigned PredIdx, DOPIdx, SrcIdx, Src2Idx;
520 if (DstReg ==
MI.getOperand(3).getReg()) {
522 std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 3, 2);
529 std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 2, 3);
532 std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(2, 3, 3);
535 std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 2, 3, 4);
536 if (DstReg ==
MI.getOperand(3).getReg()) {
538 std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 3, 4, 2);
540 }
else if (DstReg ==
MI.getOperand(4).getReg()) {
542 std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 4, 3, 2);
549 std::tie(DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 1, 2);
558 bool DOPRegIsUnique =
false;
561 DOPRegIsUnique = DstReg !=
MI.getOperand(SrcIdx).getReg();
566 DstReg !=
MI.getOperand(DOPIdx).getReg() ||
567 MI.getOperand(DOPIdx).getReg() !=
MI.getOperand(SrcIdx).getReg();
572 DOPRegIsUnique =
true;
576 DstReg !=
MI.getOperand(DOPIdx).getReg() ||
577 (
MI.getOperand(DOPIdx).
getReg() !=
MI.getOperand(SrcIdx).getReg() &&
578 MI.getOperand(DOPIdx).getReg() !=
MI.getOperand(Src2Idx).getReg());
594 uint64_t ElementSize =
TII->getElementSizeForOpcode(Opcode);
595 unsigned MovPrfx, LSLZero, MovPrfxZero;
596 switch (ElementSize) {
599 MovPrfx = AArch64::MOVPRFX_ZZ;
600 LSLZero = AArch64::LSL_ZPmI_B;
601 MovPrfxZero = AArch64::MOVPRFX_ZPzZ_B;
604 MovPrfx = AArch64::MOVPRFX_ZZ;
605 LSLZero = AArch64::LSL_ZPmI_H;
606 MovPrfxZero = AArch64::MOVPRFX_ZPzZ_H;
609 MovPrfx = AArch64::MOVPRFX_ZZ;
610 LSLZero = AArch64::LSL_ZPmI_S;
611 MovPrfxZero = AArch64::MOVPRFX_ZPzZ_S;
614 MovPrfx = AArch64::MOVPRFX_ZZ;
615 LSLZero = AArch64::LSL_ZPmI_D;
616 MovPrfxZero = AArch64::MOVPRFX_ZPzZ_D;
628 MachineInstrBuilder PRFX, DOP;
635 "The destructive operand should be unique");
637 "This instruction is unpredicated");
641 .
addReg(DstReg, RegState::Define)
642 .
addReg(
MI.getOperand(PredIdx).getReg())
643 .
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState);
657 .
addReg(DstReg, RegState::Define)
658 .
add(
MI.getOperand(PredIdx))
662 }
else if (DstReg !=
MI.getOperand(DOPIdx).getReg()) {
663 assert(DOPRegIsUnique &&
"The destructive operand should be unique");
665 .
addReg(DstReg, RegState::Define)
666 .
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState);
676 DOPRegState = DOPRegState | RegState::Kill;
680 DOP.
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState)
681 .
add(
MI.getOperand(PredIdx))
682 .
add(
MI.getOperand(SrcIdx));
688 DOP.
add(
MI.getOperand(PredIdx))
689 .
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState)
690 .
add(
MI.getOperand(SrcIdx));
693 DOP.
add(
MI.getOperand(PredIdx))
694 .
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState)
695 .
add(
MI.getOperand(SrcIdx))
696 .
add(
MI.getOperand(Src2Idx));
699 DOP.
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState)
700 .
add(
MI.getOperand(SrcIdx))
701 .
add(
MI.getOperand(Src2Idx));
706 transferImpOps(
MI, PRFX, DOP);
709 transferImpOps(
MI, DOP, DOP);
711 MI.eraseFromParent();
715bool AArch64ExpandPseudo::expandSetTagLoop(
721 Register AddressReg =
MI.getOperand(1).getReg();
725 bool ZeroData =
MI.getOpcode() == AArch64::STZGloop_wback;
726 const unsigned OpCode1 =
727 ZeroData ? AArch64::STZGPostIndex : AArch64::STGPostIndex;
728 const unsigned OpCode2 =
729 ZeroData ? AArch64::STZ2GPostIndex : AArch64::ST2GPostIndex;
731 unsigned Size =
MI.getOperand(2).getImm();
733 if (
Size % (16 * 2) != 0) {
749 MF->
insert(++LoopBB->getIterator(), DoneBB);
766 .
addReg(AArch64::NZCV, RegState::Implicit | RegState::Kill);
768 LoopBB->addSuccessor(LoopBB);
769 LoopBB->addSuccessor(DoneBB);
771 DoneBB->splice(DoneBB->end(), &
MBB,
MI,
MBB.
end());
772 DoneBB->transferSuccessors(&
MBB);
777 MI.eraseFromParent();
779 LivePhysRegs LiveRegs;
784 LoopBB->clearLiveIns();
786 DoneBB->clearLiveIns();
792bool AArch64ExpandPseudo::expandSVESpillFill(MachineBasicBlock &
MBB,
794 unsigned Opc,
unsigned N) {
795 assert((
Opc == AArch64::LDR_ZXI ||
Opc == AArch64::STR_ZXI ||
796 Opc == AArch64::LDR_PXI ||
Opc == AArch64::STR_PXI) &&
797 "Unexpected opcode");
800 unsigned sub0 = (
Opc == AArch64::LDR_ZXI ||
Opc == AArch64::STR_ZXI)
803 const TargetRegisterInfo *
TRI =
807 int ImmOffset =
MI.getOperand(2).getImm() +
Offset;
808 bool Kill = (
Offset + 1 ==
N) ?
MI.getOperand(1).isKill() :
false;
809 assert(ImmOffset >= -256 && ImmOffset < 256 &&
810 "Immediate spill offset out of range");
817 MI.eraseFromParent();
828 unsigned RegMaskStartIdx) {
837 while (!
MBBI->getOperand(RegMaskStartIdx).isRegMask()) {
839 assert(MOP.
isReg() &&
"can only add register operands");
841 MOP.
getReg(),
false,
true,
false,
847 Call->addOperand(MO);
858 unsigned RegMaskStartIdx) {
859 unsigned Opc = CallTarget.
isGlobal() ? AArch64::BL : AArch64::BLR;
862 "invalid operand for regular call");
866bool AArch64ExpandPseudo::expandCALL_RVMARKER(
874 MachineOperand &RVTarget =
MI.getOperand(0);
875 bool DoEmitMarker =
MI.getOperand(1).getImm();
876 assert(RVTarget.
isGlobal() &&
"invalid operand for attached call");
878 MachineInstr *OriginalCall =
nullptr;
880 if (
MI.getOpcode() == AArch64::BLRA_RVMARKER) {
882 const MachineOperand &CallTarget =
MI.getOperand(2);
883 const MachineOperand &
Key =
MI.getOperand(3);
884 const MachineOperand &IntDisc =
MI.getOperand(4);
885 const MachineOperand &AddrDisc =
MI.getOperand(5);
889 "Invalid auth call key");
891 MachineOperand
Ops[] = {CallTarget,
Key, IntDisc, AddrDisc};
896 assert(
MI.getOpcode() == AArch64::BLR_RVMARKER &&
"unknown rvmarker MI");
904 .
addReg(AArch64::FP, RegState::Define)
913 if (
MI.shouldUpdateAdditionalCallInfo())
916 MI.eraseFromParent();
918 std::next(RVCall->getIterator()));
922bool AArch64ExpandPseudo::expandCALL_BTI(MachineBasicBlock &
MBB,
942 if (
MI.shouldUpdateAdditionalCallInfo())
945 MI.eraseFromParent();
950bool AArch64ExpandPseudo::expandStoreSwiftAsyncContext(
958 if (STI.getTargetTriple().getArchName() !=
"arm64e") {
975 unsigned Opc =
Offset >= 0 ? AArch64::ADDXri : AArch64::SUBXri;
1007bool AArch64ExpandPseudo::expandSTSHHAtomicStore(
1009 MachineInstr &
MI = *
MBBI;
1012 unsigned Order =
MI.getOperand(2).getImm();
1013 unsigned Policy =
MI.getOperand(3).getImm();
1014 unsigned Size =
MI.getOperand(4).getImm();
1016 bool IsRelaxed = Order == 0;
1017 unsigned StoreOpc = 0;
1022 StoreOpc = IsRelaxed ? AArch64::STRBBui : AArch64::STLRB;
1025 StoreOpc = IsRelaxed ? AArch64::STRHHui : AArch64::STLRH;
1028 StoreOpc = IsRelaxed ? AArch64::STRWui : AArch64::STLRW;
1031 StoreOpc = IsRelaxed ? AArch64::STRXui : AArch64::STLRX;
1046 const TargetRegisterInfo *
TRI =
1048 Register SubReg =
TRI->getSubReg(ValReg, AArch64::sub_32);
1055 .
add(
MI.getOperand(1));
1063 transferImpOps(
MI, Store, Store);
1068 MI.eraseFromParent();
1072AArch64ExpandPseudo::ConditionalBlocks
1073AArch64ExpandPseudo::expandConditionalPseudo(MachineBasicBlock &
MBB,
1076 MachineInstrBuilder &Branch) {
1079 "Unexpected unreachable in block");
1085 MachineInstr &PrevMI = *std::prev(
MBBI);
1086 MachineBasicBlock *CondBB =
MBB.
splitAt(PrevMI,
true);
1087 MachineBasicBlock *EndBB =
1088 std::next(
MBBI) == CondBB->
end()
1101 return {*CondBB, *EndBB};
1105AArch64ExpandPseudo::expandRestoreZASave(MachineBasicBlock &
MBB,
1107 MachineInstr &
MI = *
MBBI;
1111 MachineInstrBuilder
Branch =
1114 auto [CondBB, EndBB] = expandConditionalPseudo(
MBB,
MBBI,
DL, Branch);
1116 MachineInstrBuilder MIB =
1119 for (
unsigned I = 2;
I <
MI.getNumOperands(); ++
I)
1120 MIB.
add(
MI.getOperand(
I));
1122 MIB.
addReg(
MI.getOperand(1).getReg(), RegState::Implicit);
1124 MI.eraseFromParent();
1135 [[maybe_unused]]
auto *RI =
MBB.getParent()->getSubtarget().getRegisterInfo();
1141 auto [CondBB, EndBB] = expandConditionalPseudo(
MBB,
MBBI,
DL, Branch);
1146 for (
unsigned I = 3;
I <
MI.getNumOperands(); ++
I)
1147 MIB.
add(
MI.getOperand(
I));
1150 .
addImm(AArch64SysReg::TPIDR2_EL0)
1152 bool ZeroZA =
MI.getOperand(1).getImm() != 0;
1153 bool ZeroZT0 =
MI.getOperand(2).getImm() != 0;
1155 assert(
MI.definesRegister(AArch64::ZAB0, RI) &&
"should define ZA!");
1161 assert(
MI.definesRegister(AArch64::ZT0, RI) &&
"should define ZT0!");
1166 MI.eraseFromParent();
1171AArch64ExpandPseudo::expandCondSMToggle(MachineBasicBlock &
MBB,
1173 MachineInstr &
MI = *
MBBI;
1180 MI.getParent()->successors().begin() ==
1181 MI.getParent()->successors().end()) {
1182 MI.eraseFromParent();
1225 switch (
MI.getOperand(2).getImm()) {
1229 Opc = AArch64::TBNZW;
1232 Opc = AArch64::TBZW;
1235 auto PStateSM =
MI.getOperand(3).getReg();
1237 unsigned SMReg32 =
TRI->getSubReg(PStateSM, AArch64::sub_32);
1238 MachineInstrBuilder Tbx =
1241 auto [CondBB, EndBB] = expandConditionalPseudo(
MBB,
MBBI,
DL, Tbx);
1243 MachineInstrBuilder MIB =
BuildMI(CondBB, CondBB.
back(),
MI.getDebugLoc(),
1244 TII->get(AArch64::MSRpstatesvcrImm1));
1248 MIB.
add(
MI.getOperand(0));
1249 MIB.
add(
MI.getOperand(1));
1250 for (
unsigned i = 4; i <
MI.getNumOperands(); ++i)
1251 MIB.
add(
MI.getOperand(i));
1253 MI.eraseFromParent();
1257bool AArch64ExpandPseudo::expandMultiVecPseudo(
1259 TargetRegisterClass ContiguousClass, TargetRegisterClass StridedClass,
1260 unsigned ContiguousOp,
unsigned StridedOpc) {
1261 MachineInstr &
MI = *
MBBI;
1275 .
add(
MI.getOperand(0))
1276 .
add(
MI.getOperand(1))
1277 .
add(
MI.getOperand(2))
1278 .
add(
MI.getOperand(3));
1279 transferImpOps(
MI, MIB, MIB);
1280 MI.eraseFromParent();
1284bool AArch64ExpandPseudo::expandFormTuplePseudo(
1288 MachineInstr &
MI = *
MBBI;
1289 Register ReturnTuple =
MI.getOperand(0).getReg();
1291 const TargetRegisterInfo *
TRI =
1293 for (
unsigned I = 0;
I <
Size; ++
I) {
1294 Register FormTupleOpReg =
MI.getOperand(
I + 1).getReg();
1296 TRI->getSubReg(ReturnTuple, AArch64::zsub0 +
I);
1299 if (FormTupleOpReg != ReturnTupleSubReg)
1301 .
addReg(ReturnTupleSubReg, RegState::Define)
1306 MI.eraseFromParent();
1312bool AArch64ExpandPseudo::expandMI(MachineBasicBlock &
MBB,
1315 MachineInstr &
MI = *
MBBI;
1316 unsigned Opcode =
MI.getOpcode();
1320 if (OrigInstr != -1) {
1321 auto &Orig =
TII->get(OrigInstr);
1324 return expand_DestructiveOp(
MI,
MBB,
MBBI);
1332 case AArch64::BSPv8i8:
1333 case AArch64::BSPv16i8: {
1335 if (DstReg ==
MI.getOperand(3).getReg()) {
1338 TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BITv8i8
1339 : AArch64::BITv16i8))
1340 .
add(
MI.getOperand(0))
1341 .
add(
MI.getOperand(3))
1342 .
add(
MI.getOperand(2))
1343 .
add(
MI.getOperand(1));
1344 transferImpOps(
MI,
I,
I);
1345 }
else if (DstReg ==
MI.getOperand(2).getReg()) {
1348 TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BIFv8i8
1349 : AArch64::BIFv16i8))
1350 .
add(
MI.getOperand(0))
1351 .
add(
MI.getOperand(2))
1352 .
add(
MI.getOperand(3))
1353 .
add(
MI.getOperand(1));
1354 transferImpOps(
MI,
I,
I);
1357 if (DstReg ==
MI.getOperand(1).getReg()) {
1360 TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
1361 : AArch64::BSLv16i8))
1362 .
add(
MI.getOperand(0))
1363 .
add(
MI.getOperand(1))
1364 .
add(
MI.getOperand(2))
1365 .
add(
MI.getOperand(3));
1366 transferImpOps(
MI,
I,
I);
1371 MI.getOperand(1).isKill() &&
1372 MI.getOperand(1).getReg() !=
MI.getOperand(2).getReg() &&
1373 MI.getOperand(1).getReg() !=
MI.getOperand(3).getReg());
1375 TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::ORRv8i8
1376 : AArch64::ORRv16i8))
1384 TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
1385 : AArch64::BSLv16i8))
1386 .
add(
MI.getOperand(0))
1389 MI.getOperand(0).isRenamable()))
1390 .
add(
MI.getOperand(2))
1391 .
add(
MI.getOperand(3));
1392 transferImpOps(
MI, I2, I2);
1395 MI.eraseFromParent();
1399 case AArch64::ADDWrr:
1400 case AArch64::SUBWrr:
1401 case AArch64::ADDXrr:
1402 case AArch64::SUBXrr:
1403 case AArch64::ADDSWrr:
1404 case AArch64::SUBSWrr:
1405 case AArch64::ADDSXrr:
1406 case AArch64::SUBSXrr:
1407 case AArch64::ANDWrr:
1408 case AArch64::ANDXrr:
1409 case AArch64::BICWrr:
1410 case AArch64::BICXrr:
1411 case AArch64::ANDSWrr:
1412 case AArch64::ANDSXrr:
1413 case AArch64::BICSWrr:
1414 case AArch64::BICSXrr:
1415 case AArch64::EONWrr:
1416 case AArch64::EONXrr:
1417 case AArch64::EORWrr:
1418 case AArch64::EORXrr:
1419 case AArch64::ORNWrr:
1420 case AArch64::ORNXrr:
1421 case AArch64::ORRWrr:
1422 case AArch64::ORRXrr: {
1424 switch (
MI.getOpcode()) {
1427 case AArch64::ADDWrr: Opcode = AArch64::ADDWrs;
break;
1428 case AArch64::SUBWrr: Opcode = AArch64::SUBWrs;
break;
1429 case AArch64::ADDXrr: Opcode = AArch64::ADDXrs;
break;
1430 case AArch64::SUBXrr: Opcode = AArch64::SUBXrs;
break;
1431 case AArch64::ADDSWrr: Opcode = AArch64::ADDSWrs;
break;
1432 case AArch64::SUBSWrr: Opcode = AArch64::SUBSWrs;
break;
1433 case AArch64::ADDSXrr: Opcode = AArch64::ADDSXrs;
break;
1434 case AArch64::SUBSXrr: Opcode = AArch64::SUBSXrs;
break;
1435 case AArch64::ANDWrr: Opcode = AArch64::ANDWrs;
break;
1436 case AArch64::ANDXrr: Opcode = AArch64::ANDXrs;
break;
1437 case AArch64::BICWrr: Opcode = AArch64::BICWrs;
break;
1438 case AArch64::BICXrr: Opcode = AArch64::BICXrs;
break;
1439 case AArch64::ANDSWrr: Opcode = AArch64::ANDSWrs;
break;
1440 case AArch64::ANDSXrr: Opcode = AArch64::ANDSXrs;
break;
1441 case AArch64::BICSWrr: Opcode = AArch64::BICSWrs;
break;
1442 case AArch64::BICSXrr: Opcode = AArch64::BICSXrs;
break;
1443 case AArch64::EONWrr: Opcode = AArch64::EONWrs;
break;
1444 case AArch64::EONXrr: Opcode = AArch64::EONXrs;
break;
1445 case AArch64::EORWrr: Opcode = AArch64::EORWrs;
break;
1446 case AArch64::EORXrr: Opcode = AArch64::EORXrs;
break;
1447 case AArch64::ORNWrr: Opcode = AArch64::ORNWrs;
break;
1448 case AArch64::ORNXrr: Opcode = AArch64::ORNXrs;
break;
1449 case AArch64::ORRWrr: Opcode = AArch64::ORRWrs;
break;
1450 case AArch64::ORRXrr: Opcode = AArch64::ORRXrs;
break;
1454 MachineInstr *NewMI = MF.CreateMachineInstr(
1455 TII->get(Opcode),
MI.getDebugLoc(),
true);
1457 MachineInstrBuilder MIB1(MF, NewMI);
1458 MIB1->setPCSections(MF,
MI.getPCSections());
1459 MIB1.addReg(
MI.getOperand(0).getReg(), RegState::Define)
1460 .add(
MI.getOperand(1))
1461 .add(
MI.getOperand(2))
1463 transferImpOps(
MI, MIB1, MIB1);
1464 if (
auto DebugNumber =
MI.peekDebugInstrNum())
1466 MI.eraseFromParent();
1470 case AArch64::LOADgot: {
1473 const MachineOperand &MO1 =
MI.getOperand(1);
1479 TII->get(AArch64::LDRXl), DstReg);
1487 "Only expect globals, externalsymbols, or constant pools");
1492 MachineFunction &MF = *
MI.getParent()->getParent();
1494 MachineInstrBuilder MIB1 =
1497 MachineInstrBuilder MIB2;
1498 if (MF.
getSubtarget<AArch64Subtarget>().isTargetILP32()) {
1500 unsigned Reg32 =
TRI->getSubReg(DstReg, AArch64::sub_32);
1503 .
addReg(DstReg, RegState::Kill)
1504 .
addReg(DstReg, RegState::Implicit);
1508 .
add(
MI.getOperand(0))
1509 .
addUse(DstReg, RegState::Kill);
1523 "Only expect globals, externalsymbols, or constant pools");
1534 if (
MI.peekDebugInstrNum() != 0)
1536 transferImpOps(
MI, MIB1, MIB2);
1538 MI.eraseFromParent();
1541 case AArch64::MOVaddrBA: {
1542 MachineFunction &MF = *
MI.getParent()->getParent();
1543 if (MF.
getSubtarget<AArch64Subtarget>().isTargetMachO()) {
1548 assert(
MI.getOperand(1).getOffset() == 0 &&
"unexpected offset");
1558 TII->get(AArch64::LDRXui), DstReg)
1562 transferImpOps(
MI, MIB1, MIB2);
1563 MI.eraseFromParent();
1568 case AArch64::MOVaddr:
1569 case AArch64::MOVaddrJT:
1570 case AArch64::MOVaddrCP:
1571 case AArch64::MOVaddrTLS:
1572 case AArch64::MOVaddrEXT: {
1575 assert(DstReg != AArch64::XZR);
1576 MachineInstrBuilder MIB1 =
1578 .
add(
MI.getOperand(1));
1588 auto Tag =
MI.getOperand(1);
1590 Tag.setOffset(0x100000000);
1597 MachineInstrBuilder MIB2 =
1599 .
add(
MI.getOperand(0))
1601 .
add(
MI.getOperand(2))
1604 transferImpOps(
MI, MIB1, MIB2);
1605 MI.eraseFromParent();
1608 case AArch64::ADDlowTLS:
1611 .
add(
MI.getOperand(0))
1612 .
add(
MI.getOperand(1))
1613 .
add(
MI.getOperand(2))
1615 MI.eraseFromParent();
1618 case AArch64::MOVbaseTLS: {
1620 auto SysReg = AArch64SysReg::TPIDR_EL0;
1622 if (MF->
getSubtarget<AArch64Subtarget>().useEL3ForTP())
1623 SysReg = AArch64SysReg::TPIDR_EL3;
1624 else if (MF->
getSubtarget<AArch64Subtarget>().useEL2ForTP())
1625 SysReg = AArch64SysReg::TPIDR_EL2;
1626 else if (MF->
getSubtarget<AArch64Subtarget>().useEL1ForTP())
1627 SysReg = AArch64SysReg::TPIDR_EL1;
1628 else if (MF->
getSubtarget<AArch64Subtarget>().useROEL0ForTP())
1629 SysReg = AArch64SysReg::TPIDRRO_EL0;
1632 MI.eraseFromParent();
1636 case AArch64::MOVi32imm:
1638 case AArch64::MOVi64imm:
1640 case AArch64::RET_ReallyLR: {
1646 MachineInstrBuilder MIB =
1648 .
addReg(AArch64::LR, RegState::Undef);
1649 transferImpOps(
MI, MIB, MIB);
1650 MI.eraseFromParent();
1653 case AArch64::CMP_SWAP_8:
1654 return expandCMP_SWAP(
MBB,
MBBI, AArch64::LDAXRB, AArch64::STLXRB,
1657 AArch64::WZR, NextMBBI);
1658 case AArch64::CMP_SWAP_16:
1659 return expandCMP_SWAP(
MBB,
MBBI, AArch64::LDAXRH, AArch64::STLXRH,
1662 AArch64::WZR, NextMBBI);
1663 case AArch64::CMP_SWAP_32:
1664 return expandCMP_SWAP(
MBB,
MBBI, AArch64::LDAXRW, AArch64::STLXRW,
1667 AArch64::WZR, NextMBBI);
1668 case AArch64::CMP_SWAP_64:
1669 return expandCMP_SWAP(
MBB,
MBBI,
1670 AArch64::LDAXRX, AArch64::STLXRX, AArch64::SUBSXrs,
1672 AArch64::XZR, NextMBBI);
1673 case AArch64::CMP_SWAP_128:
1674 case AArch64::CMP_SWAP_128_RELEASE:
1675 case AArch64::CMP_SWAP_128_ACQUIRE:
1676 case AArch64::CMP_SWAP_128_MONOTONIC:
1677 return expandCMP_SWAP_128(
MBB,
MBBI, NextMBBI);
1679 case AArch64::AESMCrrTied:
1680 case AArch64::AESIMCrrTied: {
1681 MachineInstrBuilder MIB =
1683 TII->get(Opcode == AArch64::AESMCrrTied ? AArch64::AESMCrr :
1685 .
add(
MI.getOperand(0))
1686 .
add(
MI.getOperand(1));
1687 transferImpOps(
MI, MIB, MIB);
1688 MI.eraseFromParent();
1691 case AArch64::IRGstack: {
1693 const AArch64FunctionInfo *AFI = MF.
getInfo<AArch64FunctionInfo>();
1694 const AArch64FrameLowering *TFI =
1695 MF.
getSubtarget<AArch64Subtarget>().getFrameLowering();
1702 StackOffset FrameRegOffset = TFI->resolveFrameOffsetReference(
1708 if (FrameRegOffset) {
1710 SrcReg =
MI.getOperand(0).getReg();
1712 FrameRegOffset,
TII);
1715 .
add(
MI.getOperand(0))
1717 .
add(
MI.getOperand(2));
1718 MI.eraseFromParent();
1721 case AArch64::TAGPstack: {
1722 int64_t
Offset =
MI.getOperand(2).getImm();
1724 TII->get(
Offset >= 0 ? AArch64::ADDG : AArch64::SUBG))
1725 .
add(
MI.getOperand(0))
1726 .
add(
MI.getOperand(1))
1728 .
add(
MI.getOperand(4));
1729 MI.eraseFromParent();
1732 case AArch64::STGloop_wback:
1733 case AArch64::STZGloop_wback:
1734 return expandSetTagLoop(
MBB,
MBBI, NextMBBI);
1735 case AArch64::STGloop:
1736 case AArch64::STZGloop:
1738 "Non-writeback variants of STGloop / STZGloop should not "
1739 "survive past PrologEpilogInserter.");
1740 case AArch64::STR_ZZZZXI:
1741 case AArch64::STR_ZZZZXI_STRIDED_CONTIGUOUS:
1742 return expandSVESpillFill(
MBB,
MBBI, AArch64::STR_ZXI, 4);
1743 case AArch64::STR_ZZZXI:
1744 return expandSVESpillFill(
MBB,
MBBI, AArch64::STR_ZXI, 3);
1745 case AArch64::STR_ZZXI:
1746 case AArch64::STR_ZZXI_STRIDED_CONTIGUOUS:
1747 return expandSVESpillFill(
MBB,
MBBI, AArch64::STR_ZXI, 2);
1748 case AArch64::STR_PPXI:
1749 return expandSVESpillFill(
MBB,
MBBI, AArch64::STR_PXI, 2);
1750 case AArch64::LDR_ZZZZXI:
1751 case AArch64::LDR_ZZZZXI_STRIDED_CONTIGUOUS:
1752 return expandSVESpillFill(
MBB,
MBBI, AArch64::LDR_ZXI, 4);
1753 case AArch64::LDR_ZZZXI:
1754 return expandSVESpillFill(
MBB,
MBBI, AArch64::LDR_ZXI, 3);
1755 case AArch64::LDR_ZZXI:
1756 case AArch64::LDR_ZZXI_STRIDED_CONTIGUOUS:
1757 return expandSVESpillFill(
MBB,
MBBI, AArch64::LDR_ZXI, 2);
1758 case AArch64::LDR_PPXI:
1759 return expandSVESpillFill(
MBB,
MBBI, AArch64::LDR_PXI, 2);
1760 case AArch64::BLR_RVMARKER:
1761 case AArch64::BLRA_RVMARKER:
1762 return expandCALL_RVMARKER(
MBB,
MBBI);
1763 case AArch64::BLR_BTI:
1764 return expandCALL_BTI(
MBB,
MBBI);
1765 case AArch64::StoreSwiftAsyncContext:
1766 return expandStoreSwiftAsyncContext(
MBB,
MBBI);
1767 case AArch64::STSHH_ATOMIC_STORE_SZ:
1768 return expandSTSHHAtomicStore(
MBB,
MBBI);
1769 case AArch64::RestoreZAPseudo:
1770 case AArch64::CommitZASavePseudo:
1771 case AArch64::MSRpstatePseudo: {
1772 auto *NewMBB = [&] {
1774 case AArch64::RestoreZAPseudo:
1775 return expandRestoreZASave(
MBB,
MBBI);
1776 case AArch64::CommitZASavePseudo:
1777 return expandCommitZASave(
MBB,
MBBI);
1778 case AArch64::MSRpstatePseudo:
1779 return expandCondSMToggle(
MBB,
MBBI);
1788 case AArch64::InOutZAUsePseudo:
1789 case AArch64::RequiresZASavePseudo:
1790 case AArch64::RequiresZT0SavePseudo:
1791 case AArch64::SMEStateAllocPseudo:
1792 case AArch64::COALESCER_BARRIER_FPR16:
1793 case AArch64::COALESCER_BARRIER_FPR32:
1794 case AArch64::COALESCER_BARRIER_FPR64:
1795 case AArch64::COALESCER_BARRIER_FPR128:
1796 MI.eraseFromParent();
1798 case AArch64::LD1B_2Z_IMM_PSEUDO:
1799 return expandMultiVecPseudo(
1800 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1801 AArch64::LD1B_2Z_IMM, AArch64::LD1B_2Z_STRIDED_IMM);
1802 case AArch64::LD1H_2Z_IMM_PSEUDO:
1803 return expandMultiVecPseudo(
1804 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1805 AArch64::LD1H_2Z_IMM, AArch64::LD1H_2Z_STRIDED_IMM);
1806 case AArch64::LD1W_2Z_IMM_PSEUDO:
1807 return expandMultiVecPseudo(
1808 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1809 AArch64::LD1W_2Z_IMM, AArch64::LD1W_2Z_STRIDED_IMM);
1810 case AArch64::LD1D_2Z_IMM_PSEUDO:
1811 return expandMultiVecPseudo(
1812 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1813 AArch64::LD1D_2Z_IMM, AArch64::LD1D_2Z_STRIDED_IMM);
1814 case AArch64::LDNT1B_2Z_IMM_PSEUDO:
1815 return expandMultiVecPseudo(
1816 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1817 AArch64::LDNT1B_2Z_IMM, AArch64::LDNT1B_2Z_STRIDED_IMM);
1818 case AArch64::LDNT1H_2Z_IMM_PSEUDO:
1819 return expandMultiVecPseudo(
1820 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1821 AArch64::LDNT1H_2Z_IMM, AArch64::LDNT1H_2Z_STRIDED_IMM);
1822 case AArch64::LDNT1W_2Z_IMM_PSEUDO:
1823 return expandMultiVecPseudo(
1824 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1825 AArch64::LDNT1W_2Z_IMM, AArch64::LDNT1W_2Z_STRIDED_IMM);
1826 case AArch64::LDNT1D_2Z_IMM_PSEUDO:
1827 return expandMultiVecPseudo(
1828 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1829 AArch64::LDNT1D_2Z_IMM, AArch64::LDNT1D_2Z_STRIDED_IMM);
1830 case AArch64::LD1B_2Z_PSEUDO:
1831 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR2RegClass,
1832 AArch64::ZPR2StridedRegClass, AArch64::LD1B_2Z,
1833 AArch64::LD1B_2Z_STRIDED);
1834 case AArch64::LD1H_2Z_PSEUDO:
1835 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR2RegClass,
1836 AArch64::ZPR2StridedRegClass, AArch64::LD1H_2Z,
1837 AArch64::LD1H_2Z_STRIDED);
1838 case AArch64::LD1W_2Z_PSEUDO:
1839 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR2RegClass,
1840 AArch64::ZPR2StridedRegClass, AArch64::LD1W_2Z,
1841 AArch64::LD1W_2Z_STRIDED);
1842 case AArch64::LD1D_2Z_PSEUDO:
1843 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR2RegClass,
1844 AArch64::ZPR2StridedRegClass, AArch64::LD1D_2Z,
1845 AArch64::LD1D_2Z_STRIDED);
1846 case AArch64::LDNT1B_2Z_PSEUDO:
1847 return expandMultiVecPseudo(
1848 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1849 AArch64::LDNT1B_2Z, AArch64::LDNT1B_2Z_STRIDED);
1850 case AArch64::LDNT1H_2Z_PSEUDO:
1851 return expandMultiVecPseudo(
1852 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1853 AArch64::LDNT1H_2Z, AArch64::LDNT1H_2Z_STRIDED);
1854 case AArch64::LDNT1W_2Z_PSEUDO:
1855 return expandMultiVecPseudo(
1856 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1857 AArch64::LDNT1W_2Z, AArch64::LDNT1W_2Z_STRIDED);
1858 case AArch64::LDNT1D_2Z_PSEUDO:
1859 return expandMultiVecPseudo(
1860 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1861 AArch64::LDNT1D_2Z, AArch64::LDNT1D_2Z_STRIDED);
1862 case AArch64::LD1B_4Z_IMM_PSEUDO:
1863 return expandMultiVecPseudo(
1864 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1865 AArch64::LD1B_4Z_IMM, AArch64::LD1B_4Z_STRIDED_IMM);
1866 case AArch64::LD1H_4Z_IMM_PSEUDO:
1867 return expandMultiVecPseudo(
1868 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1869 AArch64::LD1H_4Z_IMM, AArch64::LD1H_4Z_STRIDED_IMM);
1870 case AArch64::LD1W_4Z_IMM_PSEUDO:
1871 return expandMultiVecPseudo(
1872 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1873 AArch64::LD1W_4Z_IMM, AArch64::LD1W_4Z_STRIDED_IMM);
1874 case AArch64::LD1D_4Z_IMM_PSEUDO:
1875 return expandMultiVecPseudo(
1876 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1877 AArch64::LD1D_4Z_IMM, AArch64::LD1D_4Z_STRIDED_IMM);
1878 case AArch64::LDNT1B_4Z_IMM_PSEUDO:
1879 return expandMultiVecPseudo(
1880 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1881 AArch64::LDNT1B_4Z_IMM, AArch64::LDNT1B_4Z_STRIDED_IMM);
1882 case AArch64::LDNT1H_4Z_IMM_PSEUDO:
1883 return expandMultiVecPseudo(
1884 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1885 AArch64::LDNT1H_4Z_IMM, AArch64::LDNT1H_4Z_STRIDED_IMM);
1886 case AArch64::LDNT1W_4Z_IMM_PSEUDO:
1887 return expandMultiVecPseudo(
1888 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1889 AArch64::LDNT1W_4Z_IMM, AArch64::LDNT1W_4Z_STRIDED_IMM);
1890 case AArch64::LDNT1D_4Z_IMM_PSEUDO:
1891 return expandMultiVecPseudo(
1892 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1893 AArch64::LDNT1D_4Z_IMM, AArch64::LDNT1D_4Z_STRIDED_IMM);
1894 case AArch64::LD1B_4Z_PSEUDO:
1895 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR4RegClass,
1896 AArch64::ZPR4StridedRegClass, AArch64::LD1B_4Z,
1897 AArch64::LD1B_4Z_STRIDED);
1898 case AArch64::LD1H_4Z_PSEUDO:
1899 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR4RegClass,
1900 AArch64::ZPR4StridedRegClass, AArch64::LD1H_4Z,
1901 AArch64::LD1H_4Z_STRIDED);
1902 case AArch64::LD1W_4Z_PSEUDO:
1903 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR4RegClass,
1904 AArch64::ZPR4StridedRegClass, AArch64::LD1W_4Z,
1905 AArch64::LD1W_4Z_STRIDED);
1906 case AArch64::LD1D_4Z_PSEUDO:
1907 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR4RegClass,
1908 AArch64::ZPR4StridedRegClass, AArch64::LD1D_4Z,
1909 AArch64::LD1D_4Z_STRIDED);
1910 case AArch64::LDNT1B_4Z_PSEUDO:
1911 return expandMultiVecPseudo(
1912 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1913 AArch64::LDNT1B_4Z, AArch64::LDNT1B_4Z_STRIDED);
1914 case AArch64::LDNT1H_4Z_PSEUDO:
1915 return expandMultiVecPseudo(
1916 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1917 AArch64::LDNT1H_4Z, AArch64::LDNT1H_4Z_STRIDED);
1918 case AArch64::LDNT1W_4Z_PSEUDO:
1919 return expandMultiVecPseudo(
1920 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1921 AArch64::LDNT1W_4Z, AArch64::LDNT1W_4Z_STRIDED);
1922 case AArch64::LDNT1D_4Z_PSEUDO:
1923 return expandMultiVecPseudo(
1924 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1925 AArch64::LDNT1D_4Z, AArch64::LDNT1D_4Z_STRIDED);
1926 case AArch64::FORM_TRANSPOSED_REG_TUPLE_X2_PSEUDO:
1927 return expandFormTuplePseudo(
MBB,
MBBI, NextMBBI, 2);
1928 case AArch64::FORM_TRANSPOSED_REG_TUPLE_X4_PSEUDO:
1929 return expandFormTuplePseudo(
MBB,
MBBI, NextMBBI, 4);
1936bool AArch64ExpandPseudo::expandMBB(MachineBasicBlock &
MBB) {
1949bool AArch64ExpandPseudo::runOnMachineFunction(MachineFunction &MF) {
1953 for (
auto &
MBB : MF)
1960 return new AArch64ExpandPseudo();
#define AARCH64_EXPAND_PSEUDO_NAME
MachineInstrBuilder & UseMI
static MachineInstr * createCallWithOps(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, const AArch64InstrInfo *TII, unsigned Opcode, ArrayRef< MachineOperand > ExplicitOps, unsigned RegMaskStartIdx)
static constexpr unsigned ZERO_ALL_ZA_MASK
static MachineInstr * createCall(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, const AArch64InstrInfo *TII, MachineOperand &CallTarget, unsigned RegMaskStartIdx)
MachineInstrBuilder MachineInstrBuilder & DefMI
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
MachineBasicBlock MachineBasicBlock::iterator MBBI
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
const HexagonInstrInfo * TII
const AbstractManglingParser< Derived, Alloc >::OperatorInfo AbstractManglingParser< Derived, Alloc >::Ops[]
This file implements the LivePhysRegs utility for tracking liveness of physical registers.
This file declares the MachineConstantPool class which is an abstract constant pool to keep track of ...
Register const TargetRegisterInfo * TRI
Promote Memory to Register
static MCRegister getReg(const MCDisassembler *D, unsigned RC, unsigned RegNo)
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
unsigned getTaggedBasePointerOffset() const
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
FunctionPass class - This class is used to implement most global optimizations.
Describe properties that are true of each instruction in the target description file.
LLVM_ABI instr_iterator insert(instr_iterator I, MachineInstr *M)
Insert MI into the instruction list before I, possibly inside a bundle.
const BasicBlock * getBasicBlock() const
Return the LLVM basic block that this instance corresponded to originally.
LLVM_ABI void addSuccessor(MachineBasicBlock *Succ, BranchProbability Prob=BranchProbability::getUnknown())
Add Succ as a successor of this MachineBasicBlock.
LLVM_ABI MachineBasicBlock * splitAt(MachineInstr &SplitInst, bool UpdateLiveIns=true, LiveIntervals *LIS=nullptr)
Split a basic block into 2 pieces at SplitPoint.
LLVM_ABI void eraseFromParent()
This method unlinks 'this' from the containing function and deletes it.
const MachineFunction * getParent() const
Return the MachineFunction containing this basic block.
iterator_range< succ_iterator > successors()
MachineInstrBundleIterator< MachineInstr > iterator
unsigned getConstantPoolIndex(const Constant *C, Align Alignment)
getConstantPoolIndex - Create a new entry in the constant pool or return an existing one.
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
void moveAdditionalCallInfo(const MachineInstr *Old, const MachineInstr *New)
Move the call site info from Old to New call site info.
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
Ty * getInfo()
getInfo - Keep track of various per-function pieces of information for backends that would like to do...
MachineConstantPool * getConstantPool()
getConstantPool - Return the constant pool object for the current function.
MachineBasicBlock * CreateMachineBasicBlock(const BasicBlock *BB=nullptr, std::optional< UniqueBBID > BBID=std::nullopt)
CreateMachineInstr - Allocate a new MachineInstr.
void insert(iterator MBBI, MachineBasicBlock *MBB)
const TargetMachine & getTarget() const
getTarget - Return the target machine this machine code is compiled with.
const MachineInstrBuilder & addExternalSymbol(const char *FnName, unsigned TargetFlags=0) const
const MachineInstrBuilder & addUse(Register RegNo, RegState Flags={}, unsigned SubReg=0) const
Add a virtual register use operand.
const MachineInstrBuilder & addReg(Register RegNo, RegState Flags={}, unsigned SubReg=0) const
Add a new virtual register operand.
const MachineInstrBuilder & setMIFlag(MachineInstr::MIFlag Flag) const
const MachineInstrBuilder & addImm(int64_t Val) const
Add a new immediate operand.
const MachineInstrBuilder & add(const MachineOperand &MO) const
const MachineInstrBuilder & addConstantPoolIndex(unsigned Idx, int Offset=0, unsigned TargetFlags=0) const
const MachineInstrBuilder & addGlobalAddress(const GlobalValue *GV, int64_t Offset=0, unsigned TargetFlags=0) const
const MachineInstrBuilder & addMBB(MachineBasicBlock *MBB, unsigned TargetFlags=0) const
const MachineInstrBuilder & addDef(Register RegNo, RegState Flags={}, unsigned SubReg=0) const
Add a virtual register definition operand.
const MachineInstrBuilder & cloneMemRefs(const MachineInstr &OtherMI) const
const MachineInstrBuilder & setMIFlags(unsigned Flags) const
MachineInstr * getInstr() const
If conversion operators fail, use this method to get the MachineInstr explicitly.
Representation of each machine instruction.
void setDebugInstrNum(unsigned Num)
Set instruction number of this MachineInstr.
MachineOperand class - Representation of each machine instruction operand.
const GlobalValue * getGlobal() const
bool isReg() const
isReg - Tests if this is a MO_Register operand.
bool isCPI() const
isCPI - Tests if this is a MO_ConstantPoolIndex operand.
bool isSymbol() const
isSymbol - Tests if this is a MO_ExternalSymbol operand.
unsigned getTargetFlags() const
bool isGlobal() const
isGlobal - Tests if this is a MO_GlobalAddress operand.
const char * getSymbolName() const
Register getReg() const
getReg - Returns the register number.
static MachineOperand CreateReg(Register Reg, bool isDef, bool isImp=false, bool isKill=false, bool isDead=false, bool isUndef=false, bool isEarlyClobber=false, unsigned SubReg=0, bool isDebug=false, bool isInternalRead=false, bool isRenamable=false)
int64_t getOffset() const
Return the offset from the symbol in this operand.
MCRegister asMCReg() const
Utility to check-convert this value to a MCRegister.
void push_back(const T &Elt)
StringRef - Represent a constant reference to a string, i.e.
CodeModel::Model getCodeModel() const
Returns the code model.
ArrayRef< MCPhysReg > getRegisters() const
virtual const TargetRegisterInfo * getRegisterInfo() const =0
Return the target's register information.
self_iterator getIterator()
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
@ MO_NC
MO_NC - Indicates whether the linker is expected to check the symbol reference for overflow.
@ MO_PAGEOFF
MO_PAGEOFF - A symbol operand with this flag represents the offset of that symbol within a 4K page.
@ MO_PREL
MO_PREL - Indicates that the bits of the symbol operand represented by MO_G0 etc are PC relative.
@ MO_PAGE
MO_PAGE - A symbol operand with this flag represents the pc-relative offset of the 4K page containing...
@ MO_TAGGED
MO_TAGGED - With MO_PAGE, indicates that the page includes a memory tag in bits 56-63.
@ MO_G3
MO_G3 - A symbol operand with this flag (granule 3) represents the high 16-bits of a 64-bit address,...
static unsigned getArithExtendImm(AArch64_AM::ShiftExtendType ET, unsigned Imm)
getArithExtendImm - Encode the extend type and shift amount for an arithmetic instruction: imm: 3-bit...
static unsigned getShifterImm(AArch64_AM::ShiftExtendType ST, unsigned Imm)
getShifterImm - Encode the shift type and amount: imm: 6-bit shift amount shifter: 000 ==> lsl 001 ==...
void expandMOVImm(uint64_t Imm, unsigned BitSize, SmallVectorImpl< ImmInsnModel > &Insn)
Expand a MOVi32imm or MOVi64imm pseudo instruction to one or more real move-immediate instructions to...
@ Destructive2xRegImmUnpred
@ DestructiveInstTypeMask
@ DestructiveUnaryPassthru
@ DestructiveTernaryCommWithRev
@ DestructiveBinaryCommWithRev
int32_t getSVERevInstr(uint32_t Opcode)
int32_t getSVENonRevInstr(uint32_t Opcode)
int32_t getSVEPseudoMap(uint32_t Opcode)
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
BaseReg
Stack frame base register. Bit 0 of FREInfo.Info.
This is an optimization pass for GlobalISel generic memory operations.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
LLVM_ABI void finalizeBundle(MachineBasicBlock &MBB, MachineBasicBlock::instr_iterator FirstMI, MachineBasicBlock::instr_iterator LastMI)
finalizeBundle - Finalize a machine instruction bundle which includes a sequence of instructions star...
MachineInstrBuilder BuildMI(MachineFunction &MF, const MIMetadata &MIMD, const MCInstrDesc &MCID)
Builder interface. Specify how to create the initial instruction itself.
RegState
Flags to represent properties of register accesses.
@ Kill
The last use of a register.
constexpr RegState getKillRegState(bool B)
APFloat abs(APFloat X)
Returns the absolute value of the argument.
constexpr RegState getDeadRegState(bool B)
constexpr RegState getRenamableRegState(bool B)
FunctionPass * createAArch64ExpandPseudoPass()
Returns an instance of the pseudo instruction expansion pass.
void emitFrameOffset(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, const DebugLoc &DL, unsigned DestReg, unsigned SrcReg, StackOffset Offset, const TargetInstrInfo *TII, MachineInstr::MIFlag=MachineInstr::NoFlags, bool SetNZCV=false, bool NeedsWinCFI=false, bool *HasWinCFI=nullptr, bool EmitCFAOffset=false, StackOffset InitialOffset={}, unsigned FrameReg=AArch64::SP)
emitFrameOffset - Emit instructions as needed to set DestReg to SrcReg plus Offset.
LLVM_ABI void report_fatal_error(Error Err, bool gen_crash_diag=true)
constexpr RegState getDefRegState(bool B)
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
bool is_contained(R &&Range, const E &Element)
Returns true if Element is found in Range.
void computeAndAddLiveIns(LivePhysRegs &LiveRegs, MachineBasicBlock &MBB)
Convenience function combining computeLiveIns() and addLiveIns().
constexpr RegState getUndefRegState(bool B)