@@ -441,10 +441,6 @@ class LowerTypeTestsModule {
   // Cache variable used by hasBranchTargetEnforcement().
   int HasBranchTargetEnforcement = -1;
 
-  // The jump table type we ended up deciding on. (Usually the same as
-  // Arch, except that 'arm' and 'thumb' are often interchangeable.)
-  Triple::ArchType JumpTableArch = Triple::UnknownArch;
-
   IntegerType *Int1Ty = Type::getInt1Ty(M.getContext());
   IntegerType *Int8Ty = Type::getInt8Ty(M.getContext());
   PointerType *PtrTy = PointerType::getUnqual(M.getContext());
@@ -525,11 +521,8 @@ class LowerTypeTestsModule {
   Triple::ArchType
   selectJumpTableArmEncoding(ArrayRef<GlobalTypeMember *> Functions);
   bool hasBranchTargetEnforcement();
-  unsigned getJumpTableEntrySize();
-  Type *getJumpTableEntryType();
-  void createJumpTableEntry(raw_ostream &AsmOS, raw_ostream &ConstraintOS,
-                            Triple::ArchType JumpTableArch,
-                            SmallVectorImpl<Value *> &AsmArgs, Function *Dest);
+  unsigned getJumpTableEntrySize(Triple::ArchType JumpTableArch);
+  InlineAsm *createJumpTableEntryAsm(Triple::ArchType JumpTableArch);
   void verifyTypeMDNode(GlobalObject *GO, MDNode *Type);
   void buildBitSetsFromFunctions(ArrayRef<Metadata *> TypeIds,
                                  ArrayRef<GlobalTypeMember *> Functions);
@@ -548,7 +541,8 @@ class LowerTypeTestsModule {
   void findGlobalVariableUsersOf(Constant *C,
                                  SmallSetVector<GlobalVariable *, 8> &Out);
 
-  void createJumpTable(Function *F, ArrayRef<GlobalTypeMember *> Functions);
+  void createJumpTable(Function *F, ArrayRef<GlobalTypeMember *> Functions,
+                       Triple::ArchType JumpTableArch);
 
   /// replaceCfiUses - Go through the uses list for this definition
   /// and make each use point to "V" instead of "this" when the use is outside
@@ -1245,7 +1239,8 @@ bool LowerTypeTestsModule::hasBranchTargetEnforcement() {
   return HasBranchTargetEnforcement;
 }
 
-unsigned LowerTypeTestsModule::getJumpTableEntrySize() {
+unsigned
+LowerTypeTestsModule::getJumpTableEntrySize(Triple::ArchType JumpTableArch) {
   switch (JumpTableArch) {
   case Triple::x86:
   case Triple::x86_64:
@@ -1278,33 +1273,32 @@ unsigned LowerTypeTestsModule::getJumpTableEntrySize() {
   }
 }
 
-// Create a jump table entry for the target. This consists of an instruction
-// sequence containing a relative branch to Dest. Appends inline asm text,
-// constraints and arguments to AsmOS, ConstraintOS and AsmArgs.
-void LowerTypeTestsModule::createJumpTableEntry(
-    raw_ostream &AsmOS, raw_ostream &ConstraintOS,
-    Triple::ArchType JumpTableArch, SmallVectorImpl<Value *> &AsmArgs,
-    Function *Dest) {
-  unsigned ArgIndex = AsmArgs.size();
+// Create an inline asm constant representing a jump table entry for the target.
+// This consists of an instruction sequence containing a relative branch to
+// Dest.
+InlineAsm *
+LowerTypeTestsModule::createJumpTableEntryAsm(Triple::ArchType JumpTableArch) {
+  std::string Asm;
+  raw_string_ostream AsmOS(Asm);
 
   if (JumpTableArch == Triple::x86 || JumpTableArch == Triple::x86_64) {
     bool Endbr = false;
     if (const auto *MD = mdconst::extract_or_null<ConstantInt>(
-            Dest->getParent()->getModuleFlag("cf-protection-branch")))
+            M.getModuleFlag("cf-protection-branch")))
       Endbr = !MD->isZero();
     if (Endbr)
       AsmOS << (JumpTableArch == Triple::x86 ? "endbr32\n" : "endbr64\n");
-    AsmOS << "jmp ${" << ArgIndex << ":c}@plt\n";
+    AsmOS << "jmp ${0:c}@plt\n";
     if (Endbr)
       AsmOS << ".balign 16, 0xcc\n";
     else
       AsmOS << "int3\nint3\nint3\n";
   } else if (JumpTableArch == Triple::arm) {
-    AsmOS << "b $" << ArgIndex << "\n";
+    AsmOS << "b $0\n";
   } else if (JumpTableArch == Triple::aarch64) {
     if (hasBranchTargetEnforcement())
       AsmOS << "bti c\n";
-    AsmOS << "b $" << ArgIndex << "\n";
+    AsmOS << "b $0\n";
   } else if (JumpTableArch == Triple::thumb) {
     if (!CanUseThumbBWJumpTable) {
       // In Armv6-M, this sequence will generate a branch without corrupting
@@ -1328,28 +1322,26 @@ void LowerTypeTestsModule::createJumpTableEntry(
             << "str r0, [sp, #4]\n"
             << "pop {r0,pc}\n"
             << ".balign 4\n"
-            << "1: .word $" << ArgIndex << " - (0b + 4)\n";
+            << "1: .word $0 - (0b + 4)\n";
     } else {
       if (hasBranchTargetEnforcement())
         AsmOS << "bti\n";
-      AsmOS << "b.w $" << ArgIndex << "\n";
+      AsmOS << "b.w $0\n";
     }
   } else if (JumpTableArch == Triple::riscv32 ||
              JumpTableArch == Triple::riscv64) {
-    AsmOS << "tail $" << ArgIndex << "@plt\n";
+    AsmOS << "tail $0@plt\n";
   } else if (JumpTableArch == Triple::loongarch64) {
-    AsmOS << "pcalau12i $$t0, %pc_hi20($" << ArgIndex << ")\n"
-          << "jirl $$r0, $$t0, %pc_lo12($" << ArgIndex << ")\n";
+    AsmOS << "pcalau12i $$t0, %pc_hi20($0)\n"
+          << "jirl $$r0, $$t0, %pc_lo12($0)\n";
   } else {
     report_fatal_error("Unsupported architecture for jump tables");
   }
 
-  ConstraintOS << (ArgIndex > 0 ? ",s" : "s");
-  AsmArgs.push_back(Dest);
-}
-
-Type *LowerTypeTestsModule::getJumpTableEntryType() {
-  return ArrayType::get(Int8Ty, getJumpTableEntrySize());
+  return InlineAsm::get(
+      FunctionType::get(Type::getVoidTy(M.getContext()), PtrTy, false),
+      AsmOS.str(), "s",
+      /*hasSideEffects=*/true);
 }
 
 /// Given a disjoint set of type identifiers and functions, build the bit sets
@@ -1498,12 +1490,18 @@ Triple::ArchType LowerTypeTestsModule::selectJumpTableArmEncoding(
 }
 
 void LowerTypeTestsModule::createJumpTable(
-    Function *F, ArrayRef<GlobalTypeMember *> Functions) {
+    Function *F, ArrayRef<GlobalTypeMember *> Functions,
+    Triple::ArchType JumpTableArch) {
   std::string AsmStr, ConstraintStr;
   raw_string_ostream AsmOS(AsmStr), ConstraintOS(ConstraintStr);
   SmallVector<Value *, 16> AsmArgs;
   AsmArgs.reserve(Functions.size() * 2);
 
+  BasicBlock *BB = BasicBlock::Create(M.getContext(), "entry", F);
+  IRBuilder<> IRB(BB);
+
+  InlineAsm *JumpTableAsm = createJumpTableEntryAsm(JumpTableArch);
+
   // Check if all entries have the NoUnwind attribute.
   // If all entries have it, we can safely mark the
   // cfi.jumptable as NoUnwind, otherwise, direct calls
@@ -1514,12 +1512,12 @@ void LowerTypeTestsModule::createJumpTable(
              ->hasFnAttribute(llvm::Attribute::NoUnwind)) {
       areAllEntriesNounwind = false;
     }
-    createJumpTableEntry(AsmOS, ConstraintOS, JumpTableArch, AsmArgs,
-                         cast<Function>(GTM->getGlobal()));
+    IRB.CreateCall(JumpTableAsm, GTM->getGlobal());
   }
+  IRB.CreateUnreachable();
 
   // Align the whole table by entry size.
-  F->setAlignment(Align(getJumpTableEntrySize()));
+  F->setAlignment(Align(getJumpTableEntrySize(JumpTableArch)));
   // Skip prologue.
   // Disabled on win32 due to https://llvm.org/bugs/show_bug.cgi?id=28641#c3.
   // Luckily, this function does not get any prologue even without the
@@ -1568,21 +1566,6 @@ void LowerTypeTestsModule::createJumpTable(
 
   // Make sure we do not inline any calls to the cfi.jumptable.
   F->addFnAttr(Attribute::NoInline);
-
-  BasicBlock *BB = BasicBlock::Create(M.getContext(), "entry", F);
-  IRBuilder<> IRB(BB);
-
-  SmallVector<Type *, 16> ArgTypes;
-  ArgTypes.reserve(AsmArgs.size());
-  for (const auto &Arg : AsmArgs)
-    ArgTypes.push_back(Arg->getType());
-  InlineAsm *JumpTableAsm =
-      InlineAsm::get(FunctionType::get(IRB.getVoidTy(), ArgTypes, false),
-                     AsmOS.str(), ConstraintOS.str(),
-                     /*hasSideEffects=*/true);
-
-  IRB.CreateCall(JumpTableAsm, AsmArgs);
-  IRB.CreateUnreachable();
 }
 
 /// Given a disjoint set of type identifiers and functions, build a jump table
@@ -1669,11 +1652,11 @@ void LowerTypeTestsModule::buildBitSetsFromFunctionsNative(
 
   // Decide on the jump table encoding, so that we know how big the
   // entries will be.
-  JumpTableArch = selectJumpTableArmEncoding(Functions);
+  Triple::ArchType JumpTableArch = selectJumpTableArmEncoding(Functions);
 
   // Build a simple layout based on the regular layout of jump tables.
   DenseMap<GlobalTypeMember *, uint64_t> GlobalLayout;
-  unsigned EntrySize = getJumpTableEntrySize();
+  unsigned EntrySize = getJumpTableEntrySize(JumpTableArch);
   for (unsigned I = 0; I != Functions.size(); ++I)
     GlobalLayout[Functions[I]] = I * EntrySize;
 
@@ -1684,7 +1667,7 @@ void LowerTypeTestsModule::buildBitSetsFromFunctionsNative(
                        M.getDataLayout().getProgramAddressSpace(),
                        ".cfi.jumptable", &M);
   ArrayType *JumpTableType =
-      ArrayType::get(getJumpTableEntryType(), Functions.size());
+      ArrayType::get(ArrayType::get(Int8Ty, EntrySize), Functions.size());
   auto JumpTable = ConstantExpr::getPointerCast(
       JumpTableFn, PointerType::getUnqual(M.getContext()));
 
@@ -1742,7 +1725,7 @@ void LowerTypeTestsModule::buildBitSetsFromFunctionsNative(
     }
   }
 
-  createJumpTable(JumpTableFn, Functions);
+  createJumpTable(JumpTableFn, Functions, JumpTableArch);
 }
 
 /// Assign a dummy layout using an incrementing counter, tag each function
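For context, the shape this patch moves to can be reproduced outside the pass. Below is a minimal, self-contained sketch (not part of this patch): a single InlineAsm constant of type void(ptr) with one "s" constraint, created once and then called once per jump-table target, as createJumpTableEntryAsm() and createJumpTable() now do. The x86_64 entry string and all names here ("a", "b", ".cfi.jumptable.sketch") are illustrative only.

// Standalone sketch (assumption: recent LLVM with opaque pointers); builds a
// tiny module whose jump table is a series of calls to one parameterized
// inline-asm entry, then verifies and prints it.
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Verifier.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;
  Module M("jumptable-sketch", Ctx);
  Type *VoidTy = Type::getVoidTy(Ctx);
  PointerType *PtrTy = PointerType::getUnqual(Ctx);

  // Two dummy targets standing in for CFI-instrumented functions.
  FunctionType *FnTy = FunctionType::get(VoidTy, /*isVarArg=*/false);
  Function *A = Function::Create(FnTy, GlobalValue::ExternalLinkage, "a", &M);
  Function *B = Function::Create(FnTy, GlobalValue::ExternalLinkage, "b", &M);

  // One parameterized entry: void(ptr) inline asm with constraint "s", so the
  // single operand $0 must be a constant symbol (here, the target function).
  InlineAsm *Entry = InlineAsm::get(
      FunctionType::get(VoidTy, PtrTy, /*isVarArg=*/false),
      "jmp ${0:c}@plt\nint3\nint3\nint3\n", "s", /*hasSideEffects=*/true);

  // Jump table body: one call to the same InlineAsm per target, then
  // unreachable, mirroring the new createJumpTable() structure.
  Function *JT = Function::Create(FnTy, GlobalValue::PrivateLinkage,
                                  ".cfi.jumptable.sketch", &M);
  IRBuilder<> IRB(BasicBlock::Create(Ctx, "entry", JT));
  for (Function *Target : {A, B})
    IRB.CreateCall(Entry, Target);
  IRB.CreateUnreachable();

  if (verifyModule(M, &errs()))
    return 1;
  M.print(outs(), nullptr);
  return 0;
}

Compared with the old approach of concatenating one large asm string and growing an ",s" constraint list per entry, the constraint string stays constant and only the number of calls scales with the number of jump-table entries.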