6 changes: 4 additions & 2 deletions llvm/lib/Target/AArch64/AArch64FrameLowering.cpp
@@ -723,7 +723,8 @@ void AArch64FrameLowering::resetCFIToInitialState(
CFIBuilder.buildDefCFA(AArch64::SP, 0);

// Flip the RA sign state.
if (MFI.shouldSignReturnAddress(MF))
if (MFI.shouldSignReturnAddress(MF) &&
!MF.getTarget().getTargetTriple().isOSBinFormatMachO())
MFI.branchProtectionPAuthLR() ? CFIBuilder.buildNegateRAStateWithPC()
: CFIBuilder.buildNegateRAState();

@@ -984,7 +985,8 @@ bool AArch64FrameLowering::shouldSignReturnAddressEverywhere(
if (MF.getTarget().getMCAsmInfo()->usesWindowsCFI())
return false;
const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>();
bool SignReturnAddressAll = AFI->shouldSignReturnAddress(/*SpillsLR=*/false);
bool SignReturnAddressAll =
AFI->shouldSignReturnAddress(MF, /*SpillsLR=*/false);
return SignReturnAddressAll;
}

14 changes: 7 additions & 7 deletions llvm/lib/Target/AArch64/AArch64InstrInfo.cpp
@@ -9555,8 +9555,10 @@ outliningCandidatesSigningScopeConsensus(const outliner::Candidate &a,
const auto &MFIa = a.getMF()->getInfo<AArch64FunctionInfo>();
const auto &MFIb = b.getMF()->getInfo<AArch64FunctionInfo>();

return MFIa->shouldSignReturnAddress(false) == MFIb->shouldSignReturnAddress(false) &&
MFIa->shouldSignReturnAddress(true) == MFIb->shouldSignReturnAddress(true);
return MFIa->shouldSignReturnAddress(*a.getMF(), false) ==
MFIb->shouldSignReturnAddress(*b.getMF(), false) &&
MFIa->shouldSignReturnAddress(*a.getMF(), true) ==
MFIb->shouldSignReturnAddress(*b.getMF(), true);
}

static bool
@@ -9626,10 +9628,8 @@ AArch64InstrInfo::getOutliningCandidateInfo(
// Performing a tail call may require extra checks when PAuth is enabled.
// If PAuth is disabled, set it to zero for uniformity.
unsigned NumBytesToCheckLRInTCEpilogue = 0;
if (RepeatedSequenceLocs[0]
.getMF()
->getInfo<AArch64FunctionInfo>()
->shouldSignReturnAddress(true)) {
const MachineFunction &MF = *RepeatedSequenceLocs[0].getMF();
if (MF.getInfo<AArch64FunctionInfo>()->shouldSignReturnAddress(MF, true)) {
// One PAC and one AUT instructions
NumBytesToCreateFrame += 8;

@@ -10433,7 +10433,7 @@ void AArch64InstrInfo::buildOutlinedFrame(
Et = MBB.insert(Et, LDRXpost);
}

bool ShouldSignReturnAddr = FI->shouldSignReturnAddress(!IsLeafFunction);
bool ShouldSignReturnAddr = FI->shouldSignReturnAddress(MF, !IsLeafFunction);

// If this is a tail call outlined function, then there's already a return.
if (OF.FrameConstructionID == MachineOutlinerTailCall ||
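Note: the consensus check updated above exists because two candidates can now disagree purely due to per-function state (for example, MachO "ptrauth-returns"), which is why each query carries its own MachineFunction. A compact sketch of the invariant being enforced (illustrative helper, not part of the patch):

// Two outlining candidates may only share an outlined function if they agree
// on the return-address-signing decision both with and without an LR spill.
static bool haveSigningConsensus(const MachineFunction &A,
                                 const MachineFunction &B) {
  const auto *FIA = A.getInfo<AArch64FunctionInfo>();
  const auto *FIB = B.getInfo<AArch64FunctionInfo>();
  for (bool SpillsLR : {false, true})
    if (FIA->shouldSignReturnAddress(A, SpillsLR) !=
        FIB->shouldSignReturnAddress(B, SpillsLR))
      return false;
  return true;
}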
18 changes: 14 additions & 4 deletions llvm/lib/Target/AArch64/AArch64MachineFunctionInfo.cpp
@@ -86,8 +86,7 @@ static std::pair<bool, bool> GetSignReturnAddress(const Function &F,
}

static bool ShouldSignWithBKey(const Function &F, const AArch64Subtarget &STI) {
if (!STI.getTargetTriple().isOSBinFormatMachO() &&
F.hasFnAttribute("ptrauth-returns"))
if (F.hasFnAttribute("ptrauth-returns"))
return true;
if (!F.hasFnAttribute("sign-return-address-key")) {
if (STI.getTargetTriple().isOSWindows())
@@ -173,7 +172,18 @@ MachineFunctionInfo *AArch64FunctionInfo::clone(
return DestMF.cloneInfo<AArch64FunctionInfo>(*this);
}

bool AArch64FunctionInfo::shouldSignReturnAddress(bool SpillsLR) const {
static bool shouldAuthenticateLR(const MachineFunction &MF) {
// Return address authentication can be enabled at the function level, using
// the "ptrauth-returns" attribute.
const AArch64Subtarget &Subtarget = MF.getSubtarget<AArch64Subtarget>();
return Subtarget.isTargetMachO() &&
MF.getFunction().hasFnAttribute("ptrauth-returns");
}

bool AArch64FunctionInfo::shouldSignReturnAddress(const MachineFunction &MF,
bool SpillsLR) const {
if (SpillsLR && shouldAuthenticateLR(MF))
return true;
if (!SignReturnAddress)
return false;
if (SignReturnAddressAll)
@@ -189,7 +199,7 @@ static bool isLRSpilled(const MachineFunction &MF) {

bool AArch64FunctionInfo::shouldSignReturnAddress(
const MachineFunction &MF) const {
return shouldSignReturnAddress(isLRSpilled(MF));
return shouldSignReturnAddress(MF, isLRSpilled(MF));
}

bool AArch64FunctionInfo::needsShadowCallStackPrologueEpilogue(
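For readers following the new two-argument overload: a minimal sketch of the decision it implements after this change. The tail of the function is collapsed in the diff above, so the sketch assumes the usual pac-ret fallthrough (sign only when LR is spilled); names are illustrative, not the LLVM API.

// Standalone restatement of shouldSignReturnAddress(MF, SpillsLR).
bool shouldSignReturnAddressSketch(bool SpillsLR, bool IsMachO,
                                   bool HasPtrAuthReturns,
                                   bool SignReturnAddress,
                                   bool SignReturnAddressAll) {
  // New MachO path: "ptrauth-returns" forces signing whenever LR is spilled,
  // independently of the pac-ret attributes.
  if (SpillsLR && IsMachO && HasPtrAuthReturns)
    return true;
  // Pre-existing pac-ret logic.
  if (!SignReturnAddress)
    return false;
  if (SignReturnAddressAll)
    return true;
  // Assumed fallthrough: "non-leaf" signs only when LR is spilled.
  return SpillsLR;
}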
2 changes: 1 addition & 1 deletion llvm/lib/Target/AArch64/AArch64MachineFunctionInfo.h
@@ -596,7 +596,7 @@ class AArch64FunctionInfo final : public MachineFunctionInfo {
}

bool shouldSignReturnAddress(const MachineFunction &MF) const;
bool shouldSignReturnAddress(bool SpillsLR) const;
bool shouldSignReturnAddress(const MachineFunction &MF, bool SpillsLR) const;

bool needsShadowCallStackPrologueEpilogue(MachineFunction &MF) const;

73 changes: 66 additions & 7 deletions llvm/lib/Target/AArch64/AArch64PointerAuth.cpp
@@ -14,8 +14,11 @@
#include "AArch64Subtarget.h"
#include "llvm/CodeGen/CFIInstBuilder.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/IR/CallingConv.h"

using namespace llvm;
using namespace llvm::AArch64PAuth;
@@ -96,6 +99,9 @@ static void emitPACCFI(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
if (!EmitCFI)
return;

if (MBB.getParent()->getTarget().getTargetTriple().isOSBinFormatMachO())
return;

auto &MF = *MBB.getParent();
auto &MFnI = *MF.getInfo<AArch64FunctionInfo>();

@@ -116,7 +122,7 @@ void AArch64PointerAuth::signLR(MachineFunction &MF,
// Debug location must be unknown, see AArch64FrameLowering::emitPrologue.
DebugLoc DL;

if (UseBKey) {
if (UseBKey && !MF.getTarget().getTargetTriple().isOSBinFormatMachO()) {
BuildMI(MBB, MBBI, DL, TII->get(AArch64::EMITBKEY))
.setMIFlag(MachineInstr::FrameSetup);
}
@@ -133,17 +139,15 @@ void AArch64PointerAuth::signLR(MachineFunction &MF,
if (MFnI.branchProtectionPAuthLR() && Subtarget->hasPAuthLR()) {
emitPACCFI(MBB, MBBI, MachineInstr::FrameSetup, EmitCFI);
BuildMI(MBB, MBBI, DL,
TII->get(MFnI.shouldSignWithBKey() ? AArch64::PACIBSPPC
: AArch64::PACIASPPC))
TII->get(UseBKey ? AArch64::PACIBSPPC : AArch64::PACIASPPC))
.setMIFlag(MachineInstr::FrameSetup)
->setPreInstrSymbol(MF, MFnI.getSigningInstrLabel());
} else {
BuildPACM(*Subtarget, MBB, MBBI, DL, MachineInstr::FrameSetup);
if (MFnI.branchProtectionPAuthLR())
emitPACCFI(MBB, MBBI, MachineInstr::FrameSetup, EmitCFI);
BuildMI(MBB, MBBI, DL,
TII->get(MFnI.shouldSignWithBKey() ? AArch64::PACIBSP
: AArch64::PACIASP))
TII->get(UseBKey ? AArch64::PACIBSP : AArch64::PACIASP))
.setMIFlag(MachineInstr::FrameSetup)
->setPreInstrSymbol(MF, MFnI.getSigningInstrLabel());
if (!MFnI.branchProtectionPAuthLR())
@@ -178,11 +182,41 @@ void AArch64PointerAuth::authenticateLR(
// instructions, namely RETA{A,B}, that can be used instead. In this case the
// DW_CFA_AARCH64_negate_ra_state can't be emitted.
bool TerminatorIsCombinable =
TI != MBB.end() && TI->getOpcode() == AArch64::RET;
TI != MBB.end() && (TI->getOpcode() == AArch64::RET ||
TI->getOpcode() == AArch64::RET_ReallyLR);
MCSymbol *PACSym = MFnI->getSigningInstrLabel();

const MachineFrameInfo &MFI = MF.getFrameInfo();
bool IsLRSpilled =
llvm::any_of(MFI.getCalleeSavedInfo(), [](const CalleeSavedInfo &Info) {
return Info.getReg() == AArch64::LR;
});

// In functions with popless epilogues (swiftcorocc calling convention with
// llvm.ret.popless), some returns don't restore SP, so we can't use RETAB
// because the authenticating discriminator (SP) won't match the signing
// discriminator. We need to check if this specific block restores SP.
// If SP is restored before the return, we can use RETAB; otherwise we need
// to compute the discriminator from FP and use AUTIB + separate RET.
bool IsSwiftCoroPartialReturn = [&]() {
if (!MFnI->hasPoplessEpilogue())
return false;

// Check if any instruction in the epilogue modifies SP.
if (llvm::any_of(make_range(MBB.begin(), MBB.getFirstTerminator()),
[&](const MachineInstr &I) {
return I.getFlag(MachineInstr::FrameDestroy) &&
I.modifiesRegister(AArch64::SP,
Subtarget->getRegisterInfo());
}))
return false;

return true;
}();

if (Subtarget->hasPAuth() && TerminatorIsCombinable && !NeedsWinCFI &&
!MF.getFunction().hasFnAttribute(Attribute::ShadowCallStack)) {
!MF.getFunction().hasFnAttribute(Attribute::ShadowCallStack) &&
!IsSwiftCoroPartialReturn) {
if (MFnI->branchProtectionPAuthLR() && Subtarget->hasPAuthLR()) {
assert(PACSym && "No PAC instruction to refer to");
emitPACSymOffsetIntoX16(*TII, MBB, MBBI, DL, PACSym);
@@ -198,6 +232,31 @@
.setMIFlag(MachineInstr::FrameDestroy);
}
MBB.erase(TI);
} else if (IsSwiftCoroPartialReturn && IsLRSpilled) {
// For popless epilogues that don't restore SP, we can't use RETAB because
// SP doesn't match. Instead, compute the correct discriminator from FP.
const auto *TRI = Subtarget->getRegisterInfo();

MachineBasicBlock::iterator EpilogStartI = MBB.getFirstTerminator();
MachineBasicBlock::iterator Begin = MBB.begin();
while (EpilogStartI != Begin) {
--EpilogStartI;
if (!EpilogStartI->getFlag(MachineInstr::FrameDestroy)) {
++EpilogStartI;
break;
}
if (EpilogStartI->readsRegister(AArch64::X16, TRI) ||
EpilogStartI->modifiesRegister(AArch64::X16, TRI))
report_fatal_error("unable to use x16 for popless ret LR auth");
}

emitFrameOffset(MBB, EpilogStartI, DL, AArch64::X16, AArch64::FP,
StackOffset::getFixed(16), TII, MachineInstr::FrameDestroy);
emitPACCFI(MBB, MBBI, MachineInstr::FrameDestroy, EmitAsyncCFI);
BuildMI(MBB, TI, DL, TII->get(AArch64::AUTIB), AArch64::LR)
.addUse(AArch64::LR)
.addUse(AArch64::X16)
.setMIFlag(MachineInstr::FrameDestroy);
} else {
if (MFnI->branchProtectionPAuthLR() && Subtarget->hasPAuthLR()) {
assert(PACSym && "No PAC instruction to refer to");
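Taken together, authenticateLR now chooses between three strategies; a rough sketch of the selection (illustrative names, not the LLVM API; the plain AUTIASP/AUTIBSP fallback lives in the collapsed part of the diff):

// How the epilogue picks an LR authentication strategy after this change.
enum class LRAuthStrategy {
  CombinedReturn, // fold auth into the return: RETAA/RETAB, discriminated by SP
  AutibFromFP,    // popless return: rebuild entry SP into X16, then AUTIB LR, X16
  AutiSP          // plain AUTIASP/AUTIBSP before the existing return
};

LRAuthStrategy pickLRAuthStrategy(bool HasPAuth, bool TerminatorIsRet,
                                  bool NeedsWinCFI, bool HasShadowCallStack,
                                  bool IsSwiftCoroPartialReturn,
                                  bool IsLRSpilled) {
  // RETAA/RETAB authenticate against the current SP, so they are only usable
  // when the epilogue has restored SP to its value at the signing point.
  if (HasPAuth && TerminatorIsRet && !NeedsWinCFI && !HasShadowCallStack &&
      !IsSwiftCoroPartialReturn)
    return LRAuthStrategy::CombinedReturn;
  // Popless (swiftcorocc) return that spilled LR: SP is not restored, so the
  // discriminator is recomputed from FP (X16 = FP + 16, the frame record) and
  // used with a separate AUTIB before the existing RET.
  if (IsSwiftCoroPartialReturn && IsLRSpilled)
    return LRAuthStrategy::AutibFromFP;
  return LRAuthStrategy::AutiSP;
}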
81 changes: 1 addition & 80 deletions llvm/lib/Target/AArch64/AArch64PrologueEpilogue.cpp
@@ -536,14 +536,6 @@ void AArch64PrologueEmitter::verifyPrologueClobbers() const {
}
#endif

static bool shouldAuthenticateLR(const MachineFunction &MF) {
// Return address authentication can be enabled at the function level, using
// the "ptrauth-returns" attribute.
const AArch64Subtarget &Subtarget = MF.getSubtarget<AArch64Subtarget>();
return Subtarget.isTargetMachO() &&
MF.getFunction().hasFnAttribute("ptrauth-returns");
}

void AArch64PrologueEmitter::determineLocalsStackSize(
uint64_t StackSize, uint64_t PrologueSaveSize) {
AFI->setLocalStackSize(StackSize - PrologueSaveSize);
@@ -725,18 +717,6 @@ void AArch64PrologueEmitter::emitPrologue() {
BuildMI(MBB, PrologueBeginI, DL, TII->get(AArch64::EMITMTETAGGED))
.setMIFlag(MachineInstr::FrameSetup);

// If we're saving LR, sign it first.
if (shouldAuthenticateLR(MF)) {
if (LLVM_UNLIKELY(!Subtarget.hasPAuth()))
report_fatal_error("arm64e LR authentication requires ptrauth");
for (const CalleeSavedInfo &Info : MFI.getCalleeSavedInfo()) {
if (Info.getReg() != AArch64::LR)
continue;
BuildMI(MBB, PrologueBeginI, DL, TII->get(AArch64::PACIBSP))
.setMIFlags(MachineInstr::FrameSetup);
}
}

// We signal the presence of a Swift extended frame to external tools by
// storing FP with 0b0001 in bits 63:60. In normal userland operation a simple
// ORR is sufficient, it is assumed a Swift kernel would initialize the TBI
@@ -1436,66 +1416,6 @@ void AArch64EpilogueEmitter::emitEpilogue() {
if (MF.getFunction().getCallingConv() == CallingConv::GHC)
return;

// If we're restoring LR, authenticate it before returning.
// Use scope_exit to ensure we do that last on all return paths.
auto InsertAuthLROnExit = make_scope_exit([&]() {
if (shouldAuthenticateLR(MF)) {
if (LLVM_UNLIKELY(!Subtarget.hasPAuth()))
report_fatal_error("arm64e LR authentication requires ptrauth");
for (const CalleeSavedInfo &Info : MFI.getCalleeSavedInfo()) {
if (Info.getReg() != AArch64::LR)
continue;
MachineBasicBlock::iterator TI = MBB.getFirstTerminator();

// When we're doing a popless ret (i.e., that doesn't restore SP), we
// can't rely on the exit SP being the same as the entry, but they need
// to match for the LR auth to succeed. Instead, derive the entry SP
// from our FP (using a -16 static offset for the size of the frame
// record itself), save that into X16, and use that as the discriminator
// in an AUTIB.
if (IsSwiftCoroPartialReturn) {
const auto *TRI = Subtarget.getRegisterInfo();

MachineBasicBlock::iterator EpilogStartI = MBB.getFirstTerminator();
MachineBasicBlock::iterator Begin = MBB.begin();
while (EpilogStartI != Begin) {
--EpilogStartI;
if (!EpilogStartI->getFlag(MachineInstr::FrameDestroy)) {
++EpilogStartI;
break;
}
if (EpilogStartI->readsRegister(AArch64::X16, TRI) ||
EpilogStartI->modifiesRegister(AArch64::X16, TRI))
report_fatal_error("unable to use x16 for popless ret LR auth");
}

emitFrameOffset(MBB, EpilogStartI, DL, AArch64::X16, AArch64::FP,
StackOffset::getFixed(16), TII,
MachineInstr::FrameDestroy);
BuildMI(MBB, TI, DL, TII->get(AArch64::AUTIB), AArch64::LR)
.addUse(AArch64::LR)
.addUse(AArch64::X16)
.setMIFlag(MachineInstr::FrameDestroy);
return;
}

if (TI != MBB.end() && TI->getOpcode() == AArch64::RET_ReallyLR) {
// If there is a terminator and it's a RET, we can fold AUTH into it.
// Be careful to keep the implicitly returned registers.
// By now, we don't need the ReallyLR pseudo, since it's only there
// to make it possible for LR to be used for non-RET purposes, and
// that happens in RA and PEI.
BuildMI(MBB, TI, DL, TII->get(AArch64::RETAB)).copyImplicitOps(*TI);
MBB.erase(TI);
} else {
// Otherwise, we could be in a shrink-wrapped or tail-calling block.
BuildMI(MBB, TI, DL, TII->get(AArch64::AUTIBSP));
}
}
}
});


// How much of the stack used by incoming arguments this function is expected
// to restore in this particular epilogue.
int64_t ArgumentStackToRestore = AFL.getArgumentStackToRestore(MF, MBB);
@@ -1923,6 +1843,7 @@ void AArch64EpilogueEmitter::finalizeEpilogue() const {
}
if (EmitCFI)
emitCalleeSavedGPRRestores(MBB.getFirstTerminator());

if (AFI->shouldSignReturnAddress(MF)) {
// If pac-ret+leaf is in effect, PAUTH_EPILOGUE pseudo instructions
// are inserted by emitPacRetPlusLeafHardening().