author     Andrew Trick <atrick@apple.com>    2013-11-19 03:29:56 +0000
committer  Andrew Trick <atrick@apple.com>    2013-11-19 03:29:56 +0000
commit     8ddf988ef4aaa8275bb2b58e9ef8b65ef8009f8c (patch)
tree       a162c337e7e7432977026200878a746727c4ba4a /lib
parent     ba0f991a78706068cc2e6a1c4ef4b0d8f7ce748b (diff)
Add an abstraction to handle patchpoint operands.
Hard-coded operand indices were scattered throughout the lowering stages and layers; this was extremely bug-prone.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@195093 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib')
-rw-r--r--  lib/CodeGen/StackMaps.cpp          88
-rw-r--r--  lib/Target/X86/X86MCInstLower.cpp  80
2 files changed, 101 insertions, 67 deletions
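
Before the patch itself, a minimal usage sketch of the new abstraction (illustration only: the helper names below are hypothetical, and the include path for PatchPointOpers is assumed to be the StackMaps header; the meta-operand positions follow the operand order documented in the patch, [<def>], <id>, <numBytes>, <target>, <numArgs>, <cc>, ...):

#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/StackMaps.h"  // assumed home of PatchPointOpers
using namespace llvm;

// Hypothetical helper: read patchpoint metadata by named position instead of
// by hard-coded operand index.
static int64_t getPatchPointID(const MachineInstr &MI) {
  PatchPointOpers Opers(&MI);
  // getMetaOper() skips the optional explicit <def>, so IDPos names the same
  // operand whether or not the patchpoint produces a result.
  return Opers.getMetaOper(PatchPointOpers::IDPos).getImm();
}

// Hypothetical helper: find the first scratch register (the implicit,
// early-clobber defs appended to the instruction).
static unsigned getFirstScratchReg(const MachineInstr &MI) {
  PatchPointOpers Opers(&MI);
  return MI.getOperand(Opers.getNextScratchIdx()).getReg();
}
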
diff --git a/lib/CodeGen/StackMaps.cpp b/lib/CodeGen/StackMaps.cpp
index 0eeec83117..86e51a1c79 100644
--- a/lib/CodeGen/StackMaps.cpp
+++ b/lib/CodeGen/StackMaps.cpp
@@ -28,10 +28,47 @@
using namespace llvm;
-void StackMaps::recordStackMap(const MachineInstr &MI, uint32_t ID,
- MachineInstr::const_mop_iterator MOI,
- MachineInstr::const_mop_iterator MOE,
- bool recordResult) {
+PatchPointOpers::PatchPointOpers(const MachineInstr *MI):
+ MI(MI),
+ HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
+ !MI->getOperand(0).isImplicit()),
+ IsAnyReg(MI->getOperand(getMetaIdx(CCPos)).getImm() == CallingConv::AnyReg) {
+
+#ifndef NDEBUG
+ {
+ unsigned CheckStartIdx = 0, e = MI->getNumOperands();
+ while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
+ MI->getOperand(CheckStartIdx).isDef() &&
+ !MI->getOperand(CheckStartIdx).isImplicit())
+ ++CheckStartIdx;
+
+ assert(getMetaIdx() == CheckStartIdx &&
+           "Unexpected additional definition in Patchpoint intrinsic.");
+ }
+#endif
+}
+
+unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
+ if (!StartIdx)
+ StartIdx = getVarIdx();
+
+ // Find the next scratch register (implicit def and early clobber)
+ unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
+ while (ScratchIdx < e &&
+ !(MI->getOperand(ScratchIdx).isReg() &&
+ MI->getOperand(ScratchIdx).isDef() &&
+ MI->getOperand(ScratchIdx).isImplicit() &&
+ MI->getOperand(ScratchIdx).isEarlyClobber()))
+ ++ScratchIdx;
+
+ assert(ScratchIdx != e && "No scratch register available");
+ return ScratchIdx;
+}
+
+void StackMaps::recordStackMapOpers(const MachineInstr &MI, uint32_t ID,
+ MachineInstr::const_mop_iterator MOI,
+ MachineInstr::const_mop_iterator MOE,
+ bool recordResult) {
MCContext &OutContext = AP.OutStreamer.getContext();
MCSymbol *MILabel = OutContext.CreateTempSymbol();
@@ -73,6 +110,49 @@ void StackMaps::recordStackMap(const MachineInstr &MI, uint32_t ID,
CSInfos.push_back(CallsiteInfo(CSOffsetExpr, ID, CallsiteLocs));
}
+static MachineInstr::const_mop_iterator
+getStackMapEndMOP(MachineInstr::const_mop_iterator MOI,
+ MachineInstr::const_mop_iterator MOE) {
+ for (; MOI != MOE; ++MOI)
+ if (MOI->isRegMask() || (MOI->isReg() && MOI->isImplicit()))
+ break;
+
+ return MOI;
+}
+
+void StackMaps::recordStackMap(const MachineInstr &MI) {
+  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");
+
+ int64_t ID = MI.getOperand(0).getImm();
+ assert((int32_t)ID == ID && "Stack maps hold 32-bit IDs");
+ recordStackMapOpers(MI, ID, llvm::next(MI.operands_begin(), 2),
+ getStackMapEndMOP(MI.operands_begin(),
+ MI.operands_end()));
+}
+
+void StackMaps::recordPatchPoint(const MachineInstr &MI) {
+  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");
+
+ PatchPointOpers opers(&MI);
+ int64_t ID = opers.getMetaOper(PatchPointOpers::IDPos).getImm();
+ assert((int32_t)ID == ID && "Stack maps hold 32-bit IDs");
+ MachineInstr::const_mop_iterator MOI =
+ llvm::next(MI.operands_begin(), opers.getStackMapStartIdx());
+ recordStackMapOpers(MI, ID, MOI, getStackMapEndMOP(MOI, MI.operands_end()),
+ opers.isAnyReg() && opers.hasDef());
+
+#ifndef NDEBUG
+ // verify anyregcc
+ LocationVec &Locations = CSInfos.back().Locations;
+ if (opers.isAnyReg()) {
+ unsigned NArgs = opers.getMetaOper(PatchPointOpers::NArgPos).getImm();
+ for (unsigned i = 0, e = (opers.hasDef() ? NArgs+1 : NArgs); i != e; ++i)
+ assert(Locations[i].LocType == Location::Register &&
+ "anyreg arg must be in reg.");
+ }
+#endif
+}
+
/// serializeToStackMapSection conceptually populates the following fields:
///
/// uint32 : Reserved (header)
diff --git a/lib/Target/X86/X86MCInstLower.cpp b/lib/Target/X86/X86MCInstLower.cpp
index 4e27ef0761..6649c825b6 100644
--- a/lib/Target/X86/X86MCInstLower.cpp
+++ b/lib/Target/X86/X86MCInstLower.cpp
@@ -716,6 +716,9 @@ X86AsmPrinter::stackmapOperandParser(MachineInstr::const_mop_iterator MOI,
"Register mask and implicit operands should not be processed.");
if (MOP.isImm()) {
+ // Verify anyregcc
+ // [<def>], <id>, <numBytes>, <target>, <numArgs>, <cc>, ...
+
switch (MOP.getImm()) {
default: llvm_unreachable("Unrecognized operand type.");
case StackMaps::DirectMemRefOp: {
@@ -756,82 +759,33 @@ X86AsmPrinter::stackmapOperandParser(MachineInstr::const_mop_iterator MOI,
Location(Location::Register, RC->getSize(), MOP.getReg(), 0), ++MOI);
}
-static MachineInstr::const_mop_iterator
-getStackMapEndMOP(MachineInstr::const_mop_iterator MOI,
- MachineInstr::const_mop_iterator MOE) {
- for (; MOI != MOE; ++MOI)
- if (MOI->isRegMask() || (MOI->isReg() && MOI->isImplicit()))
- break;
-
- return MOI;
-}
-
+// Lower a stackmap of the form:
+// <id>, <shadowBytes>, ...
static void LowerSTACKMAP(MCStreamer &OutStreamer,
StackMaps &SM,
const MachineInstr &MI)
{
- int64_t ID = MI.getOperand(0).getImm();
unsigned NumNOPBytes = MI.getOperand(1).getImm();
-
- assert((int32_t)ID == ID && "Stack maps hold 32-bit IDs");
- SM.recordStackMap(MI, ID, llvm::next(MI.operands_begin(), 2),
- getStackMapEndMOP(MI.operands_begin(), MI.operands_end()));
+ SM.recordStackMap(MI);
// Emit padding.
+ // FIXME: These nops ensure that the stackmap's shadow is covered by
+ // instructions from the same basic block, but the nops should not be
+ // necessary if instructions from the same block follow the stackmap.
for (unsigned i = 0; i < NumNOPBytes; ++i)
OutStreamer.EmitInstruction(MCInstBuilder(X86::NOOP));
}
// Lower a patchpoint of the form:
-// [<def>], <id>, <numBytes>, <target>, <numArgs>
+// [<def>], <id>, <numBytes>, <target>, <numArgs>, <cc>, ...
static void LowerPATCHPOINT(MCStreamer &OutStreamer,
StackMaps &SM,
const MachineInstr &MI) {
- bool hasDef = MI.getOperand(0).isReg() && MI.getOperand(0).isDef() &&
- !MI.getOperand(0).isImplicit();
- unsigned StartIdx = hasDef ? 1 : 0;
-#ifndef NDEBUG
- {
- unsigned StartIdx2 = 0, e = MI.getNumOperands();
- while (StartIdx2 < e && MI.getOperand(StartIdx2).isReg() &&
- MI.getOperand(StartIdx2).isDef() &&
- !MI.getOperand(StartIdx2).isImplicit())
- ++StartIdx2;
-
- assert(StartIdx == StartIdx2 &&
- "Unexpected additonal definition in Patchpoint intrinsic.");
- }
-#endif
-
- // Find the first scratch register (implicit def and early clobber)
- unsigned ScratchIdx = StartIdx, e = MI.getNumOperands();
- while (ScratchIdx < e &&
- !(MI.getOperand(ScratchIdx).isReg() &&
- MI.getOperand(ScratchIdx).isDef() &&
- MI.getOperand(ScratchIdx).isImplicit() &&
- MI.getOperand(ScratchIdx).isEarlyClobber()))
- ++ScratchIdx;
-
- assert(ScratchIdx != e && "No scratch register available");
-
- int64_t ID = MI.getOperand(StartIdx).getImm();
- assert((int32_t)ID == ID && "Stack maps hold 32-bit IDs");
-
- // Get the number of arguments participating in the call. This number was
- // adjusted during call lowering by subtracting stack args.
- bool isAnyRegCC = MI.getOperand(StartIdx + 4).getImm() == CallingConv::AnyReg;
- assert(((hasDef && isAnyRegCC) || !hasDef) &&
- "Only Patchpoints with AnyReg calling convention may have a result");
- int64_t StackMapIdx = isAnyRegCC ? StartIdx + 5 :
- StartIdx + 5 + MI.getOperand(StartIdx + 3).getImm();
- assert(StackMapIdx <= MI.getNumOperands() &&
- "Patchpoint intrinsic dropped arguments.");
-
- SM.recordStackMap(MI, ID, llvm::next(MI.operands_begin(), StackMapIdx),
- getStackMapEndMOP(MI.operands_begin(), MI.operands_end()),
- isAnyRegCC && hasDef);
+ SM.recordPatchPoint(MI);
+ PatchPointOpers opers(&MI);
+ unsigned ScratchIdx = opers.getNextScratchIdx();
unsigned EncodedBytes = 0;
- int64_t CallTarget = MI.getOperand(StartIdx + 2).getImm();
+ int64_t CallTarget = opers.getMetaOper(PatchPointOpers::TargetPos).getImm();
if (CallTarget) {
// Emit MOV to materialize the target address and the CALL to target.
// This is encoded with 12-13 bytes, depending on which register is used.
@@ -845,11 +799,11 @@ static void LowerPATCHPOINT(MCStreamer &OutStreamer,
.addReg(MI.getOperand(ScratchIdx).getReg()));
}
// Emit padding.
- unsigned NumNOPBytes = MI.getOperand(StartIdx + 1).getImm();
- assert(NumNOPBytes >= EncodedBytes &&
+ unsigned NumBytes = opers.getMetaOper(PatchPointOpers::NBytesPos).getImm();
+ assert(NumBytes >= EncodedBytes &&
"Patchpoint can't request size less than the length of a call.");
- for (unsigned i = EncodedBytes; i < NumNOPBytes; ++i)
+ for (unsigned i = EncodedBytes; i < NumBytes; ++i)
OutStreamer.EmitInstruction(MCInstBuilder(X86::NOOP));
}
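
The diffstat above is limited to lib/, so the declaration of PatchPointOpers that both files rely on is not part of this diff. Below is a sketch of the interface implied by the call sites in the patch; the accessor names and meta-operand positions are taken from the code above, while the exact signatures, the inline bodies, and the MetaEnd sentinel are assumptions.

#include "llvm/CodeGen/MachineInstr.h"

namespace llvm {

// Sketch only; not the verbatim header. Meta operands follow the documented
// layout: [<def>], <id>, <numBytes>, <target>, <numArgs>, <cc>, <args...>,
// followed by the live-state operands and the scratch/implicit operands.
class PatchPointOpers {
public:
  enum MetaOperands { IDPos, NBytesPos, TargetPos, NArgPos, CCPos, MetaEnd };

  explicit PatchPointOpers(const MachineInstr *MI);

  bool isAnyReg() const { return IsAnyReg; }
  bool hasDef() const { return HasDef; }

  // Index of the given meta operand, skipping the optional explicit <def>.
  unsigned getMetaIdx(unsigned Pos = 0) const {
    return (HasDef ? 1 : 0) + Pos;
  }
  const MachineOperand &getMetaOper(unsigned Pos) const {
    return MI->getOperand(getMetaIdx(Pos));
  }

  // First call argument and first "live state" operand, respectively.
  unsigned getArgIdx() const { return getMetaIdx() + MetaEnd; }
  unsigned getVarIdx() const {
    return getArgIdx() + MI->getOperand(getMetaIdx(NArgPos)).getImm();
  }

  // Where stack map location recording starts: anyregcc records the call
  // arguments as well, other conventions record only the live state.
  unsigned getStackMapStartIdx() const {
    return IsAnyReg ? getArgIdx() : getVarIdx();
  }

  // Defined in lib/CodeGen/StackMaps.cpp above.
  unsigned getNextScratchIdx(unsigned StartIdx = 0) const;

private:
  const MachineInstr *MI;
  bool HasDef;
  bool IsAnyReg;
};

} // end namespace llvm
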