author    Jakob Stoklund Olesen <stoklund@2pi.dk>    2010-07-01 00:13:04 +0000
committer Jakob Stoklund Olesen <stoklund@2pi.dk>    2010-07-01 00:13:04 +0000
commit    e72a5c5e2e3d09ef02bae00620d499d3e59c8a7f
tree      51d7c5dd060243ba3e52ef35195dfcd18c2b8211
parent    9e55afbf88f4853ac94b6a0ac021a89c5c929d6b
Add memory operand folding support to InlineSpiller.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@107355 91177308-0d34-0410-b5e6-96231b3b80d8
 lib/CodeGen/InlineSpiller.cpp | 37
 1 file changed, 37 insertions(+), 0 deletions(-)
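What the patch does, in rough terms: when a spilled register is read by an instruction, the spiller can now ask the target (via TargetInstrInfo::foldMemoryOperand) to rewrite that instruction so it addresses the stack slot directly, instead of emitting a separate reload in front of it; on success the original instruction is erased and replaced by the folded one. A hedged, x86-flavored illustration (pseudo machine code; the virtual registers and frame index are made up and are not taken from this commit):

    ;; before folding: reload from the spill slot, then use the value
    %reg1024 = MOV32rm <fi#3>
    %reg1025 = ADD32rr %reg1025, %reg1024

    ;; after a successful fold: the use reads the spill slot directly
    %reg1025 = ADD32rm %reg1025, <fi#3>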
diff --git a/lib/CodeGen/InlineSpiller.cpp b/lib/CodeGen/InlineSpiller.cpp
index fc6d3ec70e..32fb4430b3 100644
--- a/lib/CodeGen/InlineSpiller.cpp
+++ b/lib/CodeGen/InlineSpiller.cpp
@@ -59,6 +59,8 @@ public:
              SmallVectorImpl<LiveInterval*> &spillIs,
              SlotIndex *earliestIndex);
   bool reMaterialize(LiveInterval &NewLI, MachineBasicBlock::iterator MI);
+  bool foldMemoryOperand(MachineBasicBlock::iterator MI,
+                         const SmallVectorImpl<unsigned> &Ops);
   void insertReload(LiveInterval &NewLI, MachineBasicBlock::iterator MI);
   void insertSpill(LiveInterval &NewLI, MachineBasicBlock::iterator MI);
 };
@@ -146,6 +148,37 @@ bool InlineSpiller::reMaterialize(LiveInterval &NewLI,
   return true;
 }
+/// foldMemoryOperand - Try folding stack slot references in Ops into MI.
+/// Return true on success, and MI will be erased.
+bool InlineSpiller::foldMemoryOperand(MachineBasicBlock::iterator MI,
+                                      const SmallVectorImpl<unsigned> &Ops) {
+  // TargetInstrInfo::foldMemoryOperand only expects explicit, non-tied
+  // operands.
+  SmallVector<unsigned, 8> FoldOps;
+  for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
+    unsigned Idx = Ops[i];
+    MachineOperand &MO = MI->getOperand(Idx);
+    if (MO.isImplicit())
+      continue;
+    // FIXME: Teach targets to deal with subregs.
+    if (MO.getSubReg())
+      return false;
+    // Tied use operands should not be passed to foldMemoryOperand.
+    if (!MI->isRegTiedToDefOperand(Idx))
+      FoldOps.push_back(Idx);
+  }
+
+  MachineInstr *FoldMI = tii_.foldMemoryOperand(mf_, MI, FoldOps, stackSlot_);
+  if (!FoldMI)
+    return false;
+  MachineBasicBlock &MBB = *MI->getParent();
+  lis_.ReplaceMachineInstrInMaps(MI, FoldMI);
+  vrm_.addSpillSlotUse(stackSlot_, FoldMI);
+  MBB.insert(MBB.erase(MI), FoldMI);
+  DEBUG(dbgs() << "\tfolded: " << *FoldMI);
+  return true;
+}
+
 /// insertReload - Insert a reload of NewLI.reg before MI.
 void InlineSpiller::insertReload(LiveInterval &NewLI,
                                  MachineBasicBlock::iterator MI) {
@@ -208,6 +241,10 @@ void InlineSpiller::spill(LiveInterval *li,
     // Attempt remat instead of reload.
     bool NeedsReload = Reads && !reMaterialize(NewLI, MI);
+    // Attempt to fold memory ops.
+    if (NewLI.empty() && foldMemoryOperand(MI, Ops))
+      continue;
+
     if (NeedsReload)
       insertReload(NewLI, MI);
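For reference, the hook the new helper relies on is the existing TargetInstrInfo::foldMemoryOperand entry point. The shape below is reconstructed from the call site above (tii_.foldMemoryOperand(mf_, MI, FoldOps, stackSlot_)); treat the exact declaration as an assumption and check TargetInstrInfo.h at this revision:

    // Sketch of the TargetInstrInfo hook used by InlineSpiller::foldMemoryOperand.
    // Returns the new, folded instruction on success, or null if the target
    // cannot fold a load/store of FrameIndex into MI's operands listed in Ops.
    MachineInstr *foldMemoryOperand(MachineFunction &MF,
                                    MachineInstr *MI,
                                    const SmallVectorImpl<unsigned> &Ops,
                                    int FrameIndex) const;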