Diffstat (limited to 'lib/CodeGen/VirtRegMap.cpp'):
 lib/CodeGen/VirtRegMap.cpp | 166 ++++++++++--------------------
 1 file changed, 58 insertions(+), 108 deletions(-)
diff --git a/lib/CodeGen/VirtRegMap.cpp b/lib/CodeGen/VirtRegMap.cpp
index 2912bdd63426..bf1c0dce9e56 100644
--- a/lib/CodeGen/VirtRegMap.cpp
+++ b/lib/CodeGen/VirtRegMap.cpp
@@ -163,11 +163,12 @@ class VirtRegRewriter : public MachineFunctionPass {
SlotIndexes *Indexes;
LiveIntervals *LIS;
VirtRegMap *VRM;
- SparseSet<unsigned> PhysRegs;
void rewrite();
void addMBBLiveIns();
bool readsUndefSubreg(const MachineOperand &MO) const;
+ void addLiveInsForSubRanges(const LiveInterval &LI, unsigned PhysReg) const;
+
public:
static char ID;
VirtRegRewriter() : MachineFunctionPass(ID) {}
@@ -237,10 +238,52 @@ bool VirtRegRewriter::runOnMachineFunction(MachineFunction &fn) {
return true;
}

+void VirtRegRewriter::addLiveInsForSubRanges(const LiveInterval &LI,
+ unsigned PhysReg) const {
+ assert(!LI.empty());
+ assert(LI.hasSubRanges());
+
+ typedef std::pair<const LiveInterval::SubRange *,
+ LiveInterval::const_iterator> SubRangeIteratorPair;
+ SmallVector<SubRangeIteratorPair, 4> SubRanges;
+ SlotIndex First;
+ SlotIndex Last;
+ for (const LiveInterval::SubRange &SR : LI.subranges()) {
+ SubRanges.push_back(std::make_pair(&SR, SR.begin()));
+ if (!First.isValid() || SR.segments.front().start < First)
+ First = SR.segments.front().start;
+ if (!Last.isValid() || SR.segments.back().end > Last)
+ Last = SR.segments.back().end;
+ }
+
+ // Check all mbb start positions between First and Last while
+ // simultaneously advancing an iterator for each subrange.
+ for (SlotIndexes::MBBIndexIterator MBBI = Indexes->findMBBIndex(First);
+ MBBI != Indexes->MBBIndexEnd() && MBBI->first <= Last; ++MBBI) {
+ SlotIndex MBBBegin = MBBI->first;
+ // Advance all subrange iterators so that their end position is just
+ // behind MBBBegin (or the iterator is at the end).
+ LaneBitmask LaneMask = 0;
+ for (auto &RangeIterPair : SubRanges) {
+ const LiveInterval::SubRange *SR = RangeIterPair.first;
+ LiveInterval::const_iterator &SRI = RangeIterPair.second;
+ while (SRI != SR->end() && SRI->end <= MBBBegin)
+ ++SRI;
+ if (SRI == SR->end())
+ continue;
+ if (SRI->start <= MBBBegin)
+ LaneMask |= SR->LaneMask;
+ }
+ if (LaneMask == 0)
+ continue;
+ MachineBasicBlock *MBB = MBBI->second;
+ MBB->addLiveIn(PhysReg, LaneMask);
+ }
+}
+
// Compute MBB live-in lists from virtual register live ranges and their
// assignments.
void VirtRegRewriter::addMBBLiveIns() {
- SmallVector<MachineBasicBlock*, 16> LiveIn;
for (unsigned Idx = 0, IdxE = MRI->getNumVirtRegs(); Idx != IdxE; ++Idx) {
unsigned VirtReg = TargetRegisterInfo::index2VirtReg(Idx);
if (MRI->reg_nodbg_empty(VirtReg))
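
The new addLiveInsForSubRanges() above merges several sorted subrange segment lists against the sorted list of basic-block start slots: one iterator per subrange is advanced in lockstep, and the lane masks of the subranges covering each block start are OR-ed together. Below is a minimal standalone sketch of that lockstep scan using plain std:: containers instead of LLVM's LiveInterval/SlotIndexes types; all names and data are illustrative, not LLVM's, and the sketch scans every block start instead of limiting itself to the [First, Last] range as the real code does.

#include <cstdio>
#include <utility>
#include <vector>

typedef std::pair<int, int> Segment; // [start, end), sorted ascending

struct SubRange {
  unsigned LaneMask;
  std::vector<Segment> Segments;
};

int main() {
  // Two subranges of one virtual register, plus the block start slots.
  std::vector<SubRange> SubRanges = {
    {0x1, {{0, 10}, {25, 40}}},
    {0x2, {{5, 30}}},
  };
  std::vector<int> BlockStarts = {0, 8, 16, 24, 32, 48};

  // One iterator (index) per subrange, advanced monotonically.
  std::vector<size_t> Iters(SubRanges.size(), 0);
  for (int Begin : BlockStarts) {
    unsigned LaneMask = 0;
    for (size_t I = 0; I != SubRanges.size(); ++I) {
      const std::vector<Segment> &Segs = SubRanges[I].Segments;
      size_t &SRI = Iters[I];
      // Advance until the current segment ends after this block start.
      while (SRI != Segs.size() && Segs[SRI].second <= Begin)
        ++SRI;
      // If the segment also starts at or before the block start, the
      // subrange's lanes are live-in here.
      if (SRI != Segs.size() && Segs[SRI].first <= Begin)
        LaneMask |= SubRanges[I].LaneMask;
    }
    if (LaneMask != 0)
      std::printf("block starting at %d is live-in with mask 0x%x\n",
                  Begin, LaneMask);
  }
  return 0;
}
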
@@ -254,31 +297,18 @@ void VirtRegRewriter::addMBBLiveIns() {
assert(PhysReg != VirtRegMap::NO_PHYS_REG && "Unmapped virtual register.");
if (LI.hasSubRanges()) {
- for (LiveInterval::SubRange &S : LI.subranges()) {
- for (const auto &Seg : S.segments) {
- if (!Indexes->findLiveInMBBs(Seg.start, Seg.end, LiveIn))
- continue;
- for (MCSubRegIndexIterator SR(PhysReg, TRI); SR.isValid(); ++SR) {
- unsigned SubReg = SR.getSubReg();
- unsigned SubRegIndex = SR.getSubRegIndex();
- unsigned SubRegLaneMask = TRI->getSubRegIndexLaneMask(SubRegIndex);
- if ((SubRegLaneMask & S.LaneMask) == 0)
- continue;
- for (unsigned i = 0, e = LiveIn.size(); i != e; ++i) {
- LiveIn[i]->addLiveIn(SubReg);
- }
- }
- LiveIn.clear();
- }
- }
+ addLiveInsForSubRanges(LI, PhysReg);
} else {
- // Scan the segments of LI.
- for (const auto &Seg : LI.segments) {
- if (!Indexes->findLiveInMBBs(Seg.start, Seg.end, LiveIn))
- continue;
- for (unsigned i = 0, e = LiveIn.size(); i != e; ++i)
- LiveIn[i]->addLiveIn(PhysReg);
- LiveIn.clear();
+ // Go over MBB begin positions and see if we have segments covering them.
+ // The following works because segments and the MBBIndex list are both
+ // sorted by slot indexes.
+ SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin();
+ for (const auto &Seg : LI) {
+ I = Indexes->advanceMBBIndex(I, Seg.start);
+ for (; I != Indexes->MBBIndexEnd() && I->first < Seg.end; ++I) {
+ MachineBasicBlock *MBB = I->second;
+ MBB->addLiveIn(PhysReg);
+ }
}
}
}
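
For virtual registers without subranges, the rewritten else branch walks the main range's segments and the block-start list with a single forward iterator, which works only because both lists are sorted by slot index. A rough standalone equivalent follows, with std::lower_bound standing in for SlotIndexes::advanceMBBIndex(); the types and numbers are made up for illustration.

#include <algorithm>
#include <cstdio>
#include <utility>
#include <vector>

int main() {
  std::vector<std::pair<int, int>> Segments = {{4, 12}, {20, 35}}; // sorted
  std::vector<int> BlockStarts = {0, 8, 16, 24, 32, 48};           // sorted

  auto I = BlockStarts.begin();
  for (const auto &Seg : Segments) {
    // Advance to the first block that starts at or after this segment.
    I = std::lower_bound(I, BlockStarts.end(), Seg.first);
    // Every block that starts before the segment ends gets the live-in.
    for (; I != BlockStarts.end() && *I < Seg.second; ++I)
      std::printf("block starting at %d gets the live-in\n", *I);
  }
  return 0;
}
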
@@ -305,7 +335,7 @@ bool VirtRegRewriter::readsUndefSubreg(const MachineOperand &MO) const {
assert(LI.liveAt(BaseIndex) &&
"Reads of completely dead register should be marked undef already");
unsigned SubRegIdx = MO.getSubReg();
- unsigned UseMask = TRI->getSubRegIndexLaneMask(SubRegIdx);
+ LaneBitmask UseMask = TRI->getSubRegIndexLaneMask(SubRegIdx);
// See if any of the relevant subregister liveranges is defined at this point.
for (const LiveInterval::SubRange &SR : LI.subranges()) {
if ((SR.LaneMask & UseMask) != 0 && SR.liveAt(BaseIndex))
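
The LaneBitmask change above does not alter the logic of readsUndefSubreg(): the read is considered undef only when no live subrange overlaps the lanes the operand reads. A small sketch of that check, with LaneBitmask modeled as a plain unsigned and the liveness query reduced to a precomputed flag; everything here is illustrative, not LLVM's API.

#include <vector>

typedef unsigned LaneBitmask;

struct SubRangeInfo {
  LaneBitmask LaneMask;
  bool LiveAtIndex; // stands in for SR.liveAt(BaseIndex)
};

static bool readsUndefSubreg(LaneBitmask UseMask,
                             const std::vector<SubRangeInfo> &SubRanges) {
  for (const SubRangeInfo &SR : SubRanges)
    if ((SR.LaneMask & UseMask) != 0 && SR.LiveAtIndex)
      return false; // some lane the operand reads is defined here
  return true;      // none of the read lanes is live: the read is undef
}
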
@@ -319,54 +349,15 @@ void VirtRegRewriter::rewrite() {
SmallVector<unsigned, 8> SuperDeads;
SmallVector<unsigned, 8> SuperDefs;
SmallVector<unsigned, 8> SuperKills;
- SmallPtrSet<const MachineInstr *, 4> NoReturnInsts;
-
- // Here we have a SparseSet to hold which PhysRegs are actually encountered
- // in the MF we are about to iterate over so that later when we call
- // setPhysRegUsed, we are only doing it for physRegs that were actually found
- // in the program and not for all of the possible physRegs for the given
- // target architecture. If the target has a lot of physRegs, then for a small
- // program there will be a significant compile time reduction here.
- PhysRegs.clear();
- PhysRegs.setUniverse(TRI->getNumRegs());
-
- // The function with uwtable should guarantee that the stack unwinder
- // can unwind the stack to the previous frame. Thus, we can't apply the
- // noreturn optimization if the caller function has uwtable attribute.
- bool HasUWTable = MF->getFunction()->hasFnAttribute(Attribute::UWTable);
for (MachineFunction::iterator MBBI = MF->begin(), MBBE = MF->end();
MBBI != MBBE; ++MBBI) {
DEBUG(MBBI->print(dbgs(), Indexes));
- bool IsExitBB = MBBI->succ_empty();
for (MachineBasicBlock::instr_iterator
MII = MBBI->instr_begin(), MIE = MBBI->instr_end(); MII != MIE;) {
- MachineInstr *MI = MII;
+ MachineInstr *MI = &*MII;
++MII;
- // Check if this instruction is a call to a noreturn function. If this
- // is a call to noreturn function and we don't need the stack unwinding
- // functionality (i.e. this function does not have uwtable attribute and
- // the callee function has the nounwind attribute), then we can ignore
- // the definitions set by this instruction.
- if (!HasUWTable && IsExitBB && MI->isCall()) {
- for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
- MOE = MI->operands_end(); MOI != MOE; ++MOI) {
- MachineOperand &MO = *MOI;
- if (!MO.isGlobal())
- continue;
- const Function *Func = dyn_cast<Function>(MO.getGlobal());
- if (!Func || !Func->hasFnAttribute(Attribute::NoReturn) ||
- // We need to keep correct unwind information
- // even if the function will not return, since the
- // runtime may need it.
- !Func->hasFnAttribute(Attribute::NoUnwind))
- continue;
- NoReturnInsts.insert(MI);
- break;
- }
- }
-
for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
MOE = MI->operands_end(); MOI != MOE; ++MOI) {
MachineOperand &MO = *MOI;
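
The comment removed above explained why PhysRegs was a SparseSet sized to the target's register universe: inserts, membership tests and clear() stay constant-time while only the registers actually encountered in the function are stored. Below is a minimal sparse-set sketch in that spirit; it is not LLVM's SparseSet, and the class name and universe size are made up.

#include <cstdio>
#include <vector>

class SimpleSparseSet {
  std::vector<unsigned> Sparse; // key -> position in Dense (may be stale)
  std::vector<unsigned> Dense;  // the keys actually inserted, in order
public:
  explicit SimpleSparseSet(unsigned Universe) : Sparse(Universe, 0) {}

  bool count(unsigned Key) const {
    unsigned Pos = Sparse[Key];
    return Pos < Dense.size() && Dense[Pos] == Key;
  }
  void insert(unsigned Key) {
    if (count(Key))
      return;
    Sparse[Key] = Dense.size();
    Dense.push_back(Key);
  }
  void clear() { Dense.clear(); } // O(1): stale Sparse entries are harmless
  const std::vector<unsigned> &elements() const { return Dense; }
};

int main() {
  SimpleSparseSet PhysRegs(/*Universe=*/256); // e.g. number of target regs
  PhysRegs.insert(17);
  PhysRegs.insert(42);
  PhysRegs.insert(17); // duplicate, ignored
  for (unsigned Reg : PhysRegs.elements())
    std::printf("encountered physreg %u\n", Reg);
  return 0;
}
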
@@ -375,15 +366,6 @@ void VirtRegRewriter::rewrite() {
if (MO.isRegMask())
MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
- // If we encounter a VirtReg or PhysReg then get at the PhysReg and add
- // it to the physreg bitset. Later we use only the PhysRegs that were
- // actually encountered in the MF to populate the MRI's used physregs.
- if (MO.isReg() && MO.getReg())
- PhysRegs.insert(
- TargetRegisterInfo::isVirtualRegister(MO.getReg()) ?
- VRM->getPhys(MO.getReg()) :
- MO.getReg());
-
if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
continue;
unsigned VirtReg = MO.getReg();
@@ -418,14 +400,6 @@ void VirtRegRewriter::rewrite() {
MO.setIsUndef(true);
} else if (!MO.isDead()) {
assert(MO.isDef());
- // Things get tricky when we ran out of lane mask bits and
- // merged multiple lanes into the overflow bit: In this case
- // our subregister liveness tracking isn't precise and we can't
- // know what subregister parts are undefined, fall back to the
- // implicit super-register def then.
- unsigned LaneMask = TRI->getSubRegIndexLaneMask(SubReg);
- if (TargetRegisterInfo::isImpreciseLaneMask(LaneMask))
- SuperDefs.push_back(PhysReg);
}
}
@@ -470,29 +444,5 @@ void VirtRegRewriter::rewrite() {
}
}
}
-
- // Tell MRI about physical registers in use.
- if (NoReturnInsts.empty()) {
- for (SparseSet<unsigned>::iterator
- RegI = PhysRegs.begin(), E = PhysRegs.end(); RegI != E; ++RegI)
- if (!MRI->reg_nodbg_empty(*RegI))
- MRI->setPhysRegUsed(*RegI);
- } else {
- for (SparseSet<unsigned>::iterator
- I = PhysRegs.begin(), E = PhysRegs.end(); I != E; ++I) {
- unsigned Reg = *I;
- if (MRI->reg_nodbg_empty(Reg))
- continue;
- // Check if this register has a use that will impact the rest of the
- // code. Uses in debug and noreturn instructions do not impact the
- // generated code.
- for (MachineInstr &It : MRI->reg_nodbg_instructions(Reg)) {
- if (!NoReturnInsts.count(&It)) {
- MRI->setPhysRegUsed(Reg);
- break;
- }
- }
- }
- }
}