diff options
author | Owen Anderson <resistor@mac.com> | 2008-07-22 22:46:49 +0000 |
---|---|---|
committer | Owen Anderson <resistor@mac.com> | 2008-07-22 22:46:49 +0000 |
commit | a1566f2e12ce87a5bca30bc0189a0cdbb40136a4 (patch) | |
tree | fecdd6cd2ded8963a4015584d9de5f7f4329a765 /lib/CodeGen/SimpleRegisterCoalescing.cpp | |
parent | 38bcec13e89b33fd6b0553ec47667744c54fbb7b (diff) |
Change the heuristics used in the coalescer, register allocator, and within
live intervals themselves to use an instruction count approximation that is
not affected by inserting empty indices.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@53937 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib/CodeGen/SimpleRegisterCoalescing.cpp')
-rw-r--r-- | lib/CodeGen/SimpleRegisterCoalescing.cpp | 16 |
1 file changed, 8 insertions, 8 deletions
diff --git a/lib/CodeGen/SimpleRegisterCoalescing.cpp b/lib/CodeGen/SimpleRegisterCoalescing.cpp index 5b7f55dfc9..29e634b477 100644 --- a/lib/CodeGen/SimpleRegisterCoalescing.cpp +++ b/lib/CodeGen/SimpleRegisterCoalescing.cpp @@ -851,8 +851,8 @@ SimpleRegisterCoalescing::isProfitableToCoalesceToSubRC(unsigned SrcReg, // Then make sure the intervals are *short*. LiveInterval &SrcInt = li_->getInterval(SrcReg); LiveInterval &DstInt = li_->getInterval(DstReg); - unsigned SrcSize = SrcInt.getSize() / InstrSlots::NUM; - unsigned DstSize = DstInt.getSize() / InstrSlots::NUM; + unsigned SrcSize = li_->getApproximateInstructionCount(SrcInt); + unsigned DstSize = li_->getApproximateInstructionCount(DstInt); const TargetRegisterClass *RC = mri_->getRegClass(DstReg); unsigned Threshold = allocatableRCRegs_[RC].count() * 2; return (SrcSize + DstSize) <= Threshold; @@ -1011,10 +1011,10 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) { if (SubIdx) { unsigned LargeReg = isExtSubReg ? SrcReg : DstReg; unsigned SmallReg = isExtSubReg ? DstReg : SrcReg; - unsigned LargeRegSize = - li_->getInterval(LargeReg).getSize() / InstrSlots::NUM; - unsigned SmallRegSize = - li_->getInterval(SmallReg).getSize() / InstrSlots::NUM; + unsigned LargeRegSize = + li_->getApproximateInstructionCount(li_->getInterval(LargeReg)); + unsigned SmallRegSize = + li_->getApproximateInstructionCount(li_->getInterval(SmallReg)); const TargetRegisterClass *RC = mri_->getRegClass(SmallReg); unsigned Threshold = allocatableRCRegs_[RC].count(); // Be conservative. If both sides are virtual registers, do not coalesce @@ -1081,7 +1081,7 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) { // If the virtual register live interval is long but it has low use desity, // do not join them, instead mark the physical register as its allocation // preference. 
- unsigned Length = JoinVInt.getSize() / InstrSlots::NUM; + unsigned Length = li_->getApproximateInstructionCount(JoinVInt); if (Length > Threshold && (((float)std::distance(mri_->use_begin(JoinVReg), mri_->use_end()) / Length) < (1.0 / Threshold))) { @@ -2196,7 +2196,7 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) { // Divide the weight of the interval by its size. This encourages // spilling of intervals that are large and have few uses, and // discourages spilling of small intervals with many uses. - LI.weight /= LI.getSize(); + LI.weight /= li_->getApproximateInstructionCount(LI); } } |