path: root/lib/Transforms/Scalar/CodeGenPrepare.cpp
author     Chris Lattner <sabre@nondot.org>  2009-02-12 06:56:08 +0000
committer  Chris Lattner <sabre@nondot.org>  2009-02-12 06:56:08 +0000
commit     65c02fbf9bda089398a67c887e32a99799afa522 (patch)
tree       fdb91e305ba60400d584c73d4a1513bc5a88a902 /lib/Transforms/Scalar/CodeGenPrepare.cpp
parent     afa77434bb412c358f9d120fe9b2c3bc39da9365 (diff)
fix PR3537: if resetting bbi back to the start of a block, we need to
forget about already inserted expressions.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@64362 91177308-0d34-0410-b5e6-96231b3b80d8
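The pattern behind the bug, as a minimal hypothetical C++ sketch (plain standard C++, not the LLVM API; SunkAddrs, the address name %p, and the instruction indices are illustrative): OptimizeBlock caches where each address computation was sunk into the block, so if the scan position is reset to the top of the block without clearing that cache, an instruction visited on the rescan can be pointed at a computation that now sits below it, i.e. a use before its definition.

// Hypothetical, simplified model of the bug; not LLVM code.
#include <cstddef>
#include <iostream>
#include <map>
#include <string>

int main() {
  // Maps an address to the instruction index at which its sunk
  // computation was materialized in the current block.
  std::map<std::string, std::size_t> SunkAddrs;
  SunkAddrs["%p"] = 7;    // address of %p was recomputed at index 7

  // Expanding an inline asm call forces a rescan from the block start,
  // but the cache still remembers the computation at index 7.
  // SunkAddrs.clear();   // the fix: forget already inserted expressions

  // The rescan reaches a load of %p at index 3.  The stale entry claims
  // its address is already available, yet that value is defined at
  // index 7, after the load: reusing it is a use before the definition.
  const std::size_t LoadIdx = 3;
  auto It = SunkAddrs.find("%p");
  if (It != SunkAddrs.end() && It->second > LoadIdx)
    std::cout << "stale reuse: value defined at index " << It->second
              << " used at index " << LoadIdx << "\n";
  return 0;
}

With the clear in place the cache is simply rebuilt as the rescan proceeds, so the worst case is redoing a few address computations rather than reusing a value that has not yet been defined.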
Diffstat (limited to 'lib/Transforms/Scalar/CodeGenPrepare.cpp')
-rw-r--r--  lib/Transforms/Scalar/CodeGenPrepare.cpp | 13
1 file changed, 9 insertions(+), 4 deletions(-)
diff --git a/lib/Transforms/Scalar/CodeGenPrepare.cpp b/lib/Transforms/Scalar/CodeGenPrepare.cpp
index 0a1c641e2b..12c76e8525 100644
--- a/lib/Transforms/Scalar/CodeGenPrepare.cpp
+++ b/lib/Transforms/Scalar/CodeGenPrepare.cpp
@@ -1241,11 +1241,13 @@ bool CodeGenPrepare::OptimizeMemoryInst(Instruction *MemoryInst, Value *Addr,
// computation.
Value *&SunkAddr = SunkAddrs[Addr];
if (SunkAddr) {
- DEBUG(cerr << "CGP: Reusing nonlocal addrmode: " << AddrMode << "\n");
+ DEBUG(cerr << "CGP: Reusing nonlocal addrmode: " << AddrMode << " for "
+ << *MemoryInst);
if (SunkAddr->getType() != Addr->getType())
SunkAddr = new BitCastInst(SunkAddr, Addr->getType(), "tmp", InsertPt);
} else {
- DEBUG(cerr << "CGP: SINKING nonlocal addrmode: " << AddrMode << "\n");
+ DEBUG(cerr << "CGP: SINKING nonlocal addrmode: " << AddrMode << " for "
+ << *MemoryInst);
const Type *IntPtrTy = TLI->getTargetData()->getIntPtrType();
Value *Result = 0;
@@ -1505,9 +1507,12 @@ bool CodeGenPrepare::OptimizeBlock(BasicBlock &BB) {
if (TLI && isa<InlineAsm>(CI->getCalledValue()))
if (const TargetAsmInfo *TAI =
TLI->getTargetMachine().getTargetAsmInfo()) {
- if (TAI->ExpandInlineAsm(CI))
+ if (TAI->ExpandInlineAsm(CI)) {
BBI = BB.begin();
- else
+ // Avoid processing instructions out of order, which could cause
+ // reuse before a value is defined.
+ SunkAddrs.clear();
+ } else
// Sink address computing for memory operands into the block.
MadeChange |= OptimizeInlineAsmInst(I, &(*CI), SunkAddrs);
}