Diffstat (limited to 'lib')
-rw-r--r--   lib/Analysis/MemDepPrinter.cpp                  |  67
-rw-r--r--   lib/Analysis/MemoryDependenceAnalysis.cpp       |  10
-rw-r--r--   lib/Transforms/Scalar/DeadStoreElimination.cpp  |   6
-rw-r--r--   lib/Transforms/Scalar/GVN.cpp                   |  20
4 files changed, 55 insertions, 48 deletions
diff --git a/lib/Analysis/MemDepPrinter.cpp b/lib/Analysis/MemDepPrinter.cpp
index dc40da012c..fde07ea4f9 100644
--- a/lib/Analysis/MemDepPrinter.cpp
+++ b/lib/Analysis/MemDepPrinter.cpp
@@ -25,8 +25,17 @@ namespace {
struct MemDepPrinter : public FunctionPass {
const Function *F;
- typedef PointerIntPair<const Instruction *, 1> InstAndClobberFlag;
- typedef std::pair<InstAndClobberFlag, const BasicBlock *> Dep;
+ enum DepType {
+ Clobber = 0,
+ Def,
+ NonFuncLocal,
+ Unknown
+ };
+
+ static const char* DepTypeStr[];
+
+ typedef PointerIntPair<const Instruction *, 2, DepType> InstTypePair;
+ typedef std::pair<InstTypePair, const BasicBlock *> Dep;
typedef SmallSetVector<Dep, 4> DepSet;
typedef DenseMap<const Instruction *, DepSet> DepSetMap;
DepSetMap Deps;
@@ -50,6 +59,21 @@ namespace {
Deps.clear();
F = 0;
}
+
+ private:
+ static InstTypePair getInstTypePair(MemDepResult dep) {
+ if (dep.isClobber())
+ return InstTypePair(dep.getInst(), Clobber);
+ if (dep.isDef())
+ return InstTypePair(dep.getInst(), Def);
+ if (dep.isNonFuncLocal())
+ return InstTypePair(dep.getInst(), NonFuncLocal);
+ assert(dep.isUnknown() && "unexpected dependence type");
+ return InstTypePair(dep.getInst(), Unknown);
+ }
+ static InstTypePair getInstTypePair(const Instruction* inst, DepType type) {
+ return InstTypePair(inst, type);
+ }
};
}
@@ -64,6 +88,9 @@ FunctionPass *llvm::createMemDepPrinter() {
return new MemDepPrinter();
}
+const char* MemDepPrinter::DepTypeStr[]
+ = {"Clobber", "Def", "NonFuncLocal", "Unknown"};
+
bool MemDepPrinter::runOnFunction(Function &F) {
this->F = &F;
AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
@@ -79,10 +106,7 @@ bool MemDepPrinter::runOnFunction(Function &F) {
MemDepResult Res = MDA.getDependency(Inst);
if (!Res.isNonLocal()) {
- assert((Res.isUnknown() || Res.isClobber() || Res.isDef()) &&
- "Local dep should be unknown, def or clobber!");
- Deps[Inst].insert(std::make_pair(InstAndClobberFlag(Res.getInst(),
- Res.isClobber()),
+ Deps[Inst].insert(std::make_pair(getInstTypePair(Res),
static_cast<BasicBlock *>(0)));
} else if (CallSite CS = cast<Value>(Inst)) {
const MemoryDependenceAnalysis::NonLocalDepInfo &NLDI =
@@ -92,19 +116,14 @@ bool MemDepPrinter::runOnFunction(Function &F) {
for (MemoryDependenceAnalysis::NonLocalDepInfo::const_iterator
I = NLDI.begin(), E = NLDI.end(); I != E; ++I) {
const MemDepResult &Res = I->getResult();
- assert((Res.isUnknown() || Res.isClobber() || Res.isDef()) &&
- "Resolved non-local call dep should be unknown, def or "
- "clobber!");
- InstDeps.insert(std::make_pair(InstAndClobberFlag(Res.getInst(),
- Res.isClobber()),
- I->getBB()));
+ InstDeps.insert(std::make_pair(getInstTypePair(Res), I->getBB()));
}
} else {
SmallVector<NonLocalDepResult, 4> NLDI;
if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
if (!LI->isUnordered()) {
// FIXME: Handle atomic/volatile loads.
- Deps[Inst].insert(std::make_pair(InstAndClobberFlag(0, false),
+ Deps[Inst].insert(std::make_pair(getInstTypePair(0, Unknown),
static_cast<BasicBlock *>(0)));
continue;
}
@@ -113,7 +132,7 @@ bool MemDepPrinter::runOnFunction(Function &F) {
} else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
if (!SI->isUnordered()) {
// FIXME: Handle atomic/volatile stores.
- Deps[Inst].insert(std::make_pair(InstAndClobberFlag(0, false),
+ Deps[Inst].insert(std::make_pair(getInstTypePair(0, Unknown),
static_cast<BasicBlock *>(0)));
continue;
}
@@ -130,11 +149,7 @@ bool MemDepPrinter::runOnFunction(Function &F) {
for (SmallVectorImpl<NonLocalDepResult>::const_iterator
I = NLDI.begin(), E = NLDI.end(); I != E; ++I) {
const MemDepResult &Res = I->getResult();
- assert(Res.isClobber() != Res.isDef() &&
- "Resolved non-local pointer dep should be def or clobber!");
- InstDeps.insert(std::make_pair(InstAndClobberFlag(Res.getInst(),
- Res.isClobber()),
- I->getBB()));
+ InstDeps.insert(std::make_pair(getInstTypePair(Res), I->getBB()));
}
}
}
@@ -155,26 +170,18 @@ void MemDepPrinter::print(raw_ostream &OS, const Module *M) const {
for (DepSet::const_iterator I = InstDeps.begin(), E = InstDeps.end();
I != E; ++I) {
const Instruction *DepInst = I->first.getPointer();
- bool isClobber = I->first.getInt();
+ DepType type = I->first.getInt();
const BasicBlock *DepBB = I->second;
OS << " ";
- if (!DepInst)
- OS << "Unknown";
- else if (isClobber)
- OS << "Clobber";
- else
- OS << " Def";
+ OS << DepTypeStr[type];
if (DepBB) {
OS << " in block ";
WriteAsOperand(OS, DepBB, /*PrintType=*/false, M);
}
if (DepInst) {
OS << " from: ";
- if (DepInst == Inst)
- OS << "<unspecified>";
- else
- DepInst->print(OS);
+ DepInst->print(OS);
}
OS << "\n";
}
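
A minimal illustrative sketch (not part of the patch) of the InstTypePair encoding introduced above. It assumes the pass's existing includes (PointerIntPair.h, Instruction.h); SomeInst is a hypothetical Instruction pointer used only for the example:

    // DepType needs only 2 bits, so PointerIntPair can store it in the low,
    // alignment-guaranteed bits of the Instruction pointer.
    enum DepType { Clobber = 0, Def, NonFuncLocal, Unknown };
    typedef PointerIntPair<const Instruction *, 2, DepType> InstTypePair;

    InstTypePair P(SomeInst, Clobber);      // pack dependee + dependence kind
    const Instruction *I = P.getPointer();  // the dependee (may be null)
    DepType Kind = P.getInt();              // Clobber, Def, NonFuncLocal or Unknown
    // print() then just indexes the parallel table: OS << DepTypeStr[Kind];
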
diff --git a/lib/Analysis/MemoryDependenceAnalysis.cpp b/lib/Analysis/MemoryDependenceAnalysis.cpp
index cdff85a513..92967c08dc 100644
--- a/lib/Analysis/MemoryDependenceAnalysis.cpp
+++ b/lib/Analysis/MemoryDependenceAnalysis.cpp
@@ -238,7 +238,7 @@ getCallSiteDependencyFrom(CallSite CS, bool isReadOnlyCall,
// unknown, otherwise it is non-local.
if (BB != &BB->getParent()->getEntryBlock())
return MemDepResult::getNonLocal();
- return MemDepResult::getUnknown();
+ return MemDepResult::getNonFuncLocal();
}
/// isLoadLoadClobberIfExtendedToFullWidth - Return true if LI is a load that
@@ -499,7 +499,7 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
// unknown, otherwise it is non-local.
if (BB != &BB->getParent()->getEntryBlock())
return MemDepResult::getNonLocal();
- return MemDepResult::getUnknown();
+ return MemDepResult::getNonFuncLocal();
}
/// getDependency - Return the instruction on which a memory operation
@@ -532,7 +532,7 @@ MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
if (QueryParent != &QueryParent->getParent()->getEntryBlock())
LocalCache = MemDepResult::getNonLocal();
else
- LocalCache = MemDepResult::getUnknown();
+ LocalCache = MemDepResult::getNonFuncLocal();
} else {
AliasAnalysis::Location MemLoc;
AliasAnalysis::ModRefResult MR = GetLocation(QueryInst, MemLoc, AA);
@@ -688,7 +688,7 @@ MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
// a clobber, otherwise it is unknown.
Dep = MemDepResult::getNonLocal();
} else {
- Dep = MemDepResult::getUnknown();
+ Dep = MemDepResult::getNonFuncLocal();
}
// If we had a dirty entry for the block, update it. Otherwise, just add
@@ -806,7 +806,7 @@ GetNonLocalInfoForBlock(const AliasAnalysis::Location &Loc,
// If the block has a dependency (i.e. it isn't completely transparent to
// the value), remember the reverse association because we just added it
// to Cache!
- if (Dep.isNonLocal() || Dep.isUnknown())
+ if (!Dep.isDef() && !Dep.isClobber())
return Dep;
// Keep the ReverseNonLocalPtrDeps map up to date so we can efficiently
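
An illustrative summary (not part of the patch) of the dependence states after this change and the client-side test the DSE and GVN hunks below adopt; the helper name isConcreteLocalDep is invented here for clarity and is not part of MemDepResult:

    // A MemDepResult query can now come back as one of:
    //   Def / Clobber  - a concrete dependence in the query block
    //   NonLocal       - the dependence lies in another block
    //   NonFuncLocal   - no dependence anywhere in the function
    //   Unknown        - the analysis could not resolve the query
    // Clients that only act on a concrete local dependence therefore test:
    static bool isConcreteLocalDep(const MemDepResult &Dep) {
      return Dep.isDef() || Dep.isClobber();
    }
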
diff --git a/lib/Transforms/Scalar/DeadStoreElimination.cpp b/lib/Transforms/Scalar/DeadStoreElimination.cpp
index ffa77098bc..a593d0f446 100644
--- a/lib/Transforms/Scalar/DeadStoreElimination.cpp
+++ b/lib/Transforms/Scalar/DeadStoreElimination.cpp
@@ -441,7 +441,7 @@ bool DSE::runOnBasicBlock(BasicBlock &BB) {
// Ignore any store where we can't find a local dependence.
// FIXME: cross-block DSE would be fun. :)
- if (InstDep.isNonLocal() || InstDep.isUnknown())
+ if (!InstDep.isDef() && !InstDep.isClobber())
continue;
// If we're storing the same value back to a pointer that we just
@@ -477,7 +477,7 @@ bool DSE::runOnBasicBlock(BasicBlock &BB) {
if (Loc.Ptr == 0)
continue;
- while (!InstDep.isNonLocal() && !InstDep.isUnknown()) {
+ while (InstDep.isDef() || InstDep.isClobber()) {
// Get the memory clobbered by the instruction we depend on. MemDep will
// skip any instructions that 'Loc' clearly doesn't interact with. If we
// end up depending on a may- or must-aliased load, then we can't optimize
@@ -545,7 +545,7 @@ bool DSE::HandleFree(CallInst *F) {
MemDepResult Dep = MD->getDependency(F);
- while (!Dep.isNonLocal() && !Dep.isUnknown()) {
+ while (Dep.isDef() || Dep.isClobber()) {
Instruction *Dependency = Dep.getInst();
if (!hasMemoryWrite(Dependency) || !isRemovable(Dependency))
return MadeChange;
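
For illustration only (a hedged sketch, not a verbatim excerpt), the loop shape both DSE hunks converge on, with the body and the re-query elided:

    MemDepResult Dep = MD->getDependency(Inst);
    // Walk only while MemDep reports a concrete def or clobber in this block;
    // NonLocal, NonFuncLocal and Unknown all end the walk, just as the old
    // !isNonLocal() && !isUnknown() test did before NonFuncLocal existed.
    while (Dep.isDef() || Dep.isClobber()) {
      Instruction *Dependency = Dep.getInst();
      // ... examine or delete Dependency, then re-query MemDep ...
    }
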
diff --git a/lib/Transforms/Scalar/GVN.cpp b/lib/Transforms/Scalar/GVN.cpp
index d94f504585..cbfdbcddae 100644
--- a/lib/Transforms/Scalar/GVN.cpp
+++ b/lib/Transforms/Scalar/GVN.cpp
@@ -1279,7 +1279,9 @@ bool GVN::processNonLocalLoad(LoadInst *LI) {
// If we had a phi translation failure, we'll have a single entry which is a
// clobber in the current block. Reject this early.
- if (Deps.size() == 1 && Deps[0].getResult().isUnknown()) {
+ if (Deps.size() == 1 &&
+ !Deps[0].getResult().isDef() &&
+ !Deps[0].getResult().isClobber()) {
DEBUG(
dbgs() << "GVN: non-local load ";
WriteAsOperand(dbgs(), LI);
@@ -1299,7 +1301,7 @@ bool GVN::processNonLocalLoad(LoadInst *LI) {
BasicBlock *DepBB = Deps[i].getBB();
MemDepResult DepInfo = Deps[i].getResult();
- if (DepInfo.isUnknown()) {
+ if (!DepInfo.isDef() && !DepInfo.isClobber()) {
UnavailableBlocks.push_back(DepBB);
continue;
}
@@ -1364,7 +1366,7 @@ bool GVN::processNonLocalLoad(LoadInst *LI) {
continue;
}
- assert(DepInfo.isDef() && "Expecting def here");
+ // DepInfo.isDef() here: clobbers and unavailable deps were handled above.
Instruction *DepInst = DepInfo.getInst();
@@ -1761,7 +1763,11 @@ bool GVN::processLoad(LoadInst *L) {
return false;
}
- if (Dep.isUnknown()) {
+ // If it is defined in another block, try harder.
+ if (Dep.isNonLocal())
+ return processNonLocalLoad(L);
+
+ if (!Dep.isDef()) {
DEBUG(
// fast print dep, using operator<< on instruction is too slow.
dbgs() << "GVN: load ";
@@ -1771,12 +1777,6 @@ bool GVN::processLoad(LoadInst *L) {
return false;
}
- // If it is defined in another block, try harder.
- if (Dep.isNonLocal())
- return processNonLocalLoad(L);
-
- assert(Dep.isDef() && "Expecting def here");
-
Instruction *DepInst = Dep.getInst();
if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInst)) {
Value *StoredVal = DepSI->getValueOperand();
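
A condensed, hedged view (not a verbatim excerpt) of the dispatch GVN::processLoad ends up with after this hunk; the local clobber handling earlier in the function is elided:

    MemDepResult Dep = MD->getDependency(L);
    // ... local clobber handling (widening, partial forwarding) runs first ...
    if (Dep.isNonLocal())              // defined in another block: try harder
      return processNonLocalLoad(L);
    if (!Dep.isDef())                  // NonFuncLocal or Unknown: nothing to forward
      return false;                    // (after the DEBUG trace)
    Instruction *DepInst = Dep.getInst();  // a concrete local def to forward from
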