author    Jakob Stoklund Olesen <stoklund@2pi.dk>    2012-08-29 21:19:21 +0000
committer Jakob Stoklund Olesen <stoklund@2pi.dk>    2012-08-29 21:19:21 +0000
commit    f036f7a1e79910bf5b5b6f37d2e512b4f01155a0 (patch)
tree      c2767358bf504fd67496b37d8d42e5c3c034b62b
parent    ad7ebc2aebd94ec9e0bd01e1735d06cfe067368b (diff)
Rename hasVolatileMemoryRef() to hasOrderedMemoryRef().
Ordered memory operations are more constrained than volatile loads and
stores because they must be ordered with respect to all other memory
operations.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@162861 91177308-0d34-0410-b5e6-96231b3b80d8
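To make the distinction concrete, here is a small sketch (not part of the patch) written against the MachineMemOperand API that the change itself uses below: the old query flagged only volatile accesses, while the renamed one flags any access that is not plain and unordered, which also covers atomic (ordered) loads and stores. The function names are hypothetical.

#include "llvm/CodeGen/MachineMemOperand.h"

// Illustration only: old vs. new predicate over a single memory operand.
static bool flaggedByOldQuery(const llvm::MachineMemOperand &MMO) {
  return MMO.isVolatile();          // volatile accesses only
}

static bool flaggedByNewQuery(const llvm::MachineMemOperand &MMO) {
  return !MMO.isUnordered();        // volatile or ordered (atomic) accesses
}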
-rw-r--r--  include/llvm/CodeGen/MachineInstr.h           | 10
-rw-r--r--  lib/CodeGen/MachineInstr.cpp                  | 16
-rw-r--r--  lib/CodeGen/ScheduleDAGInstrs.cpp             |  2
-rw-r--r--  lib/Target/Hexagon/HexagonVLIWPacketizer.cpp  |  4
4 files changed, 16 insertions, 16 deletions
diff --git a/include/llvm/CodeGen/MachineInstr.h b/include/llvm/CodeGen/MachineInstr.h
index e88351cb98..d636dfd441 100644
--- a/include/llvm/CodeGen/MachineInstr.h
+++ b/include/llvm/CodeGen/MachineInstr.h
@@ -857,11 +857,11 @@ public:
bool isSafeToReMat(const TargetInstrInfo *TII, AliasAnalysis *AA,
unsigned DstReg) const;
- /// hasVolatileMemoryRef - Return true if this instruction may have a
- /// volatile memory reference, or if the information describing the
- /// memory reference is not available. Return false if it is known to
- /// have no volatile memory references.
- bool hasVolatileMemoryRef() const;
+ /// hasOrderedMemoryRef - Return true if this instruction may have an ordered
+ /// or volatile memory reference, or if the information describing the memory
+ /// reference is not available. Return false if it is known to have no
+ /// ordered or volatile memory references.
+ bool hasOrderedMemoryRef() const;
/// isInvariantLoad - Return true if this instruction is loading from a
/// location whose value is invariant across the function. For example,
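A hypothetical caller of the declaration above (illustration only, not from the patch): a load may be freely reordered only when the conservative query proves it has no ordered or volatile memory references.

#include "llvm/CodeGen/MachineInstr.h"

// Sketch: treat a load as reorderable only when it is known to carry no
// ordered or volatile memory reference.
static bool isReorderableLoad(const llvm::MachineInstr &MI) {
  return MI.mayLoad() && !MI.mayStore() && !MI.hasOrderedMemoryRef();
}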
diff --git a/lib/CodeGen/MachineInstr.cpp b/lib/CodeGen/MachineInstr.cpp
index 7ff79b3a22..4a4c7217bd 100644
--- a/lib/CodeGen/MachineInstr.cpp
+++ b/lib/CodeGen/MachineInstr.cpp
@@ -1348,7 +1348,7 @@ bool MachineInstr::isSafeToMove(const TargetInstrInfo *TII,
// volatiles, but it is required for atomic loads. It is not allowed to move
// a load across an atomic load with Ordering > Monotonic.
if (mayStore() || isCall() ||
- (mayLoad() && hasVolatileMemoryRef())) {
+ (mayLoad() && hasOrderedMemoryRef())) {
SawStore = true;
return false;
}
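For reference, a hedged sketch of the usual caller pattern for isSafeToMove(), with the signature shown in the hunk header above; the helper name is hypothetical. With the renamed query, a volatile or atomic load is treated like a store and blocks the motion.

#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/Target/TargetInstrInfo.h"

// Sketch of a code-motion client: refuse to move MI across stores and
// across anything isSafeToMove() considers unsafe.
static bool canMoveWithinBlock(llvm::MachineInstr *MI,
                               const llvm::TargetInstrInfo *TII,
                               llvm::AliasAnalysis *AA) {
  bool DontMoveAcrossStore = true;  // conservatively refuse to cross stores
  return MI->isSafeToMove(TII, AA, DontMoveAcrossStore);
}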
@@ -1396,11 +1396,11 @@ bool MachineInstr::isSafeToReMat(const TargetInstrInfo *TII,
return true;
}
-/// hasVolatileMemoryRef - Return true if this instruction may have a
-/// volatile memory reference, or if the information describing the
-/// memory reference is not available. Return false if it is known to
-/// have no volatile memory references.
-bool MachineInstr::hasVolatileMemoryRef() const {
+/// hasOrderedMemoryRef - Return true if this instruction may have an ordered
+/// or volatile memory reference, or if the information describing the memory
+/// reference is not available. Return false if it is known to have no ordered
+/// memory references.
+bool MachineInstr::hasOrderedMemoryRef() const {
// An instruction known never to access memory won't have a volatile access.
if (!mayStore() &&
!mayLoad() &&
@@ -1413,9 +1413,9 @@ bool MachineInstr::hasVolatileMemoryRef() const {
if (memoperands_empty())
return true;
- // Check the memory reference information for volatile references.
+ // Check the memory reference information for ordered references.
for (mmo_iterator I = memoperands_begin(), E = memoperands_end(); I != E; ++I)
- if ((*I)->isVolatile())
+ if (!(*I)->isUnordered())
return true;
return false;
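The implementation above distinguishes three situations: instructions that provably never touch memory, instructions whose memory operands are missing (treated as ordered), and instructions whose operands can be inspected. A hedged sketch with a hypothetical classifier, using only the queries that appear in the patch:

#include "llvm/CodeGen/MachineInstr.h"

enum MemRefKind { NoMemoryAccess, UnknownMemRef, UnorderedMemRef, OrderedMemRef };

// Sketch: mirror the three-way decision made by hasOrderedMemoryRef().
static MemRefKind classifyMemRef(const llvm::MachineInstr &MI) {
  if (!MI.mayLoad() && !MI.mayStore() && !MI.isCall() &&
      !MI.hasUnmodeledSideEffects())
    return NoMemoryAccess;          // never accesses memory
  if (MI.memoperands_empty())
    return UnknownMemRef;           // no operand info: treated as ordered
  return MI.hasOrderedMemoryRef() ? OrderedMemRef : UnorderedMemRef;
}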
diff --git a/lib/CodeGen/ScheduleDAGInstrs.cpp b/lib/CodeGen/ScheduleDAGInstrs.cpp
index dfad18b394..c25f39d5fd 100644
--- a/lib/CodeGen/ScheduleDAGInstrs.cpp
+++ b/lib/CodeGen/ScheduleDAGInstrs.cpp
@@ -502,7 +502,7 @@ void ScheduleDAGInstrs::addVRegUseDeps(SUnit *SU, unsigned OperIdx) {
/// (like a call or something with unmodeled side effects).
static inline bool isGlobalMemoryObject(AliasAnalysis *AA, MachineInstr *MI) {
if (MI->isCall() || MI->hasUnmodeledSideEffects() ||
- (MI->hasVolatileMemoryRef() &&
+ (MI->hasOrderedMemoryRef() &&
(!MI->mayLoad() || !MI->isInvariantLoad(AA))))
return true;
return false;
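A hedged restatement of the barrier test above (illustration only; the function name is hypothetical): calls, unmodeled side effects, and ordered or volatile memory references all serialize against every other memory operation, except that a provably invariant load is exempt even when hasOrderedMemoryRef() answers conservatively.

#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/MachineInstr.h"

static bool isSchedulingBarrier(llvm::AliasAnalysis *AA, llvm::MachineInstr *MI) {
  if (MI->isCall() || MI->hasUnmodeledSideEffects())
    return true;                    // always a barrier
  if (!MI->hasOrderedMemoryRef())
    return false;                   // plain, unordered memory access
  // Ordered, volatile, or unknown memory reference: only a provably
  // invariant load escapes serialization against all other memory ops.
  return !(MI->mayLoad() && MI->isInvariantLoad(AA));
}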
diff --git a/lib/Target/Hexagon/HexagonVLIWPacketizer.cpp b/lib/Target/Hexagon/HexagonVLIWPacketizer.cpp
index a03ed03365..3d5f685028 100644
--- a/lib/Target/Hexagon/HexagonVLIWPacketizer.cpp
+++ b/lib/Target/Hexagon/HexagonVLIWPacketizer.cpp
@@ -3474,8 +3474,8 @@ bool HexagonPacketizerList::isLegalToPacketizeTogether(SUnit *SUI, SUnit *SUJ) {
// 1. Two loads unless they are volatile.
// 2. Two stores in V4 unless they are volatile.
else if ((DepType == SDep::Order) &&
- !I->hasVolatileMemoryRef() &&
- !J->hasVolatileMemoryRef()) {
+ !I->hasOrderedMemoryRef() &&
+ !J->hasOrderedMemoryRef()) {
if (QRI->Subtarget.hasV4TOps() &&
// hexagonv4 allows dual store.
MCIDI.mayStore() && MCIDJ.mayStore()) {
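The Hexagon rule above reduces to a simple guard (hedged sketch, hypothetical name): an Order dependence between two instructions may be relaxed for packetization only when neither side carries an ordered or volatile memory reference.

#include "llvm/CodeGen/MachineInstr.h"

// Sketch: gate packetization of Order-dependent instructions on the
// renamed query.
static bool orderDepMayBeRelaxed(const llvm::MachineInstr *I,
                                 const llvm::MachineInstr *J) {
  return !I->hasOrderedMemoryRef() && !J->hasOrderedMemoryRef();
}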