about summary refs log tree commit diff
path: root/lib/Analysis/MemorySSA.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'lib/Analysis/MemorySSA.cpp')
-rw-r--r--  lib/Analysis/MemorySSA.cpp  39
1 files changed, 10 insertions, 29 deletions
diff --git a/lib/Analysis/MemorySSA.cpp b/lib/Analysis/MemorySSA.cpp
index 8fe190e8bcf8..6e9368c49d65 100644
--- a/lib/Analysis/MemorySSA.cpp
+++ b/lib/Analysis/MemorySSA.cpp
@@ -192,8 +192,6 @@ template <> struct DenseMapInfo<MemoryLocOrCall> {
}
};
-enum class Reorderability { Always, IfNoAlias, Never };
-
} // end namespace llvm
/// This does one-way checks to see if Use could theoretically be hoisted above
@@ -202,22 +200,16 @@ enum class Reorderability { Always, IfNoAlias, Never };
/// This assumes that, for the purposes of MemorySSA, Use comes directly after
/// MayClobber, with no potentially clobbering operations in between them.
/// (Where potentially clobbering ops are memory barriers, aliased stores, etc.)
-static Reorderability getLoadReorderability(const LoadInst *Use,
- const LoadInst *MayClobber) {
+static bool areLoadsReorderable(const LoadInst *Use,
+ const LoadInst *MayClobber) {
bool VolatileUse = Use->isVolatile();
bool VolatileClobber = MayClobber->isVolatile();
// Volatile operations may never be reordered with other volatile operations.
if (VolatileUse && VolatileClobber)
- return Reorderability::Never;
-
- // The lang ref allows reordering of volatile and non-volatile operations.
- // Whether an aliasing nonvolatile load and volatile load can be reordered,
- // though, is ambiguous. Because it may not be best to exploit this ambiguity,
- // we only allow volatile/non-volatile reordering if the volatile and
- // non-volatile operations don't alias.
- Reorderability Result = VolatileUse || VolatileClobber
- ? Reorderability::IfNoAlias
- : Reorderability::Always;
+ return false;
+ // Otherwise, volatile doesn't matter here. From the language reference:
+ // 'optimizers may change the order of volatile operations relative to
+ // non-volatile operations.'"
// If a load is seq_cst, it cannot be moved above other loads. If its ordering
// is weaker, it can be moved above other loads. We just need to be sure that
@@ -229,9 +221,7 @@ static Reorderability getLoadReorderability(const LoadInst *Use,
bool SeqCstUse = Use->getOrdering() == AtomicOrdering::SequentiallyConsistent;
bool MayClobberIsAcquire = isAtLeastOrStrongerThan(MayClobber->getOrdering(),
AtomicOrdering::Acquire);
- if (SeqCstUse || MayClobberIsAcquire)
- return Reorderability::Never;
- return Result;
+ return !(SeqCstUse || MayClobberIsAcquire);
}
static bool instructionClobbersQuery(MemoryDef *MD,
@@ -265,18 +255,9 @@ static bool instructionClobbersQuery(MemoryDef *MD,
return isModOrRefSet(I);
}
- if (auto *DefLoad = dyn_cast<LoadInst>(DefInst)) {
- if (auto *UseLoad = dyn_cast<LoadInst>(UseInst)) {
- switch (getLoadReorderability(UseLoad, DefLoad)) {
- case Reorderability::Always:
- return false;
- case Reorderability::Never:
- return true;
- case Reorderability::IfNoAlias:
- return !AA.isNoAlias(UseLoc, MemoryLocation::get(DefLoad));
- }
- }
- }
+ if (auto *DefLoad = dyn_cast<LoadInst>(DefInst))
+ if (auto *UseLoad = dyn_cast<LoadInst>(UseInst))
+ return !areLoadsReorderable(UseLoad, DefLoad);
return isModSet(AA.getModRefInfo(DefInst, UseLoc));
}