author     Chris Lattner <sabre@nondot.org>   2008-12-07 00:28:02 +0000
committer  Chris Lattner <sabre@nondot.org>   2008-12-07 00:28:02 +0000
commit     3ac60821e283673c709e52368eb1a7c70704ef5a (patch)
tree       16d5d027af8f1fed05777dec034fb604647b2645 /lib
parent     28efbb4e3494daecf0bfbf15c6bb180b2764ff83 (diff)
A memdep query on a volatile load/store will always return clobber with the current implementation. Instead of returning a "precise clobber", just return a fuzzy one. This doesn't matter to any clients anyway and should speed up analysis time very slightly.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@60641 91177308-0d34-0410-b5e6-96231b3b80d8
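For context: callers see this change only through the MemDepResult they get back; for a volatile load or store the clobber now names the instruction immediately preceding the query rather than one located by scanning the block. The sketch below is illustrative only, assuming the MemoryDependenceAnalysis interface of this revision (getDependency, MemDepResult); clobberFor is a hypothetical helper, not part of this patch.

#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Instructions.h"
using namespace llvm;

// Hypothetical client: query memdep for a (possibly volatile) load.
// After this patch a volatile load is answered immediately with a
// clobber on the instruction right before it, instead of a "precise"
// clobber found by walking backwards through the block.
static Instruction *clobberFor(MemoryDependenceAnalysis &MDA, LoadInst *LI) {
  MemDepResult Dep = MDA.getDependency(LI);
  if (Dep.isClobber())
    return Dep.getInst();   // for a volatile load: simply the prior instruction
  return 0;                 // def or non-local result
}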
Diffstat (limited to 'lib')
-rw-r--r--  lib/Analysis/MemoryDependenceAnalysis.cpp  21
1 file changed, 10 insertions(+), 11 deletions(-)
diff --git a/lib/Analysis/MemoryDependenceAnalysis.cpp b/lib/Analysis/MemoryDependenceAnalysis.cpp
index 24e4552..cf748df 100644
--- a/lib/Analysis/MemoryDependenceAnalysis.cpp
+++ b/lib/Analysis/MemoryDependenceAnalysis.cpp
@@ -116,16 +116,23 @@ getDependencyFrom(Instruction *QueryInst, BasicBlock::iterator ScanIt,
// Get the pointer value for which dependence will be determined
Value *MemPtr = 0;
uint64_t MemSize = 0;
- bool MemVolatile = false;
if (StoreInst* S = dyn_cast<StoreInst>(QueryInst)) {
+ // If this is a volatile store, don't mess around with it. Just return the
+ // previous instruction as a clobber.
+ if (S->isVolatile())
+ return MemDepResult::getClobber(--ScanIt);
+
MemPtr = S->getPointerOperand();
MemSize = TD->getTypeStoreSize(S->getOperand(0)->getType());
- MemVolatile = S->isVolatile();
} else if (LoadInst* LI = dyn_cast<LoadInst>(QueryInst)) {
+ // If this is a volatile load, don't mess around with it. Just return the
+ // previous instruction as a clobber.
+ if (LI->isVolatile())
+ return MemDepResult::getClobber(--ScanIt);
+
MemPtr = LI->getPointerOperand();
MemSize = TD->getTypeStoreSize(LI->getType());
- MemVolatile = LI->isVolatile();
} else if (FreeInst* F = dyn_cast<FreeInst>(QueryInst)) {
MemPtr = F->getPointerOperand();
// FreeInsts erase the entire structure, not just a field.
@@ -145,10 +152,6 @@ getDependencyFrom(Instruction *QueryInst, BasicBlock::iterator ScanIt,
// Values depend on loads if the pointers are must aliased. This means that
// a load depends on another must aliased load from the same value.
if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
- // If the access is volatile and this is volatile, return a dependence.
- if (MemVolatile && LI->isVolatile())
- return MemDepResult::getClobber(LI);
-
Value *Pointer = LI->getPointerOperand();
uint64_t PointerSize = TD->getTypeStoreSize(LI->getType());
@@ -165,10 +168,6 @@ getDependencyFrom(Instruction *QueryInst, BasicBlock::iterator ScanIt,
}
if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
- // If the access is volatile and this is volatile, return a dependence.
- if (MemVolatile && SI->isVolatile())
- return MemDepResult::getClobber(SI);
-
Value *Pointer = SI->getPointerOperand();
uint64_t PointerSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());
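The claim that the lost precision "doesn't matter to any clients" follows from how MemDepResult is consumed: typical users branch on the kind of result and treat any clobber of a volatile access as a reason to give up, without caring which instruction the clobber names. A minimal, hypothetical consumer showing that pattern (not code from this patch, and assuming the same getDependency/MemDepResult interface as in the sketch above):

// Only the kind of result matters to this consumer, so a fuzzy clobber
// for a volatile access is as good as a precise one.
static bool hasLocalDef(MemoryDependenceAnalysis &MDA, Instruction *I) {
  MemDepResult Dep = MDA.getDependency(I);
  if (Dep.isClobber())
    return false;        // something may write this memory; give up
  if (Dep.isNonLocal())
    return false;        // dependence lies outside the current block
  return Dep.isDef();    // found a local defining access
}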