Diffstat (limited to 'lib/Analysis/BasicAliasAnalysis.cpp')
 lib/Analysis/BasicAliasAnalysis.cpp | 98 +++++++++++++++++++++++++++++---------
 1 file changed, 75 insertions(+), 23 deletions(-)
diff --git a/lib/Analysis/BasicAliasAnalysis.cpp b/lib/Analysis/BasicAliasAnalysis.cpp
index bf1b689..f7bcd9e 100644
--- a/lib/Analysis/BasicAliasAnalysis.cpp
+++ b/lib/Analysis/BasicAliasAnalysis.cpp
@@ -27,6 +27,7 @@
#include "llvm/Pass.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/MemoryBuiltins.h"
+#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Target/TargetData.h"
#include "llvm/ADT/SmallPtrSet.h"
@@ -96,36 +97,53 @@ static bool isEscapeSource(const Value *V) {
return false;
}
-/// isObjectSmallerThan - Return true if we can prove that the object specified
-/// by V is smaller than Size.
-static bool isObjectSmallerThan(const Value *V, uint64_t Size,
- const TargetData &TD) {
+/// getObjectSize - Return the size of the object specified by V, or
+/// UnknownSize if unknown.
+static uint64_t getObjectSize(const Value *V, const TargetData &TD) {
const Type *AccessTy;
if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V)) {
+ if (!GV->hasDefinitiveInitializer())
+ return AliasAnalysis::UnknownSize;
AccessTy = GV->getType()->getElementType();
} else if (const AllocaInst *AI = dyn_cast<AllocaInst>(V)) {
if (!AI->isArrayAllocation())
AccessTy = AI->getType()->getElementType();
else
- return false;
+ return AliasAnalysis::UnknownSize;
} else if (const CallInst* CI = extractMallocCall(V)) {
if (!isArrayMalloc(V, &TD))
// The size is the argument to the malloc call.
if (const ConstantInt* C = dyn_cast<ConstantInt>(CI->getArgOperand(0)))
- return (C->getZExtValue() < Size);
- return false;
+ return C->getZExtValue();
+ return AliasAnalysis::UnknownSize;
} else if (const Argument *A = dyn_cast<Argument>(V)) {
if (A->hasByValAttr())
AccessTy = cast<PointerType>(A->getType())->getElementType();
else
- return false;
+ return AliasAnalysis::UnknownSize;
} else {
- return false;
+ return AliasAnalysis::UnknownSize;
}
if (AccessTy->isSized())
- return TD.getTypeAllocSize(AccessTy) < Size;
- return false;
+ return TD.getTypeAllocSize(AccessTy);
+ return AliasAnalysis::UnknownSize;
+}
+
+/// isObjectSmallerThan - Return true if we can prove that the object specified
+/// by V is smaller than Size.
+static bool isObjectSmallerThan(const Value *V, uint64_t Size,
+ const TargetData &TD) {
+ uint64_t ObjectSize = getObjectSize(V, TD);
+ return ObjectSize != AliasAnalysis::UnknownSize && ObjectSize < Size;
+}
+
+/// isObjectSize - Return true if we can prove that the object specified
+/// by V has size Size.
+static bool isObjectSize(const Value *V, uint64_t Size,
+ const TargetData &TD) {
+ uint64_t ObjectSize = getObjectSize(V, TD);
+ return ObjectSize != AliasAnalysis::UnknownSize && ObjectSize == Size;
}
//===----------------------------------------------------------------------===//
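The refactored predicates above are now thin wrappers over a single shared size query. A minimal sketch of the calling pattern (AccessSize is a stand-in name for the query size; TD is the TargetData the pass already holds):

    uint64_t ObjectSize = getObjectSize(V, TD);
    if (ObjectSize != AliasAnalysis::UnknownSize && ObjectSize < AccessSize) {
      // The object is provably smaller than the access being queried,
      // which is exactly the condition isObjectSmallerThan reports.
    }

The new hasDefinitiveInitializer() guard is presumably there because a global that may be overridden at link time (for example weak or common linkage) can be replaced by a larger definition, so the size of the type seen in this module is not authoritative.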
@@ -206,14 +224,14 @@ static Value *GetLinearExpression(Value *V, APInt &Scale, APInt &Offset,
Value *CastOp = cast<CastInst>(V)->getOperand(0);
unsigned OldWidth = Scale.getBitWidth();
unsigned SmallWidth = CastOp->getType()->getPrimitiveSizeInBits();
- Scale.trunc(SmallWidth);
- Offset.trunc(SmallWidth);
+ Scale = Scale.trunc(SmallWidth);
+ Offset = Offset.trunc(SmallWidth);
Extension = isa<SExtInst>(V) ? EK_SignExt : EK_ZeroExt;
Value *Result = GetLinearExpression(CastOp, Scale, Offset, Extension,
TD, Depth+1);
- Scale.zext(OldWidth);
- Offset.zext(OldWidth);
+ Scale = Scale.zext(OldWidth);
+ Offset = Offset.zext(OldWidth);
return Result;
}
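The trunc/zext changes here move to APInt's value-style resizing API: trunc and zext return the resized value rather than mutating the receiver in place, so the result must be assigned back. A minimal illustration, assuming llvm/ADT/APInt.h:

    llvm::APInt Scale(64, 16);            // 64-bit APInt holding the value 16
    llvm::APInt Narrow = Scale.trunc(32); // new 32-bit APInt; Scale is unchanged
    llvm::APInt Wide = Narrow.zext(64);   // zero-extended back out to 64 bits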
@@ -233,7 +251,7 @@ static Value *GetLinearExpression(Value *V, APInt &Scale, APInt &Offset,
/// the gep cannot necessarily be reconstructed from its decomposed form.
///
/// When TargetData is around, this function is capable of analyzing everything
-/// that Value::getUnderlyingObject() can look through. When not, it just looks
+/// that GetUnderlyingObject can look through. When not, it just looks
/// through pointer casts.
///
static const Value *
@@ -262,6 +280,14 @@ DecomposeGEPExpression(const Value *V, int64_t &BaseOffs,
V = Op->getOperand(0);
continue;
}
+
+ if (const Instruction *I = dyn_cast<Instruction>(V))
+ // TODO: Get a DominatorTree and use it here.
+ if (const Value *Simplified =
+ SimplifyInstruction(const_cast<Instruction *>(I), TD)) {
+ V = Simplified;
+ continue;
+ }
const GEPOperator *GEPOp = dyn_cast<GEPOperator>(Op);
if (GEPOp == 0)
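The SimplifyInstruction block added above lets DecomposeGEPExpression keep walking when the current value is an instruction that folds away entirely: SimplifyInstruction returns the simpler equivalent value, or null when nothing folds, and the loop then continues from the simplified value. A sketch of one case it catches:

    // IR:  %q = select i1 true, i8* %p, i8* %other
    // SimplifyInstruction on %q returns %p, so the decomposition of %q
    // proceeds exactly as if it had been handed %p directly.

The TODO notes that a DominatorTree would let SimplifyInstruction fold still more cases, notably trivial PHI nodes.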
@@ -528,7 +554,7 @@ BasicAliasAnalysis::pointsToConstantMemory(const Location &Loc, bool OrLocal) {
SmallVector<const Value *, 16> Worklist;
Worklist.push_back(Loc.Ptr);
do {
- const Value *V = Worklist.pop_back_val()->getUnderlyingObject();
+ const Value *V = GetUnderlyingObject(Worklist.pop_back_val(), TD);
if (!Visited.insert(V)) {
Visited.clear();
return AliasAnalysis::pointsToConstantMemory(Loc, OrLocal);
@@ -633,7 +659,7 @@ BasicAliasAnalysis::getModRefInfo(ImmutableCallSite CS,
assert(notDifferentParent(CS.getInstruction(), Loc.Ptr) &&
"AliasAnalysis query involving multiple functions!");
- const Value *Object = Loc.Ptr->getUnderlyingObject();
+ const Value *Object = GetUnderlyingObject(Loc.Ptr, TD);
// If this is a tail call and Loc.Ptr points to a stack location, we know that
// the tail call cannot access or modify the local stack.
@@ -687,10 +713,15 @@ BasicAliasAnalysis::getModRefInfo(ImmutableCallSite CS,
Len = LenCI->getZExtValue();
Value *Dest = II->getArgOperand(0);
Value *Src = II->getArgOperand(1);
+ // If it can't overlap the source or the dest, then it doesn't modref the loc.
if (isNoAlias(Location(Dest, Len), Loc)) {
if (isNoAlias(Location(Src, Len), Loc))
return NoModRef;
+ // If it can't overlap the dest, then worst case it reads the loc.
Min = Ref;
+ } else if (isNoAlias(Location(Src, Len), Loc)) {
+ // If it can't overlap the source, then worst case it mutates the loc.
+ Min = Mod;
}
break;
}
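Taken together, the memcpy/memmove handling now covers all four combinations; assuming Min enters this case as ModRef:

    Dest no-alias Loc?   Src no-alias Loc?   Result
    yes                  yes                 NoModRef (touches neither)
    yes                  no                  Ref (can only read Loc, via Src)
    no                   yes                 Mod (can only write Loc, via Dest)
    no                   no                  ModRef (Min left unchanged)

The Mod row is the newly added case: a copy whose source provably cannot overlap Loc may still write it, but cannot read it.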
@@ -703,6 +734,8 @@ BasicAliasAnalysis::getModRefInfo(ImmutableCallSite CS,
if (isNoAlias(Location(Dest, Len), Loc))
return NoModRef;
}
+ // We know that memset doesn't load anything.
+ Min = Mod;
break;
case Intrinsic::atomic_cmp_swap:
case Intrinsic::atomic_swap:
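For memset the same reasoning is one-sided, which is what the new Min = Mod covers even when the length is not a known constant:

    // memset(p, 0, n) vs. Loc: NoModRef when p provably cannot overlap Loc;
    // otherwise at most Mod, since a memset never reads from Loc.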
@@ -754,7 +787,7 @@ BasicAliasAnalysis::getModRefInfo(ImmutableCallSite CS,
/// aliasGEP - Provide a bunch of ad-hoc rules to disambiguate a GEP instruction
/// against another pointer. We know that V1 is a GEP, but we don't know
-/// anything about V2. UnderlyingV1 is GEP1->getUnderlyingObject(),
+/// anything about V2. UnderlyingV1 is GetUnderlyingObject(GEP1, TD),
/// UnderlyingV2 is the same for V2.
///
AliasAnalysis::AliasResult
@@ -800,7 +833,7 @@ BasicAliasAnalysis::aliasGEP(const GEPOperator *GEP1, uint64_t V1Size,
// to handle without it.
if (GEP1BasePtr != UnderlyingV1 || GEP2BasePtr != UnderlyingV2) {
assert(TD == 0 &&
- "DecomposeGEPExpression and getUnderlyingObject disagree!");
+ "DecomposeGEPExpression and GetUnderlyingObject disagree!");
return MayAlias;
}
@@ -836,7 +869,7 @@ BasicAliasAnalysis::aliasGEP(const GEPOperator *GEP1, uint64_t V1Size,
// to handle without it.
if (GEP1BasePtr != UnderlyingV1) {
assert(TD == 0 &&
- "DecomposeGEPExpression and getUnderlyingObject disagree!");
+ "DecomposeGEPExpression and GetUnderlyingObject disagree!");
return MayAlias;
}
}
@@ -850,6 +883,17 @@ BasicAliasAnalysis::aliasGEP(const GEPOperator *GEP1, uint64_t V1Size,
if (GEP1BaseOffset == 0 && GEP1VariableIndices.empty())
return MustAlias;
+ // If there is a difference between the pointers, but the difference is
+ // less than the size of the associated memory object, then we know
+ // that the objects are partially overlapping.
+ if (GEP1BaseOffset != 0 && GEP1VariableIndices.empty()) {
+ if (GEP1BaseOffset >= 0 ?
+ (V2Size != UnknownSize && (uint64_t)GEP1BaseOffset < V2Size) :
+ (V1Size != UnknownSize && -(uint64_t)GEP1BaseOffset < V1Size &&
+ GEP1BaseOffset != INT64_MIN))
+ return PartialAlias;
+ }
+
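A worked instance of the new PartialAlias rule, assuming a simple byte array on a 64-bit target:

    char buf[8];
    // V1 = GEP buf, 4: GEP1BaseOffset = 4, with V1Size = 4 (bytes [4,8))
    // V2 = buf:                                 V2Size = 8 (bytes [0,8))
    // GEP1BaseOffset is nonzero, positive, and less than V2Size, so the
    // two accesses provably overlap without being identical: PartialAlias.

The GEP1BaseOffset != INT64_MIN test guards the negation in the negative-offset branch: -(uint64_t)INT64_MIN wraps back to the same bit pattern, so that offset is conservatively left to the checks below.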
// If we have a known constant offset, see if this offset is larger than the
// access size being queried. If so, and if no variable indices can remove
// pieces of this constant, then we know we have a no-alias. For example,
@@ -1026,8 +1070,8 @@ BasicAliasAnalysis::aliasCheck(const Value *V1, uint64_t V1Size,
return NoAlias; // Scalars cannot alias each other
// Figure out what objects these things are pointing to if we can.
- const Value *O1 = V1->getUnderlyingObject();
- const Value *O2 = V2->getUnderlyingObject();
+ const Value *O1 = GetUnderlyingObject(V1, TD);
+ const Value *O2 = GetUnderlyingObject(V2, TD);
// Null values in the default address space don't point to any object, so they
// don't alias any other pointer.
@@ -1113,6 +1157,14 @@ BasicAliasAnalysis::aliasCheck(const Value *V1, uint64_t V1Size,
if (Result != MayAlias) return Result;
}
+ // If both pointers are pointing into the same object and one of them
+ // accesses the entire object, then the accesses must
+ // overlap in some way.
+ if (TD && O1 == O2)
+ if ((V1Size != UnknownSize && isObjectSize(O1, V1Size, *TD)) ||
+ (V2Size != UnknownSize && isObjectSize(O2, V2Size, *TD)))
+ return PartialAlias;
+
return AliasAnalysis::alias(Location(V1, V1Size, V1TBAAInfo),
Location(V2, V2Size, V2TBAAInfo));
}
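Finally, a worked instance of the whole-object rule in aliasCheck:

    char obj[16];
    // O1 == O2 == obj. If V1's access covers all 16 bytes (so that
    // isObjectSize(O1, 16, *TD) holds), then any V2 access into obj,
    // whatever its offset or size, must touch at least one of those
    // bytes. PartialAlias is the right conservative answer here, since
    // the two accesses need not share a start address or an extent.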