author    Nadav Rotem <nrotem@apple.com>  2012-08-13 23:03:43 +0000
committer Nadav Rotem <nrotem@apple.com>  2012-08-13 23:03:43 +0000
commit    8dff60e96a0b3044628511b0e43a59788de56b9d (patch)
tree      630d2132d7d822c855f54735a6fdf0e75e1dad05 /test/Analysis
parent    6d2986cd0374b86e23fe60a57b40ae01d057ce3b (diff)
download  llvm-8dff60e96a0b3044628511b0e43a59788de56b9d.tar.gz
          llvm-8dff60e96a0b3044628511b0e43a59788de56b9d.tar.bz2
          llvm-8dff60e96a0b3044628511b0e43a59788de56b9d.tar.xz
MemoryDependenceAnalysis attempts to find the first memory dependency for function calls.
Currently, if GetLocation reports that it did not find a valid pointer (this is the case for volatile loads/stores), we ignore the result. This patch adds code to handle the cases where we did not obtain a valid pointer.

rdar://11872864
PR12899

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@161802 91177308-0d34-0410-b5e6-96231b3b80d8
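The fix described above lands in the call-site dependency scan. The following is a minimal C++ sketch of that shape, assuming the LLVM 3.2-era API (GetLocation, AliasAnalysis::Location, MemDepResult::getClobber); names, placement, and the surrounding loop are approximations for illustration, not the verbatim patch:

  // Sketch of the scan in MemoryDependenceAnalysis::getCallSiteDependencyFrom
  // (approximate; the elided parts stand in for the pre-existing logic).
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // GetLocation returns the instruction's mod/ref behavior and, when it can,
    // the pointer it accesses. For volatile loads/stores it reports a mod/ref
    // effect but leaves Loc.Ptr null.
    AliasAnalysis::Location Loc;
    AliasAnalysis::ModRefResult MR = GetLocation(Inst, Loc, AA);

    if (Loc.Ptr) {
      // A simple memory access with a known pointer: it is a dependency of the
      // call only if the two locations may interact.
      if (AA->getModRefInfo(CS, Loc) != AliasAnalysis::NoModRef)
        return MemDepResult::getClobber(Inst);
      continue;
    }

    // ... pre-existing handling of call-vs-call dependencies elided ...

    // The case this commit adds: no valid pointer was obtained, but the
    // instruction still touches memory (e.g. a volatile load/store). Instead
    // of skipping it, conservatively report it as the dependency.
    if (MR != AliasAnalysis::NoModRef)
      return MemDepResult::getClobber(Inst);
  }

Treating the unknown-pointer case as a clobber is the conservative choice: clients such as DSE may keep a store they could otherwise remove, but they can no longer delete a store that a volatile access still observes.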
Diffstat (limited to 'test/Analysis')
-rw-r--r--  test/Analysis/GlobalsModRef/volatile-instrs.ll  34
1 file changed, 34 insertions, 0 deletions
diff --git a/test/Analysis/GlobalsModRef/volatile-instrs.ll b/test/Analysis/GlobalsModRef/volatile-instrs.ll
new file mode 100644
index 0000000000..49bce670b7
--- /dev/null
+++ b/test/Analysis/GlobalsModRef/volatile-instrs.ll
@@ -0,0 +1,34 @@
+; RUN: opt < %s -basicaa -dse -S | FileCheck %s
+
+target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
+target triple = "x86_64-apple-macosx10.8.0"
+
+%struct.anon = type { i32, i32, i32 }
+@b = global %struct.anon { i32 1, i32 0, i32 0 }, align 4
+@c = common global i32 0, align 4
+@a = common global %struct.anon zeroinitializer, align 4
+@.str = private unnamed_addr constant [4 x i8] c"%d\0A\00", align 1
+
+declare i32 @printf(i8* nocapture, ...) nounwind
+declare void @llvm.memcpy.p0i8.p0i8.i64(i8* nocapture, i8* nocapture, i64, i32, i1) nounwind
+
+
+; Make sure that the initial memcpy call does not go away
+; because the volatile load is in the way. PR12899
+
+; CHECK: main_entry:
+; CHECK-NEXT: tail call void @llvm.memcpy.p0i8.p0i8.i64
+
+define i32 @main() nounwind uwtable ssp {
+main_entry:
+ tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* bitcast (%struct.anon* @b to i8*), i8* bitcast (%struct.anon* @a to i8*), i64 12, i32 4, i1 false)
+ %0 = load volatile i32* getelementptr inbounds (%struct.anon* @b, i64 0, i32 0), align 4, !tbaa !0
+ store i32 %0, i32* @c, align 4, !tbaa !0
+ tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* bitcast (%struct.anon* @b to i8*), i8* bitcast (%struct.anon* @a to i8*), i64 12, i32 4, i1 false) nounwind
+ %call = tail call i32 (i8*, ...)* @printf(i8* getelementptr inbounds ([4 x i8]* @.str, i64 0, i64 0), i32 %0) nounwind
+ ret i32 0
+}
+
+!0 = metadata !{metadata !"int", metadata !1}
+!1 = metadata !{metadata !"omnipotent char", metadata !2}
+!2 = metadata !{metadata !"Simple C/C++ TBAA"}
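For reference, the IR in the test corresponds roughly to C++ source like the sketch below (a reconstruction for illustration; the struct and variable names are invented, not taken from the commit). Without the fix, dead store elimination sees the second struct copy overwriting @b, misses the intervening volatile load because no valid pointer is reported for it, and wrongly deletes the first copy:

  #include <cstdio>

  // Hypothetical source behind volatile-instrs.ll.
  struct Anon { int x, y, z; };
  Anon b = {1, 0, 0};   // @b
  Anon a;               // @a, zero-initialized
  int  c;               // @c

  int main() {
    b = a;                                      // first memcpy: must survive DSE
    c = *reinterpret_cast<volatile int *>(&b);  // volatile load of b.x
    b = a;                                      // second memcpy overwrites @b
    std::printf("%d\n", c);
    return 0;
  }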