author     Evan Cheng <evan.cheng@apple.com>    2012-10-24 19:53:01 +0000
committer  Evan Cheng <evan.cheng@apple.com>    2012-10-24 19:53:01 +0000
commit     d258eb3ec5cc5c9a28d3a8cd80241c9df24ce3a1 (patch)
tree       1c59542feb0697620542e270f32eb637f28e87cf /test
parent     8c65549318950ff3fc1cb3d7a73fb50c688c78a5 (diff)
Fix a miscompilation caused by a typo. When turning an adde with a negative
value into an sbc with a positive number, the immediate should be
complemented, not negated. Also added a missing pattern for ARM codegen.

rdar://12559385

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@166613 91177308-0d34-0410-b5e6-96231b3b80d8
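Why the immediate must be complemented rather than negated: ARM's adc computes Rn + Op2 + C, while sbc computes Rn - Op2 - (1 - C), which equals Rn + ~Op2 + C. So an adde/adc of immediate imm corresponds exactly to an sbc of ~imm; substituting -imm leaves the result off by one. A minimal C sketch of the identity (the adc/sbc helpers merely model the instruction semantics and are not a real API; the immediate -36103 is an assumption, chosen because its complement, 36102, matches the #36102 the CHECK lines below expect):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Models of the flag-consuming ARM ops (not a real API):
 *   adc: Rd = Rn + Op2 + C
 *   sbc: Rd = Rn - Op2 - (1 - C)  ==  Rn + ~Op2 + C        */
static uint32_t adc(uint32_t rn, uint32_t op2, unsigned c) { return rn + op2 + c; }
static uint32_t sbc(uint32_t rn, uint32_t op2, unsigned c) { return rn - op2 - (1u - c); }

int main(void) {
    uint32_t x   = 0xDEADBEEF;          /* arbitrary register value     */
    uint32_t imm = (uint32_t)-36103;    /* a negative adde immediate    */
    for (unsigned c = 0; c <= 1; c++) {
        /* Correct transform: complement the immediate (~-36103 == 36102). */
        assert(adc(x, imm, c) == sbc(x, ~imm, c));
        /* The typo: negating instead leaves the result off by one. */
        assert(adc(x, imm, c) == sbc(x, -imm, c) + 1);
    }
    puts("complemented immediate matches; negated immediate is off by one");
    return 0;
}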
Diffstat (limited to 'test')
-rw-r--r--  test/CodeGen/ARM/carry.ll    | 13
-rw-r--r--  test/CodeGen/Thumb2/carry.ll | 13
2 files changed, 26 insertions(+), 0 deletions(-)
diff --git a/test/CodeGen/ARM/carry.ll b/test/CodeGen/ARM/carry.ll
index f84774d9b6..bf51cd627b 100644
--- a/test/CodeGen/ARM/carry.ll
+++ b/test/CodeGen/ARM/carry.ll
@@ -45,3 +45,16 @@ entry:
%0 = sub nsw i64 0, %x
ret i64 %0
}
+
+; rdar://12559385
+define i64 @f5(i32 %vi) {
+entry:
+; CHECK: f5:
+; CHECK: movw [[REG:r[0-9]+]], #36102
+; CHECK: sbc r{{[0-9]+}}, r{{[0-9]+}}, [[REG]]
+ %v0 = zext i32 %vi to i64
+ %v1 = xor i64 %v0, -155057456198619
+ %v4 = add i64 %v1, 155057456198619
+ %v5 = add i64 %v4, %v1
+ ret i64 %v5
+}
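The Thumb2 version of the test follows; apart from the function name it has the same body, but its CHECK expects the two-operand `sbcs` form, matching the narrow Thumb encoding that writes back to its first register operand and sets the flags (versus the three-operand ARM `sbc` checked above).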
diff --git a/test/CodeGen/Thumb2/carry.ll b/test/CodeGen/Thumb2/carry.ll
index de6f6e260d..85b4370fa5 100644
--- a/test/CodeGen/Thumb2/carry.ll
+++ b/test/CodeGen/Thumb2/carry.ll
@@ -20,3 +20,16 @@ entry:
%tmp2 = sub i64 %tmp1, %b
ret i64 %tmp2
}
+
+; rdar://12559385
+define i64 @f3(i32 %vi) {
+entry:
+; CHECK: f3:
+; CHECK: movw [[REG:r[0-9]+]], #36102
+; CHECK: sbcs r{{[0-9]+}}, [[REG]]
+ %v0 = zext i32 %vi to i64
+ %v1 = xor i64 %v0, -155057456198619
+ %v4 = add i64 %v1, 155057456198619
+ %v5 = add i64 %v4, %v1
+ ret i64 %v5
+}
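For quick sanity-checking outside of lit, both new tests share the same IR body; the following C transcription (the name f5_ref and the driver are mine, the constants are copied verbatim from the tests) can serve as a reference for differential comparison against compiled output:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Direct C transcription of the shared IR body of @f5/@f3 above. */
static uint64_t f5_ref(uint32_t vi) {
    uint64_t v0 = vi;                                /* zext i32 %vi to i64            */
    uint64_t v1 = v0 ^ (uint64_t)-155057456198619LL; /* xor with the negative constant */
    uint64_t v4 = v1 + 155057456198619ULL;           /* add its positive counterpart   */
    return v4 + v1;                                  /* second add, then ret           */
}

int main(void) {
    /* A build with the negated (rather than complemented) sbc immediate
     * would skew the high word of the intermediate sum by one. */
    for (uint32_t vi = 0; vi < 4; vi++)
        printf("f5_ref(%" PRIu32 ") = 0x%016" PRIX64 "\n", vi, f5_ref(vi));
    return 0;
}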