summaryrefslogtreecommitdiff
path: root/test/CodeGen/AArch64
diff options
context:
space:
mode:
authorBradley Smith <bradley.smith@arm.com>2014-04-25 10:25:29 +0000
committerBradley Smith <bradley.smith@arm.com>2014-04-25 10:25:29 +0000
commit8aa927abb5588b777d24e9bc9488d91ca7a6860d (patch)
treea3f73e4bf555b2d7dc9152dbf88e76f96d8811b7 /test/CodeGen/AArch64
parent09d1d3d5880588d9f26bd4ed863e145cae477a89 (diff)
downloadllvm-8aa927abb5588b777d24e9bc9488d91ca7a6860d.tar.gz
llvm-8aa927abb5588b777d24e9bc9488d91ca7a6860d.tar.bz2
llvm-8aa927abb5588b777d24e9bc9488d91ca7a6860d.tar.xz
[ARM64] Print preferred aliases for SBFM/UBFM in InstPrinter
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@207219 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'test/CodeGen/AArch64')
-rw-r--r--test/CodeGen/AArch64/bitfield.ll20
-rw-r--r--test/CodeGen/AArch64/bool-loads.ll4
2 files changed, 9 insertions, 15 deletions
diff --git a/test/CodeGen/AArch64/bitfield.ll b/test/CodeGen/AArch64/bitfield.ll
index 7f721836a7..92f6d74908 100644
--- a/test/CodeGen/AArch64/bitfield.ll
+++ b/test/CodeGen/AArch64/bitfield.ll
@@ -64,7 +64,7 @@ define void @test_extendw(i32 %var) {
%uxt64 = zext i32 %var to i64
store volatile i64 %uxt64, i64* @var64
; CHECK-AARCH64: ubfx {{w[0-9]+}}, {{w[0-9]+}}, #0, #32
-; CHECK-ARM64: uxtw {{x[0-9]+}}, {{w[0-9]+}}
+; CHECK-ARM64: ubfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #32
ret void
}
@@ -124,8 +124,7 @@ define void @test_sext_inreg_64(i64 %in) {
%trunc_i1 = trunc i64 %in to i1
%sext_i1 = sext i1 %trunc_i1 to i64
store volatile i64 %sext_i1, i64* @var64
-; CHECK-AARCH64: sbfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #1
-; CHECK-ARM64: sbfm {{x[0-9]+}}, {{x[0-9]+}}, #0, #0
+; CHECK: sbfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #1
%trunc_i8 = trunc i64 %in to i8
%sext_i8 = sext i8 %trunc_i8 to i64
@@ -176,16 +175,14 @@ define i64 @test_sext_inreg_from_32(i32 %in) {
; Different registers are of course, possible, though suboptimal. This is
; making sure that a 64-bit "(sext_inreg (anyext GPR32), i1)" uses the 64-bit
; sbfx rather than just 32-bits.
-; CHECK-AARCH64: sbfx x0, x0, #0, #1
-; CHECK-ARM64: sbfm x0, x0, #0, #0
+; CHECK: sbfx x0, x0, #0, #1
ret i64 %ext
}
define i32 @test_ubfx32(i32* %addr) {
; CHECK-LABEL: test_ubfx32:
-; CHECK-AARCH64: ubfx {{w[0-9]+}}, {{w[0-9]+}}, #23, #3
-; CHECK-ARM64: ubfm {{w[0-9]+}}, {{w[0-9]+}}, #23, #25
+; CHECK: ubfx {{w[0-9]+}}, {{w[0-9]+}}, #23, #3
%fields = load i32* %addr
%shifted = lshr i32 %fields, 23
@@ -195,8 +192,7 @@ define i32 @test_ubfx32(i32* %addr) {
define i64 @test_ubfx64(i64* %addr) {
; CHECK-LABEL: test_ubfx64:
-; CHECK-AARCH64: ubfx {{x[0-9]+}}, {{x[0-9]+}}, #25, #10
-; CHECK-ARM64: ubfm {{x[0-9]+}}, {{x[0-9]+}}, #25, #34
+; CHECK: ubfx {{x[0-9]+}}, {{x[0-9]+}}, #25, #10
%fields = load i64* %addr
%shifted = lshr i64 %fields, 25
%masked = and i64 %shifted, 1023
@@ -205,8 +201,7 @@ define i64 @test_ubfx64(i64* %addr) {
define i32 @test_sbfx32(i32* %addr) {
; CHECK-LABEL: test_sbfx32:
-; CHECK-AARCH64: sbfx {{w[0-9]+}}, {{w[0-9]+}}, #6, #3
-; CHECK-ARM64: sbfm {{w[0-9]+}}, {{w[0-9]+}}, #6, #8
+; CHECK: sbfx {{w[0-9]+}}, {{w[0-9]+}}, #6, #3
%fields = load i32* %addr
%shifted = shl i32 %fields, 23
@@ -216,8 +211,7 @@ define i32 @test_sbfx32(i32* %addr) {
define i64 @test_sbfx64(i64* %addr) {
; CHECK-LABEL: test_sbfx64:
-; CHECK-AARCH64: sbfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #63
-; CHECK-ARM64: sbfm {{x[0-9]+}}, {{x[0-9]+}}, #0, #62
+; CHECK: sbfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #63
%fields = load i64* %addr
%shifted = shl i64 %fields, 1
diff --git a/test/CodeGen/AArch64/bool-loads.ll b/test/CodeGen/AArch64/bool-loads.ll
index daf2095338..37cc8e42f1 100644
--- a/test/CodeGen/AArch64/bool-loads.ll
+++ b/test/CodeGen/AArch64/bool-loads.ll
@@ -9,7 +9,7 @@ define i32 @test_sextloadi32() {
%val = load i1* @var
%ret = sext i1 %val to i32
; CHECK: ldrb {{w[0-9]+}}, [{{x[0-9]+}}, {{#?}}:lo12:var]
-; CHECK: {{sbfx x[0-9]+, x[0-9]+, #0, #1|sbfm w[0-9]+, w[0-9]+, #0, #0}}
+; CHECK: {{sbfx x[0-9]+, x[0-9]+, #0, #1|sbfx w[0-9]+, w[0-9]+, #0, #1}}
ret i32 %ret
; CHECK: ret
@@ -21,7 +21,7 @@ define i64 @test_sextloadi64() {
%val = load i1* @var
%ret = sext i1 %val to i64
; CHECK: ldrb {{w[0-9]+}}, [{{x[0-9]+}}, {{#?}}:lo12:var]
-; CHECK: {{sbfx x[0-9]+, x[0-9]+, #0, #1|sbfm x[0-9]+, x[0-9]+, #0, #0}}
+; CHECK: {{sbfx x[0-9]+, x[0-9]+, #0, #1}}
ret i64 %ret
; CHECK: ret