author     Jakob Stoklund Olesen <stoklund@2pi.dk>  2013-02-21 19:35:21 +0000
committer  Jakob Stoklund Olesen <stoklund@2pi.dk>  2013-02-21 19:35:21 +0000
commit     601158a18e325879b224bd1979d824407ed98bc7 (patch)
tree       2c1fbf778b42e97d322a074026fcc9b0b9092ea5 /test/CodeGen/Thumb2
parent     b300455b5817f099d64aad8f9356e0e23fa9a87e (diff)
Make RAFast::UsedInInstr indexed by register units.
This fixes some problems with overly conservative checking: we were marking all aliases of a register as used, and then also checking all aliases when allocating a register.

<rdar://problem/13249625>

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@175782 91177308-0d34-0410-b5e6-96231b3b80d8
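For context, a minimal sketch (not the actual RAFast code) of what indexing UsedInInstr by register units means: a register is marked used by setting only the units it covers, and a candidate is rejected only if one of its units is already set, so aliasing falls out of shared units instead of explicit alias walks. The register description, unit count, and class names below are invented for illustration.

#include <bitset>
#include <vector>

namespace sketch {

// Assume each physical register is described by the register units it
// covers; aliasing registers share units (e.g. a Q register covers the
// units of the two D registers it overlaps).
struct RegisterDesc {
  std::vector<unsigned> Units;
};

constexpr unsigned NumRegUnits = 64; // hypothetical target size

class UsedInInstrSet {
  std::bitset<NumRegUnits> UsedUnits; // indexed by register unit

public:
  void clear() { UsedUnits.reset(); }

  // Mark a register as used: set only the units it actually covers.
  // There is no need to walk and mark every aliasing register.
  void markUsed(const RegisterDesc &Reg) {
    for (unsigned U : Reg.Units)
      UsedUnits.set(U);
  }

  // A register is free for allocation iff none of its units are used.
  // Overlap with any alias is detected through the shared units, so no
  // separate "check all aliases" loop is required.
  bool isUsed(const RegisterDesc &Reg) const {
    for (unsigned U : Reg.Units)
      if (UsedUnits.test(U))
        return true;
    return false;
  }
};

} // namespace sketch

In this scheme, marking d0 as used automatically makes q0 unavailable (they share a unit), while registers that do not overlap d0 stay free; the test added below stresses exactly this with a dense set of q-register clobbers.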
Diffstat (limited to 'test/CodeGen/Thumb2')
-rw-r--r--  test/CodeGen/Thumb2/crash.ll  |  9
1 file changed, 9 insertions, 0 deletions
diff --git a/test/CodeGen/Thumb2/crash.ll b/test/CodeGen/Thumb2/crash.ll
index cb4d08058f..6ce0b82b94 100644
--- a/test/CodeGen/Thumb2/crash.ll
+++ b/test/CodeGen/Thumb2/crash.ll
@@ -1,4 +1,5 @@
; RUN: llc < %s -mtriple=thumbv7-apple-darwin -mcpu=cortex-a8 -verify-machineinstrs
+; RUN: llc < %s -mtriple=thumbv7-apple-darwin -mcpu=cortex-a8 -verify-machineinstrs -O0
target datalayout = "e-p:32:32:32-i1:8:32-i8:8:32-i16:16:32-i32:32:32-i64:32:32-f32:32:32-f64:32:32-v64:64:64-v128:128:128-a0:0:32-n32"
target triple = "thumbv7-apple-darwin10"
@@ -76,3 +77,11 @@ entry:
store i32 %num, i32* %p2, align 4
ret void
}
+
+; Check RAFast handling of inline assembly with many dense clobbers.
+; The large tuple aliases of the vector registers can cause problems.
+define void @rdar13249625(double* nocapture %p) nounwind {
+ %1 = tail call double asm sideeffect "@ $0", "=w,~{d0},~{q1},~{q2},~{q3},~{q4},~{q5},~{q6},~{q7},~{q8},~{q9},~{q10},~{q11},~{q12},~{q13},~{q14},~{q15}"() nounwind
+ store double %1, double* %p, align 4
+ ret void
+}