author     Jakob Stoklund Olesen <stoklund@2pi.dk>    2012-09-13 18:31:27 +0000
committer  Jakob Stoklund Olesen <stoklund@2pi.dk>    2012-09-13 18:31:27 +0000
commit     7bba7d0efc69ff3da5a064a5d77513dd5aca0f15 (patch)
tree       f811128f53af99ed0996105df15eb81ecb3d7102 /test/CodeGen/X86/tailcall-64.ll
parent     ce52bc53538df8e5412ec507f2da3661c991baf1 (diff)
Fix the TCRETURNmi64 bug differently.
Add a PatFrag that matches X86tcret only when it uses 6 fixed registers or fewer. This avoids folding loads into a TCRETURNmi64 that would use 7 or more volatile registers.

<rdar://problem/12282281>

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@163819 91177308-0d34-0410-b5e6-96231b3b80d8
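The PatFrag itself lands in the X86 .td files, outside this test-only diffstat. A sketch of what such a fragment could look like, assuming X86tcret's SDNode operands are laid out as (chain, ptr, stack-adjust, regs..., glue) so that register operands start at index 3 (operand layout and file placement are assumptions here, not a verbatim quote of the patch):

// Sketch: match X86tcret only when it carries at most 6 fixed registers.
// Counting RegisterSDNode operands approximates the number of volatile
// registers the tail call needs; refuse the match on the 7th.
def X86tcret_6regs : PatFrag<(ops node:$ptr, node:$off),
                             (X86tcret node:$ptr, node:$off), [{
  // Assumed X86tcret args: (chain, ptr, imm, regs..., glue)
  unsigned NumRegs = 0;
  for (unsigned i = 3, e = N->getNumOperands(); i != e; ++i)
    if (isa<RegisterSDNode>(N->getOperand(i)) && ++NumRegs > 6)
      return false;
  return true;
}]>;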
Diffstat (limited to 'test/CodeGen/X86/tailcall-64.ll')
-rw-r--r--    test/CodeGen/X86/tailcall-64.ll    39
1 file changed, 39 insertions(+), 0 deletions(-)
diff --git a/test/CodeGen/X86/tailcall-64.ll b/test/CodeGen/X86/tailcall-64.ll
index 7030753415..df7af69a99 100644
--- a/test/CodeGen/X86/tailcall-64.ll
+++ b/test/CodeGen/X86/tailcall-64.ll
@@ -93,4 +93,43 @@ define { i64, i64 } @crash(i8* %this) {
ret { i64, i64 } %mrv7
}
+; Check that we can fold an indexed load into a tail call instruction.
+; CHECK: fold_indexed_load
+; CHECK: leaq (%rsi,%rsi,4), %[[RAX:r..]]
+; CHECK: movq _func_table@GOTPCREL(%rip), %[[RCX:r..]]
+; CHECK: jmpq *16(%[[RCX]],%[[RAX]],8) # TAILCALL
+%struct.funcs = type { i32 (i8*, i32*, i32)*, i32 (i8*)*, i32 (i8*)*, i32 (i8*, i32)*, i32 }
+@func_table = external global [0 x %struct.funcs]
+define void @fold_indexed_load(i8* %mbstr, i64 %idxprom) nounwind uwtable ssp {
+entry:
+ %dsplen = getelementptr inbounds [0 x %struct.funcs]* @func_table, i64 0, i64 %idxprom, i32 2
+ %x1 = load i32 (i8*)** %dsplen, align 8
+ %call = tail call i32 %x1(i8* %mbstr) nounwind
+ ret void
+}
+; <rdar://problem/12282281> Fold an indexed load into the tail call instruction.
+; Calling a varargs function with 6 arguments requires 7 registers (%al is the
+; vector count for varargs functions). This leaves %r11 as the only available
+; scratch register.
+;
+; It is not possible to fold an indexed load into TCRETURNmi64 in that case.
+;
+; typedef int (*funcptr)(void*, ...);
+; extern const funcptr funcs[];
+; int f(int n) {
+; return funcs[n](0, 0, 0, 0, 0, 0);
+; }
+;
+; CHECK: rdar12282281
+; CHECK: jmpq *%r11 # TAILCALL
+@funcs = external constant [0 x i32 (i8*, ...)*]
+
+define i32 @rdar12282281(i32 %n) nounwind uwtable ssp {
+entry:
+ %idxprom = sext i32 %n to i64
+ %arrayidx = getelementptr inbounds [0 x i32 (i8*, ...)*]* @funcs, i64 0, i64 %idxprom
+ %0 = load i32 (i8*, ...)** %arrayidx, align 8
+ %call = tail call i32 (i8*, ...)* %0(i8* null, i32 0, i32 0, i32 0, i32 0, i32 0) nounwind
+ ret i32 %call
+}
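With a fragment like X86tcret_6regs in place, the load-folding pattern for TCRETURNmi64 can be guarded by it instead of by plain X86tcret, which is why the rdar12282281 test above expects the call to go through %r11 (jmpq *%r11) rather than a folded memory operand. A rough sketch of that guarded pattern (the exact form and Requires clause are assumptions):

// Sketch: only fold a load into the memory-indirect tail call when the
// register budget allows it, i.e. when X86tcret_6regs matches.
def : Pat<(X86tcret_6regs (load addr:$dst), imm:$off),
          (TCRETURNmi64 addr:$dst, imm:$off)>,
      Requires<[In64BitMode]>;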