author      Hal Finkel <hfinkel@anl.gov>    2014-03-27 23:12:31 +0000
committer   Hal Finkel <hfinkel@anl.gov>    2014-03-27 23:12:31 +0000
commit      e2ee98ab169fe8d1d4bd39fe0ecb89274eceb438 (patch)
tree        d4f276d3816f78e5f52764f62813ead41fb29df0 /test/CodeGen/PowerPC
parent      d9524d66cd6a69ded63b29bed5413217444b162a (diff)
[PowerPC] Use a small cleanup pass to remove VSX self copies
As explained in r204976, because of how the allocation of VSX registers interacts
with the call-lowering code, we sometimes end up generating self VSX copies.
Specifically, things like this:

  %VSL2<def> = COPY %F2, %VSL2<imp-use,kill>

(where %F2 is really a sub-register of %VSL2, and so this copy is a nop)

This adds a small cleanup pass to remove these prior to post-RA scheduling.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@204980 91177308-0d34-0410-b5e6-96231b3b80d8
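
For illustration, below is a minimal sketch of what such a cleanup pass could look
like. It is not the code this commit adds: the pass name VSXSelfCopyCleanup is
hypothetical, the header names are assumptions, and the strategy of matching XXLOR
instructions whose operands are all the same register is inferred from the commit
message together with the test's "CHECK-NOT: xxlor 0, 0, 0" line (a VSX
register-to-register copy is emitted as "xxlor dst, src, src", so a self copy
degenerates into an xxlor with three identical operands).

// Hypothetical sketch only: erase VSX self copies, i.e. XXLOR instructions
// whose destination and both sources ended up in the same register after
// register allocation (such an instruction is a nop).
#include "PPC.h" // assumed target-internal header providing PPC::XXLOR
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"

using namespace llvm;

namespace {
struct VSXSelfCopyCleanup : public MachineFunctionPass { // hypothetical name
  static char ID;
  VSXSelfCopyCleanup() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    SmallVector<MachineInstr *, 4> ToDelete;

    // Collect "xxlor X, X, X" instructions first; erasing while iterating
    // would invalidate the instruction iterator, so delete in a second loop.
    for (MachineBasicBlock &MBB : MF)
      for (MachineInstr &MI : MBB)
        if (MI.getOpcode() == PPC::XXLOR &&
            MI.getOperand(0).getReg() == MI.getOperand(1).getReg() &&
            MI.getOperand(0).getReg() == MI.getOperand(2).getReg())
          ToDelete.push_back(&MI);

    for (MachineInstr *MI : ToDelete)
      MI->eraseFromParent();

    return !ToDelete.empty();
  }
};
} // end anonymous namespace

char VSXSelfCopyCleanup::ID = 0;

In a real backend, a pass along these lines would be registered in the PowerPC pass
pipeline just before post-RA scheduling, which is where the commit message says the
cleanup runs.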
Diffstat (limited to 'test/CodeGen/PowerPC')
-rw-r--r--   test/CodeGen/PowerPC/vsx-self-copy.ll   27
1 files changed, 27 insertions, 0 deletions
diff --git a/test/CodeGen/PowerPC/vsx-self-copy.ll b/test/CodeGen/PowerPC/vsx-self-copy.ll
new file mode 100644
index 0000000000..23615ca10c
--- /dev/null
+++ b/test/CodeGen/PowerPC/vsx-self-copy.ll
@@ -0,0 +1,27 @@
+; RUN: llc -mcpu=pwr7 -mattr=+vsx < %s | FileCheck %s
+target datalayout = "E-m:e-i64:64-n32:64"
+target triple = "powerpc64-unknown-linux-gnu"
+
+define double @takFP(double %x, double %y, double %z) #0 {
+entry:
+ br i1 undef, label %if.then, label %return
+
+if.then: ; preds = %if.then, %entry
+ %x.tr16 = phi double [ %call, %if.then ], [ %x, %entry ]
+ %call = tail call double @takFP(double undef, double undef, double undef)
+ %call4 = tail call double @takFP(double undef, double %x.tr16, double undef)
+ %cmp = fcmp olt double undef, %call
+ br i1 %cmp, label %if.then, label %return
+
+return: ; preds = %if.then, %entry
+ %z.tr.lcssa = phi double [ %z, %entry ], [ %call4, %if.then ]
+ ret double %z.tr.lcssa
+
+; CHECK: @takFP
+; CHECK-NOT: xxlor 0, 0, 0
+; CHECK: blr
+}
+
+attributes #0 = { nounwind readnone }
+attributes #1 = { nounwind }
+