 lib/Target/ARM/ARMInstrNEON.td    | 35 ++++++++++++++
 test/MC/ARM/neon-shift-encoding.s | 41 +++++++++++++++++
 2 files changed, 76 insertions(+), 0 deletions(-)
diff --git a/lib/Target/ARM/ARMInstrNEON.td b/lib/Target/ARM/ARMInstrNEON.td
index b63ecc386f..743ce22aaf 100644
--- a/lib/Target/ARM/ARMInstrNEON.td
+++ b/lib/Target/ARM/ARMInstrNEON.td
@@ -5402,6 +5402,41 @@ def : NEONInstAlias<"vmul${p}.f32 $Qdn, $Dm$lane",
(VMULslfq QPR:$Qdn, QPR:$Qdn, DPR_VFP2:$Dm,
VectorIndex32:$lane, pred:$p)>;
+// VSHL (register) two-operand aliases.
+def : NEONInstAlias<"vshl${p}.s8 $Vdn, $Vm",
+ (VSHLsv8i8 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.s16 $Vdn, $Vm",
+ (VSHLsv4i16 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.s32 $Vdn, $Vm",
+ (VSHLsv2i32 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.s64 $Vdn, $Vm",
+ (VSHLsv1i64 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.u8 $Vdn, $Vm",
+ (VSHLuv8i8 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.u16 $Vdn, $Vm",
+ (VSHLuv4i16 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.u32 $Vdn, $Vm",
+ (VSHLuv2i32 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.u64 $Vdn, $Vm",
+ (VSHLuv1i64 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+
+def : NEONInstAlias<"vshl${p}.s8 $Vdn, $Vm",
+ (VSHLsv16i8 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.s16 $Vdn, $Vm",
+ (VSHLsv8i16 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.s32 $Vdn, $Vm",
+ (VSHLsv4i32 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.s64 $Vdn, $Vm",
+ (VSHLsv2i64 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.u8 $Vdn, $Vm",
+ (VSHLuv16i8 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.u16 $Vdn, $Vm",
+ (VSHLuv8i16 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.u32 $Vdn, $Vm",
+ (VSHLuv4i32 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vshl${p}.u64 $Vdn, $Vm",
+ (VSHLuv2i64 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+
// VLD1 single-lane pseudo-instructions. These need special handling for
// the lane index that an InstAlias can't handle, so we use these instead.
defm VLD1LNdAsm : NEONDT8AsmPseudoInst<"vld1${p}", "$list, $addr",
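Each alias above maps the two-operand spelling onto the corresponding three-operand VSHL (register) instruction, with the destination register repeated as the first source operand. A minimal usage sketch (illustrative only; the test additions below are the authoritative coverage):

    vshl.s8 d4, d5        @ two-operand alias form
    vshl.s8 d4, d4, d5    @ three-operand form the alias expands to; same encoding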
diff --git a/test/MC/ARM/neon-shift-encoding.s b/test/MC/ARM/neon-shift-encoding.s
index a7a1b83860..af37dd9a7c 100644
--- a/test/MC/ARM/neon-shift-encoding.s
+++ b/test/MC/ARM/neon-shift-encoding.s
@@ -235,3 +235,44 @@ _foo:
vqrshrn.u32 d16, q8, #13
@ CHECK: vqrshrn.u64 d16, q8, #13 @ encoding: [0x70,0x09,0xf3,0xf3]
vqrshrn.u64 d16, q8, #13
+
+@ Optional destination operand variants.
+ vshl.s8 q4, q5
+ vshl.s16 q4, q5
+ vshl.s32 q4, q5
+ vshl.s64 q4, q5
+
+ vshl.u8 q4, q5
+ vshl.u16 q4, q5
+ vshl.u32 q4, q5
+ vshl.u64 q4, q5
+
+ vshl.s8 d4, d5
+ vshl.s16 d4, d5
+ vshl.s32 d4, d5
+ vshl.s64 d4, d5
+
+ vshl.u8 d4, d5
+ vshl.u16 d4, d5
+ vshl.u32 d4, d5
+ vshl.u64 d4, d5
+
+@ CHECK: vshl.s8 q4, q4, q5 @ encoding: [0x48,0x84,0x0a,0xf2]
+@ CHECK: vshl.s16 q4, q4, q5 @ encoding: [0x48,0x84,0x1a,0xf2]
+@ CHECK: vshl.s32 q4, q4, q5 @ encoding: [0x48,0x84,0x2a,0xf2]
+@ CHECK: vshl.s64 q4, q4, q5 @ encoding: [0x48,0x84,0x3a,0xf2]
+
+@ CHECK: vshl.u8 q4, q4, q5 @ encoding: [0x48,0x84,0x0a,0xf3]
+@ CHECK: vshl.u16 q4, q4, q5 @ encoding: [0x48,0x84,0x1a,0xf3]
+@ CHECK: vshl.u32 q4, q4, q5 @ encoding: [0x48,0x84,0x2a,0xf3]
+@ CHECK: vshl.u64 q4, q4, q5 @ encoding: [0x48,0x84,0x3a,0xf3]
+
+@ CHECK: vshl.s8 d4, d4, d5 @ encoding: [0x04,0x44,0x05,0xf2]
+@ CHECK: vshl.s16 d4, d4, d5 @ encoding: [0x04,0x44,0x15,0xf2]
+@ CHECK: vshl.s32 d4, d4, d5 @ encoding: [0x04,0x44,0x25,0xf2]
+@ CHECK: vshl.s64 d4, d4, d5 @ encoding: [0x04,0x44,0x35,0xf2]
+
+@ CHECK: vshl.u8 d4, d4, d5 @ encoding: [0x04,0x44,0x05,0xf3]
+@ CHECK: vshl.u16 d4, d4, d5 @ encoding: [0x04,0x44,0x15,0xf3]
+@ CHECK: vshl.u32 d4, d4, d5 @ encoding: [0x04,0x44,0x25,0xf3]
+@ CHECK: vshl.u64 d4, d4, d5 @ encoding: [0x04,0x44,0x35,0xf3]
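For context, MC encoding tests such as this one are conventionally driven by assembling the file with llvm-mc and matching the @ CHECK lines with FileCheck. A sketch of such a RUN line (hypothetical flags; the file's actual RUN line is not part of this diff):

    @ RUN: llvm-mc -triple armv7 -mattr=+neon -show-encoding %s | FileCheck %s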