-rw-r--r--   lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp   11
-rw-r--r--   lib/Target/X86/X86InstrAVX512.td                   56
-rw-r--r--   utils/TableGen/X86RecognizableInstr.cpp            40
3 files changed, 67 insertions, 40 deletions
diff --git a/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp b/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp
index 7173d51e6a..0c9fd91e31 100644
--- a/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp
+++ b/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp
@@ -868,11 +868,12 @@ void X86MCCodeEmitter::EmitVEXOpcodePrefix(uint64_t TSFlags, unsigned &CurByte,
case X86II::MRM6r: case X86II::MRM7r:
// MRM0r-MRM7r instructions forms:
// dst(VEX_4V), src(ModR/M), imm8
- VEX_4V = getVEXRegisterEncoding(MI, CurOp);
- if (HasEVEX && X86II::is32ExtendedReg(MI.getOperand(CurOp).getReg()))
- EVEX_V2 = 0x0;
- CurOp++;
-
+ if (HasVEX_4V) {
+ VEX_4V = getVEXRegisterEncoding(MI, CurOp);
+ if (HasEVEX && X86II::is32ExtendedReg(MI.getOperand(CurOp).getReg()))
+ EVEX_V2 = 0x0;
+ CurOp++;
+ }
if (HasEVEX_K)
EVEX_aaa = getWriteMaskRegisterEncoding(MI, CurOp++);
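
The emitter change guards consumption of the VEX.vvvv operand behind HasVEX_4V, so MRM0r-MRM7r forms without a 4V operand no longer advance the operand cursor past it, and any EVEX writemask is consumed right after the vvvv register when both are present. A minimal, standalone sketch of the resulting operand-cursor order (illustrative names only, not the real emitter code):

    // Hedged model of the patched cursor logic in EmitVEXOpcodePrefix:
    // the vvvv register is consumed only when the form actually has one,
    // and the EVEX writemask (if any) is consumed immediately after it.
    #include <cstdio>

    unsigned modRMOperandIndex(bool HasVEX_4V, bool HasEVEX_K) {
      unsigned CurOp = 0;
      if (HasVEX_4V)   // dst encoded in VEX.vvvv
        ++CurOp;
      if (HasEVEX_K)   // {%k} writemask register
        ++CurOp;
      return CurOp;    // first operand left for the ModR/M field
    }

    int main() {
      std::printf("no 4V, no mask: %u\n", modRMOperandIndex(false, false)); // 0
      std::printf("4V only:        %u\n", modRMOperandIndex(true,  false)); // 1
      std::printf("4V + mask:      %u\n", modRMOperandIndex(true,  true));  // 2
    }
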
diff --git a/lib/Target/X86/X86InstrAVX512.td b/lib/Target/X86/X86InstrAVX512.td
index c3fb8019cc..0cb946ddb8 100644
--- a/lib/Target/X86/X86InstrAVX512.td
+++ b/lib/Target/X86/X86InstrAVX512.td
@@ -1720,78 +1720,98 @@ defm VPTESTMQZ : avx512_vptest<0x27, "vptestmq", VK8, VR512, f512mem, memopv8i
// AVX-512 Shift instructions
//===----------------------------------------------------------------------===//
multiclass avx512_shift_rmi<bits<8> opc, Format ImmFormR, Format ImmFormM,
- string OpcodeStr,
- SDNode OpNode, RegisterClass RC, ValueType vt,
- X86MemOperand x86memop, PatFrag mem_frag> {
+ string OpcodeStr, SDNode OpNode, RegisterClass RC,
+ ValueType vt, X86MemOperand x86memop, PatFrag mem_frag,
+ RegisterClass KRC> {
def ri : AVX512BIi8<opc, ImmFormR, (outs RC:$dst),
(ins RC:$src1, i32i8imm:$src2),
!strconcat(OpcodeStr, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"),
[(set RC:$dst, (vt (OpNode RC:$src1, (i32 imm:$src2))))],
SSE_INTSHIFT_ITINS_P.rr>, EVEX_4V;
+ def rik : AVX512BIi8<opc, ImmFormR, (outs RC:$dst),
+ (ins KRC:$mask, RC:$src1, i32i8imm:$src2),
+ !strconcat(OpcodeStr,
+ "\t{$src2, $src1, $dst {${mask}}|$dst {${mask}}, $src1, $src2}"),
+ [], SSE_INTSHIFT_ITINS_P.rr>, EVEX_4V, EVEX_K;
def mi: AVX512BIi8<opc, ImmFormM, (outs RC:$dst),
(ins x86memop:$src1, i32i8imm:$src2),
!strconcat(OpcodeStr, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"),
[(set RC:$dst, (OpNode (mem_frag addr:$src1),
(i32 imm:$src2)))], SSE_INTSHIFT_ITINS_P.rm>, EVEX_4V;
+ def mik: AVX512BIi8<opc, ImmFormM, (outs RC:$dst),
+ (ins KRC:$mask, x86memop:$src1, i32i8imm:$src2),
+ !strconcat(OpcodeStr,
+ "\t{$src2, $src1, $dst {${mask}}|$dst {${mask}}, $src1, $src2}"),
+ [], SSE_INTSHIFT_ITINS_P.rm>, EVEX_4V, EVEX_K;
}
multiclass avx512_shift_rrm<bits<8> opc, string OpcodeStr, SDNode OpNode,
RegisterClass RC, ValueType vt, ValueType SrcVT,
- PatFrag bc_frag> {
+ PatFrag bc_frag, RegisterClass KRC> {
// src2 is always 128-bit
def rr : AVX512BI<opc, MRMSrcReg, (outs RC:$dst),
(ins RC:$src1, VR128X:$src2),
!strconcat(OpcodeStr, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"),
[(set RC:$dst, (vt (OpNode RC:$src1, (SrcVT VR128X:$src2))))],
SSE_INTSHIFT_ITINS_P.rr>, EVEX_4V;
+ def rrk : AVX512BI<opc, MRMSrcReg, (outs RC:$dst),
+ (ins KRC:$mask, RC:$src1, VR128X:$src2),
+ !strconcat(OpcodeStr,
+ "\t{$src2, $src1, $dst {${mask}}|$dst {${mask}}, $src1, $src2}"),
+ [], SSE_INTSHIFT_ITINS_P.rr>, EVEX_4V, EVEX_K;
def rm : AVX512BI<opc, MRMSrcMem, (outs RC:$dst),
(ins RC:$src1, i128mem:$src2),
!strconcat(OpcodeStr, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"),
[(set RC:$dst, (vt (OpNode RC:$src1,
(bc_frag (memopv2i64 addr:$src2)))))],
SSE_INTSHIFT_ITINS_P.rm>, EVEX_4V;
+ def rmk : AVX512BI<opc, MRMSrcMem, (outs RC:$dst),
+ (ins KRC:$mask, RC:$src1, i128mem:$src2),
+ !strconcat(OpcodeStr,
+ "\t{$src2, $src1, $dst {${mask}}|$dst {${mask}}, $src1, $src2}"),
+ [], SSE_INTSHIFT_ITINS_P.rm>, EVEX_4V, EVEX_K;
}
defm VPSRLDZ : avx512_shift_rmi<0x72, MRM2r, MRM2m, "vpsrld", X86vsrli,
- VR512, v16i32, i512mem, memopv16i32>, EVEX_V512,
- EVEX_CD8<32, CD8VF>;
+ VR512, v16i32, i512mem, memopv16i32, VK16WM>,
+ EVEX_V512, EVEX_CD8<32, CD8VF>;
defm VPSRLDZ : avx512_shift_rrm<0xD2, "vpsrld", X86vsrl,
- VR512, v16i32, v4i32, bc_v4i32>, EVEX_V512,
+ VR512, v16i32, v4i32, bc_v4i32, VK16WM>, EVEX_V512,
EVEX_CD8<32, CD8VQ>;
defm VPSRLQZ : avx512_shift_rmi<0x73, MRM2r, MRM2m, "vpsrlq", X86vsrli,
- VR512, v8i64, i512mem, memopv8i64>, EVEX_V512,
+ VR512, v8i64, i512mem, memopv8i64, VK8WM>, EVEX_V512,
EVEX_CD8<64, CD8VF>, VEX_W;
defm VPSRLQZ : avx512_shift_rrm<0xD3, "vpsrlq", X86vsrl,
- VR512, v8i64, v2i64, bc_v2i64>, EVEX_V512,
+ VR512, v8i64, v2i64, bc_v2i64, VK8WM>, EVEX_V512,
EVEX_CD8<64, CD8VQ>, VEX_W;
defm VPSLLDZ : avx512_shift_rmi<0x72, MRM6r, MRM6m, "vpslld", X86vshli,
- VR512, v16i32, i512mem, memopv16i32>, EVEX_V512,
+ VR512, v16i32, i512mem, memopv16i32, VK16WM>, EVEX_V512,
EVEX_CD8<32, CD8VF>;
defm VPSLLDZ : avx512_shift_rrm<0xF2, "vpslld", X86vshl,
- VR512, v16i32, v4i32, bc_v4i32>, EVEX_V512,
+ VR512, v16i32, v4i32, bc_v4i32, VK16WM>, EVEX_V512,
EVEX_CD8<32, CD8VQ>;
defm VPSLLQZ : avx512_shift_rmi<0x73, MRM6r, MRM6m, "vpsllq", X86vshli,
- VR512, v8i64, i512mem, memopv8i64>, EVEX_V512,
+ VR512, v8i64, i512mem, memopv8i64, VK8WM>, EVEX_V512,
EVEX_CD8<64, CD8VF>, VEX_W;
defm VPSLLQZ : avx512_shift_rrm<0xF3, "vpsllq", X86vshl,
- VR512, v8i64, v2i64, bc_v2i64>, EVEX_V512,
+ VR512, v8i64, v2i64, bc_v2i64, VK8WM>, EVEX_V512,
EVEX_CD8<64, CD8VQ>, VEX_W;
defm VPSRADZ : avx512_shift_rmi<0x72, MRM4r, MRM4m, "vpsrad", X86vsrai,
- VR512, v16i32, i512mem, memopv16i32>, EVEX_V512,
- EVEX_CD8<32, CD8VF>;
+ VR512, v16i32, i512mem, memopv16i32, VK16WM>,
+ EVEX_V512, EVEX_CD8<32, CD8VF>;
defm VPSRADZ : avx512_shift_rrm<0xE2, "vpsrad", X86vsra,
- VR512, v16i32, v4i32, bc_v4i32>, EVEX_V512,
+ VR512, v16i32, v4i32, bc_v4i32, VK16WM>, EVEX_V512,
EVEX_CD8<32, CD8VQ>;
defm VPSRAQZ : avx512_shift_rmi<0x72, MRM4r, MRM4m, "vpsraq", X86vsrai,
- VR512, v8i64, i512mem, memopv8i64>, EVEX_V512,
+ VR512, v8i64, i512mem, memopv8i64, VK8WM>, EVEX_V512,
EVEX_CD8<64, CD8VF>, VEX_W;
defm VPSRAQZ : avx512_shift_rrm<0xE2, "vpsraq", X86vsra,
- VR512, v8i64, v2i64, bc_v2i64>, EVEX_V512,
+ VR512, v8i64, v2i64, bc_v2i64, VK8WM>, EVEX_V512,
EVEX_CD8<64, CD8VQ>, VEX_W;
//===-------------------------------------------------------------------===//
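
The TableGen change threads a writemask register class (VK16WM or VK8WM) through both shift multiclasses and adds rik/mik/rrk/rmk variants whose asm strings carry the {${mask}} decoration on the destination. The masked defs have empty patterns, so at this point they only affect assembly syntax and encoding, not instruction selection. A hedged illustration of the string construction, as standalone C++ rather than TableGen, using the exact template strings from the defs above:

    // The only difference between the unmasked and masked variants is the
    // " {${mask}}" decoration on $dst in both the AT&T and Intel orderings.
    #include <cstdio>
    #include <string>

    std::string shiftAsmString(const std::string &Opcode, bool Masked) {
      if (!Masked)
        return Opcode + "\t{$src2, $src1, $dst|$dst, $src1, $src2}";
      return Opcode +
             "\t{$src2, $src1, $dst {${mask}}|$dst {${mask}}, $src1, $src2}";
    }

    int main() {
      std::printf("%s\n", shiftAsmString("vpsrld", false).c_str());
      std::printf("%s\n", shiftAsmString("vpsrld", true).c_str());
    }
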
diff --git a/utils/TableGen/X86RecognizableInstr.cpp b/utils/TableGen/X86RecognizableInstr.cpp
index 7962f9b983..d2675d7a62 100644
--- a/utils/TableGen/X86RecognizableInstr.cpp
+++ b/utils/TableGen/X86RecognizableInstr.cpp
@@ -818,17 +818,20 @@ void RecognizableInstr::emitInstructionSpecifier(DisassemblerTables &tables) {
case X86Local::MRM5r:
case X86Local::MRM6r:
case X86Local::MRM7r:
- // Operand 1 is a register operand in the R/M field.
- // Operand 2 (optional) is an immediate or relocation.
- // Operand 3 (optional) is an immediate.
- if (HasVEX_4VPrefix)
- assert(numPhysicalOperands <= 3 &&
- "Unexpected number of operands for MRMnRFrm with VEX_4V");
- else
- assert(numPhysicalOperands <= 3 &&
- "Unexpected number of operands for MRMnRFrm");
+ {
+ // Operand 1 is a register operand in the R/M field.
+ // Operand 2 (optional) is an immediate or relocation.
+ // Operand 3 (optional) is an immediate.
+ unsigned kOp = (HasEVEX_K) ? 1:0;
+ unsigned Op4v = (HasVEX_4VPrefix) ? 1:0;
+ if (numPhysicalOperands > 3 + kOp + Op4v)
+ llvm_unreachable("Unexpected number of operands for MRMnr");
+ }
if (HasVEX_4VPrefix)
HANDLE_OPERAND(vvvvRegister)
+
+ if (HasEVEX_K)
+ HANDLE_OPERAND(writemaskRegister)
HANDLE_OPTIONAL(rmRegister)
HANDLE_OPTIONAL(relocation)
HANDLE_OPTIONAL(immediate)
@@ -841,16 +844,19 @@ void RecognizableInstr::emitInstructionSpecifier(DisassemblerTables &tables) {
case X86Local::MRM5m:
case X86Local::MRM6m:
case X86Local::MRM7m:
- // Operand 1 is a memory operand (possibly SIB-extended)
- // Operand 2 (optional) is an immediate or relocation.
- if (HasVEX_4VPrefix)
- assert(numPhysicalOperands >= 2 && numPhysicalOperands <= 3 &&
- "Unexpected number of operands for MRMnMFrm");
- else
- assert(numPhysicalOperands >= 1 && numPhysicalOperands <= 2 &&
- "Unexpected number of operands for MRMnMFrm");
+ {
+ // Operand 1 is a memory operand (possibly SIB-extended)
+ // Operand 2 (optional) is an immediate or relocation.
+ unsigned kOp = (HasEVEX_K) ? 1:0;
+ unsigned Op4v = (HasVEX_4VPrefix) ? 1:0;
+ if (numPhysicalOperands < 1 + kOp + Op4v ||
+ numPhysicalOperands > 2 + kOp + Op4v)
+ llvm_unreachable("Unexpected number of operands for MRMnm");
+ }
if (HasVEX_4VPrefix)
HANDLE_OPERAND(vvvvRegister)
+ if (HasEVEX_K)
+ HANDLE_OPERAND(writemaskRegister)
HANDLE_OPERAND(memory)
HANDLE_OPTIONAL(relocation)
break;
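
The recognizer change replaces the two per-prefix asserts with a single bound that widens by one for each optional operand the form carries (VEX.vvvv and the EVEX writemask), then hands those operands to the existing HANDLE_OPERAND machinery in the same order the emitter consumes them. A hedged, standalone model of the new counting only (illustrative names, not LLVM's):

    // Models just the operand-count bounds from the diff; everything else
    // about the recognizer is omitted.
    #include <cstdio>

    bool validMRMnrCount(unsigned NumOps, bool HasVEX_4V, bool HasEVEX_K) {
      unsigned Extra = (HasVEX_4V ? 1 : 0) + (HasEVEX_K ? 1 : 0);
      return NumOps <= 3 + Extra;      // R/M reg + optional reloc + optional imm
    }

    bool validMRMnmCount(unsigned NumOps, bool HasVEX_4V, bool HasEVEX_K) {
      unsigned Extra = (HasVEX_4V ? 1 : 0) + (HasEVEX_K ? 1 : 0);
      return NumOps >= 1 + Extra &&    // memory operand is mandatory
             NumOps <= 2 + Extra;      // plus an optional immediate/relocation
    }

    int main() {
      // e.g. a masked register-immediate shift: vvvv dst, mask, reg, imm = 4 ops
      std::printf("MRMnr, 4 ops, 4V+K: %s\n",
                  validMRMnrCount(4, true, true) ? "ok" : "too many");
    }
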