* [PATCH 0/5] x86/disasm: deal with invalid uses of AVX512 masking
@ 2023-06-16 10:14 Jan Beulich
2023-06-16 10:15 ` [PATCH 1/5] x86: re-work EVEX-z-without-masking check Jan Beulich
` (4 more replies)
0 siblings, 5 replies; 6+ messages in thread
From: Jan Beulich @ 2023-06-16 10:14 UTC (permalink / raw)
To: Binutils; +Cc: H.J. Lu
So far we only deal with EVEX.z set when EVEX.aaa is zero, and even
that in a crude way.
1: re-work EVEX-z-without-masking check
2: flag EVEX.z set when destination is a mask register
3: flag EVEX.z set when destination is memory
4: flag EVEX masking when destination is GPR(-like)
5: flag bad EVEX masking for miscellaneous insns
Jan
^ permalink raw reply [flat|nested] 6+ messages in thread
* [PATCH 1/5] x86: re-work EVEX-z-without-masking check
2023-06-16 10:14 [PATCH 0/5] x86/disasm: deal with invalid uses of AVX512 masking Jan Beulich
@ 2023-06-16 10:15 ` Jan Beulich
2023-06-16 10:15 ` [PATCH 2/5] x86: flag EVEX.z set when destination is a mask register Jan Beulich
` (3 subsequent siblings)
4 siblings, 0 replies; 6+ messages in thread
From: Jan Beulich @ 2023-06-16 10:15 UTC (permalink / raw)
To: Binutils; +Cc: H.J. Lu
Rather than corrupting disassembly altogether, flag EVEX.z set as bad
when masking isn't in effect in the first place at the time the
destination operand is actually processed.
--- a/gas/testsuite/gas/i386/avx512f-nondef.d
+++ b/gas/testsuite/gas/i386/avx512f-nondef.d
@@ -15,8 +15,7 @@ Disassembly of section .text:
[ ]*[a-f0-9]+: 62 f2 55 1f 3b f4 vpminud \{rn-bad\},%zmm4,%zmm5,%zmm6\{%k7\}
[ ]*[a-f0-9]+: 62 f2 7e 48 31 72 7f vpmovdb %zmm6,0x7f0\(%edx\)
[ ]*[a-f0-9]+: 62 f2 7e 58 31 72 7f vpmovdb %zmm6,0x7f0\(%edx\)\{bad\}
-[ ]*[a-f0-9]+: 62 f1 7c 88 58 \(bad\)
-[ ]*[a-f0-9]+: c3 ret
+[ ]*[a-f0-9]+: 62 f1 7c 88 58 c3 (\{evex\} )?vaddps %xmm3,%xmm0,%xmm0\{bad\}
[ ]*[a-f0-9]+: 62 f2 7d 4f 92 01 vgatherdps \(bad\),%zmm0\{%k7\}
[ ]*[a-f0-9]+: 67 62 f2 7d 4f 92 01 addr16 vgatherdps \(bad\),%zmm0\{%k7\}
[ ]*[a-f0-9]+: 62 f2 7d cf 92 04 08 vgatherdps \(%eax,%zmm1(,1)?\),%zmm0\{%k7\}\{z\}/\(bad\)
--- a/opcodes/i386-dis.c
+++ b/opcodes/i386-dis.c
@@ -9905,9 +9905,15 @@ print_insn (bfd_vma pc, disassemble_info
oappend (&ins, "{");
oappend_register (&ins, reg_name);
oappend (&ins, "}");
+
+ if (ins.vex.zeroing)
+ oappend (&ins, "{z}");
+ }
+ else if (ins.vex.zeroing)
+ {
+ oappend (&ins, "{bad}");
+ continue;
}
- if (ins.vex.zeroing)
- oappend (&ins, "{z}");
/* S/G insns require a mask and don't allow
zeroing-masking. */
@@ -9982,14 +9988,6 @@ print_insn (bfd_vma pc, disassemble_info
{
i386_dis_printf (info, dis_style_text, "(bad)");
ret = ins.end_codep - priv.the_buffer;
- goto out;
- }
-
- /* If EVEX.z is set, there must be an actual mask register in use. */
- if (ins.vex.zeroing && ins.vex.mask_register_specifier == 0)
- {
- i386_dis_printf (info, dis_style_text, "(bad)");
- ret = ins.end_codep - priv.the_buffer;
goto out;
}
^ permalink raw reply [flat|nested] 6+ messages in thread
* [PATCH 2/5] x86: flag EVEX.z set when destination is a mask register
2023-06-16 10:14 [PATCH 0/5] x86/disasm: deal with invalid uses of AVX512 masking Jan Beulich
2023-06-16 10:15 ` [PATCH 1/5] x86: re-work EVEX-z-without-masking check Jan Beulich
@ 2023-06-16 10:15 ` Jan Beulich
2023-06-16 10:16 ` [PATCH 3/5] x86: flag EVEX.z set when destination is memory Jan Beulich
` (2 subsequent siblings)
4 siblings, 0 replies; 6+ messages in thread
From: Jan Beulich @ 2023-06-16 10:15 UTC (permalink / raw)
To: Binutils; +Cc: H.J. Lu
While only zeroing-masking is possible in this case, this still requires
EVEX.z to be clear. Introduce a "global" flag right here, to be re-used
by checks which need to live in specific operand handlers.
--- a/gas/testsuite/gas/i386/avx512f-nondef.d
+++ b/gas/testsuite/gas/i386/avx512f-nondef.d
@@ -20,4 +20,5 @@ Disassembly of section .text:
[ ]*[a-f0-9]+: 67 62 f2 7d 4f 92 01 addr16 vgatherdps \(bad\),%zmm0\{%k7\}
[ ]*[a-f0-9]+: 62 f2 7d cf 92 04 08 vgatherdps \(%eax,%zmm1(,1)?\),%zmm0\{%k7\}\{z\}/\(bad\)
[ ]*[a-f0-9]+: 62 f2 7d 48 92 04 08 vgatherdps \(%eax,%zmm1(,1)?\),%zmm0/\(bad\)
+[ ]*[a-f0-9]+: 62 f1 7c cf c2 c0 00 vcmpeqps %zmm0,%zmm0,%k0\{%k7\}\{z\}/\(bad\)
#pass
--- a/gas/testsuite/gas/i386/avx512f-nondef.s
+++ b/gas/testsuite/gas/i386/avx512f-nondef.s
@@ -26,3 +26,6 @@
.insn EVEX.66.0F38.W0 0x92, (%eax,%zmm1), %zmm0{%k7}{z}
# vgatherdps (%eax,%zmm1), %zmm0 # without actual mask register
.insn EVEX.66.0F38.W0 0x92, (%eax,%zmm1), %zmm0
+
+ # vcmpeqps %zmm0, %zmm0, %k0{%k7} with EVEX.z set
+ .insn EVEX.0f 0xc2, $0, %zmm0, %zmm0, %k0{%k7}{z}
--- a/opcodes/i386-dis.c
+++ b/opcodes/i386-dis.c
@@ -219,6 +219,9 @@ struct instr_info
bool two_source_ops;
+ /* Record whether EVEX masking is used incorrectly. */
+ bool illegal_masking;
+
unsigned char op_ad;
signed char op_index[MAX_OPERANDS];
bool op_riprel[MAX_OPERANDS];
@@ -9915,12 +9918,21 @@ print_insn (bfd_vma pc, disassemble_info
continue;
}
+ /* Instructions with a mask register destination allow for
+ zeroing-masking only (if any masking at all), which is
+ _not_ expressed by EVEX.z. */
+ if (ins.vex.zeroing && dp->op[0].bytemode == mask_mode)
+ ins.illegal_masking = true;
+
/* S/G insns require a mask and don't allow
zeroing-masking. */
if ((dp->op[0].bytemode == vex_vsib_d_w_dq_mode
|| dp->op[0].bytemode == vex_vsib_q_w_dq_mode)
&& (ins.vex.mask_register_specifier == 0
|| ins.vex.zeroing))
+ ins.illegal_masking = true;
+
+ if (ins.illegal_masking)
oappend (&ins, "/(bad)");
}
}
^ permalink raw reply [flat|nested] 6+ messages in thread
* [PATCH 3/5] x86: flag EVEX.z set when destination is memory
2023-06-16 10:14 [PATCH 0/5] x86/disasm: deal with invalid uses of AVX512 masking Jan Beulich
2023-06-16 10:15 ` [PATCH 1/5] x86: re-work EVEX-z-without-masking check Jan Beulich
2023-06-16 10:15 ` [PATCH 2/5] x86: flag EVEX.z set when destination is a mask register Jan Beulich
@ 2023-06-16 10:16 ` Jan Beulich
2023-06-16 10:16 ` [PATCH 4/5] x86: flag EVEX masking when destination is GPR(-like) Jan Beulich
2023-06-16 10:17 ` [PATCH 5/5] x86: flag bad EVEX masking for miscellaneous insns Jan Beulich
4 siblings, 0 replies; 6+ messages in thread
From: Jan Beulich @ 2023-06-16 10:16 UTC (permalink / raw)
To: Binutils; +Cc: H.J. Lu
Zeroing-masking is not permitted in this case. See the code comment for
how this is being dealt with.
--- a/gas/testsuite/gas/i386/avx512f-nondef.d
+++ b/gas/testsuite/gas/i386/avx512f-nondef.d
@@ -21,4 +21,5 @@ Disassembly of section .text:
[ ]*[a-f0-9]+: 62 f2 7d cf 92 04 08 vgatherdps \(%eax,%zmm1(,1)?\),%zmm0\{%k7\}\{z\}/\(bad\)
[ ]*[a-f0-9]+: 62 f2 7d 48 92 04 08 vgatherdps \(%eax,%zmm1(,1)?\),%zmm0/\(bad\)
[ ]*[a-f0-9]+: 62 f1 7c cf c2 c0 00 vcmpeqps %zmm0,%zmm0,%k0\{%k7\}\{z\}/\(bad\)
+[ ]*[a-f0-9]+: 62 f1 7c cf 29 00 vmovaps %zmm0,\(%eax\)\{%k7\}\{z\}/\(bad\)
#pass
--- a/gas/testsuite/gas/i386/avx512f-nondef.s
+++ b/gas/testsuite/gas/i386/avx512f-nondef.s
@@ -29,3 +29,6 @@
# vcmpeqps %zmm0, %zmm0, %k0{%k7} with EVEX.z set
.insn EVEX.0f 0xc2, $0, %zmm0, %zmm0, %k0{%k7}{z}
+
+ # vmovaps %zmm0, (%eax){%k7} with EVEX.z set
+ .insn EVEX.0f 0x29, %zmm0, (%eax){%k7}{z}
--- a/opcodes/i386-dis.c
+++ b/opcodes/i386-dis.c
@@ -11854,6 +11854,13 @@ OP_E_memory (instr_info *ins, int bytemo
if (ins->vex.evex)
{
+
+ /* Zeroing-masking is invalid for memory destinations. Set the flag
+ uniformly, as the consumer will inspect it only for the destination
+ operand. */
+ if (ins->vex.zeroing)
+ ins->illegal_masking = true;
+
switch (bytemode)
{
case dw_mode:
^ permalink raw reply [flat|nested] 6+ messages in thread
* [PATCH 4/5] x86: flag EVEX masking when destination is GPR(-like)
2023-06-16 10:14 [PATCH 0/5] x86/disasm: deal with invalid uses of AVX512 masking Jan Beulich
` (2 preceding siblings ...)
2023-06-16 10:16 ` [PATCH 3/5] x86: flag EVEX.z set when destination is memory Jan Beulich
@ 2023-06-16 10:16 ` Jan Beulich
2023-06-16 10:17 ` [PATCH 5/5] x86: flag bad EVEX masking for miscellaneous insns Jan Beulich
4 siblings, 0 replies; 6+ messages in thread
From: Jan Beulich @ 2023-06-16 10:16 UTC (permalink / raw)
To: Binutils; +Cc: H.J. Lu
Masking is not permitted in this case. See the code comment for how this
is being dealt with.
To avoid excess special casing of modes, have OP_M() call OP_E_memory()
directly.
--- a/opcodes/i386-dis.c
+++ b/opcodes/i386-dis.c
@@ -11640,6 +11640,11 @@ print_register (instr_info *ins, unsigne
{
const char (*names)[8];
+ /* Masking is invalid for insns with GPR destination. Set the flag uniformly,
+ as the consumer will inspect it only for the destination operand. */
+ if (bytemode != mask_mode && ins->vex.mask_register_specifier)
+ ins->illegal_masking = true;
+
USED_REX (rexmask);
if (ins->rex & rexmask)
reg += 8;
@@ -12374,6 +12379,12 @@ OP_E (instr_info *ins, int bytemode, int
return true;
}
+ /* Masking is invalid for insns with GPR-like memory destination. Set the
+ flag uniformly, as the consumer will inspect it only for the destination
+ operand. */
+ if (ins->vex.mask_register_specifier)
+ ins->illegal_masking = true;
+
return OP_E_memory (ins, bytemode, sizeflag);
}
@@ -13156,10 +13167,14 @@ OP_XS (instr_info *ins, int bytemode, in
static bool
OP_M (instr_info *ins, int bytemode, int sizeflag)
{
+ /* Skip mod/rm byte. */
+ MODRM_CHECK;
+ ins->codep++;
+
if (ins->modrm.mod == 3)
/* bad bound,lea,lds,les,lfs,lgs,lss,cmpxchg8b,vmptrst modrm */
return BadOp (ins);
- return OP_E (ins, bytemode, sizeflag);
+ return OP_E_memory (ins, bytemode, sizeflag);
}
static bool
--- a/gas/testsuite/gas/i386/avx512f-nondef.d
+++ b/gas/testsuite/gas/i386/avx512f-nondef.d
@@ -22,4 +22,6 @@ Disassembly of section .text:
[ ]*[a-f0-9]+: 62 f2 7d 48 92 04 08 vgatherdps \(%eax,%zmm1(,1)?\),%zmm0/\(bad\)
[ ]*[a-f0-9]+: 62 f1 7c cf c2 c0 00 vcmpeqps %zmm0,%zmm0,%k0\{%k7\}\{z\}/\(bad\)
[ ]*[a-f0-9]+: 62 f1 7c cf 29 00 vmovaps %zmm0,\(%eax\)\{%k7\}\{z\}/\(bad\)
+[ ]*[a-f0-9]+: 62 f1 7d 0a c5 c8 00 vpextrw \$(0x)?0,%xmm0,%ecx\{%k2\}/\(bad\)
+[ ]*[a-f0-9]+: 62 f3 7d 0a 16 01 00 vpextrd \$(0x)?0,%xmm0,\(%ecx\)\{%k2\}/\(bad\)
#pass
--- a/gas/testsuite/gas/i386/avx512f-nondef.s
+++ b/gas/testsuite/gas/i386/avx512f-nondef.s
@@ -32,3 +32,9 @@
# vmovaps %zmm0, (%eax){%k7} with EVEX.z set
.insn EVEX.0f 0x29, %zmm0, (%eax){%k7}{z}
+
+ # vpextrw $0, %xmm0, %ecx with non-zero EVEX.aaa
+ .insn EVEX.66.0f 0xc5, $0, %xmm0, %ecx{%k2}
+
+ # vpextrd $0, %xmm0, (%ecx) with non-zero EVEX.aaa
+ .insn EVEX.66.0f3a 0x16, $0, %xmm0, (%ecx){%k2}
^ permalink raw reply [flat|nested] 6+ messages in thread
* [PATCH 5/5] x86: flag bad EVEX masking for miscellaneous insns
2023-06-16 10:14 [PATCH 0/5] x86/disasm: deal with invalid uses of AVX512 masking Jan Beulich
` (3 preceding siblings ...)
2023-06-16 10:16 ` [PATCH 4/5] x86: flag EVEX masking when destination is GPR(-like) Jan Beulich
@ 2023-06-16 10:17 ` Jan Beulich
4 siblings, 0 replies; 6+ messages in thread
From: Jan Beulich @ 2023-06-16 10:17 UTC (permalink / raw)
To: Binutils; +Cc: H.J. Lu
Masking is not permitted for certain further insns, not falling in any
of the earlier categories. Introduce the Y macro (not expanding to any
output) to flag such cases.
Note that in a few cases entries already covered otherwise are converted
as well, to continue to allow sharing of the string literals.
--- a/gas/testsuite/gas/i386/avx512f-nondef.d
+++ b/gas/testsuite/gas/i386/avx512f-nondef.d
@@ -24,4 +24,5 @@ Disassembly of section .text:
[ ]*[a-f0-9]+: 62 f1 7c cf 29 00 vmovaps %zmm0,\(%eax\)\{%k7\}\{z\}/\(bad\)
[ ]*[a-f0-9]+: 62 f1 7d 0a c5 c8 00 vpextrw \$(0x)?0,%xmm0,%ecx\{%k2\}/\(bad\)
[ ]*[a-f0-9]+: 62 f3 7d 0a 16 01 00 vpextrd \$(0x)?0,%xmm0,\(%ecx\)\{%k2\}/\(bad\)
+[ ]*[a-f0-9]+: 62 f2 7d 4a 2a 01 vmovntdqa \(%ecx\),%zmm0\{%k2\}/\(bad\)
#pass
--- a/gas/testsuite/gas/i386/avx512f-nondef.s
+++ b/gas/testsuite/gas/i386/avx512f-nondef.s
@@ -38,3 +38,6 @@
# vpextrd $0, %xmm0, (%ecx) with non-zero EVEX.aaa
.insn EVEX.66.0f3a 0x16, $0, %xmm0, (%ecx){%k2}
+
+ # vmovntdqa (%ecx), %zmm0 with non-zero EVEX.aaa
+ .insn EVEX.66.0f38.W0 0x2a, (%ecx), %zmm0{%k2}
--- a/opcodes/i386-dis.c
+++ b/opcodes/i386-dis.c
@@ -1806,7 +1806,7 @@ struct dis386 {
'V' unused.
'W' => print 'b', 'w' or 'l' ('d' in Intel mode)
'X' => print 's', 'd' depending on data16 prefix (for XMM)
- 'Y' unused.
+ 'Y' => no output, mark EVEX.aaa != 0 as bad.
'Z' => print 'q' in 64bit mode and 'l' otherwise, if suffix_always is true.
'!' => change condition from true to false or from false to true.
'%' => add 1 upper case letter to the macro.
@@ -1828,6 +1828,8 @@ struct dis386 {
"XV" => print "{vex} " pseudo prefix
"XE" => print "{evex} " pseudo prefix if no EVEX-specific functionality is
is used by an EVEX-encoded (AVX512VL) instruction.
+ "YK" keep unused, to avoid ambiguity with the combined use of Y and K.
+ "YX" keep unused, to avoid ambiguity with the combined use of Y and X.
"LQ" => print 'l' ('d' in Intel mode) or 'q' for memory operand, cond
being false, or no operand at all in 64bit mode, or if suffix_always
is true.
@@ -3719,9 +3721,9 @@ static const struct dis386 prefix_table[
/* PREFIX_VEX_0F2A */
{
{ Bad_Opcode },
- { "%XEvcvtsi2ss{%LQ|}", { XMScalar, VexScalar, EXxEVexR, Edq }, 0 },
+ { "%XEvcvtsi2ssY{%LQ|}", { XMScalar, VexScalar, EXxEVexR, Edq }, 0 },
{ Bad_Opcode },
- { "%XEvcvtsi2sd{%LQ|}", { XMScalar, VexScalar, EXxEVexR64, Edq }, 0 },
+ { "%XEvcvtsi2sdY{%LQ|}", { XMScalar, VexScalar, EXxEVexR64, Edq }, 0 },
},
/* PREFIX_VEX_0F2C */
@@ -3742,16 +3744,16 @@ static const struct dis386 prefix_table[
/* PREFIX_VEX_0F2E */
{
- { "%XEvucomisX", { XMScalar, EXd, EXxEVexS }, 0 },
+ { "%XEvucomisYX", { XMScalar, EXd, EXxEVexS }, 0 },
{ Bad_Opcode },
- { "%XEvucomisX", { XMScalar, EXq, EXxEVexS }, 0 },
+ { "%XEvucomisYX", { XMScalar, EXq, EXxEVexS }, 0 },
},
/* PREFIX_VEX_0F2F */
{
- { "%XEvcomisX", { XMScalar, EXd, EXxEVexS }, 0 },
+ { "%XEvcomisYX", { XMScalar, EXd, EXxEVexS }, 0 },
{ Bad_Opcode },
- { "%XEvcomisX", { XMScalar, EXq, EXxEVexS }, 0 },
+ { "%XEvcomisYX", { XMScalar, EXq, EXxEVexS }, 0 },
},
/* PREFIX_VEX_0F41_L_1_M_1_W_0 */
@@ -7004,32 +7006,32 @@ static const struct dis386 vex_table[][2
static const struct dis386 vex_len_table[][2] = {
/* VEX_LEN_0F12_P_0_M_0 / VEX_LEN_0F12_P_2_M_0 */
{
- { "%XEvmovlpX", { XM, Vex, EXq }, 0 },
+ { "%XEvmovlpYX", { XM, Vex, EXq }, 0 },
},
/* VEX_LEN_0F12_P_0_M_1 */
{
- { "%XEvmovhlp%XS", { XM, Vex, EXq }, 0 },
+ { "%XEvmovhlpY%XS", { XM, Vex, EXq }, 0 },
},
/* VEX_LEN_0F13_M_0 */
{
- { "%XEvmovlpX", { EXq, XM }, PREFIX_OPCODE },
+ { "%XEvmovlpYX", { EXq, XM }, PREFIX_OPCODE },
},
/* VEX_LEN_0F16_P_0_M_0 / VEX_LEN_0F16_P_2_M_0 */
{
- { "%XEvmovhpX", { XM, Vex, EXq }, 0 },
+ { "%XEvmovhpYX", { XM, Vex, EXq }, 0 },
},
/* VEX_LEN_0F16_P_0_M_1 */
{
- { "%XEvmovlhp%XS", { XM, Vex, EXq }, 0 },
+ { "%XEvmovlhpY%XS", { XM, Vex, EXq }, 0 },
},
/* VEX_LEN_0F17_M_0 */
{
- { "%XEvmovhpX", { EXq, XM }, PREFIX_OPCODE },
+ { "%XEvmovhpYX", { EXq, XM }, PREFIX_OPCODE },
},
/* VEX_LEN_0F41 */
@@ -7081,7 +7083,7 @@ static const struct dis386 vex_len_table
/* VEX_LEN_0F6E */
{
- { "%XEvmovK", { XMScalar, Edq }, PREFIX_DATA },
+ { "%XEvmovYK", { XMScalar, Edq }, PREFIX_DATA },
},
/* VEX_LEN_0F77 */
@@ -7092,7 +7094,7 @@ static const struct dis386 vex_len_table
/* VEX_LEN_0F7E_P_1 */
{
- { "%XEvmovq", { XMScalar, EXq }, 0 },
+ { "%XEvmovqY", { XMScalar, EXq }, 0 },
},
/* VEX_LEN_0F7E_P_2 */
@@ -7142,7 +7144,7 @@ static const struct dis386 vex_len_table
/* VEX_LEN_0FC4 */
{
- { "%XEvpinsrw", { XM, Vex, Edw, Ib }, PREFIX_DATA },
+ { "%XEvpinsrwY", { XM, Vex, Edw, Ib }, PREFIX_DATA },
},
/* VEX_LEN_0FC5 */
@@ -7152,7 +7154,7 @@ static const struct dis386 vex_len_table
/* VEX_LEN_0FD6 */
{
- { "%XEvmovq", { EXqS, XMScalar }, PREFIX_DATA },
+ { "%XEvmovqY", { EXqS, XMScalar }, PREFIX_DATA },
},
/* VEX_LEN_0FF7 */
@@ -7302,17 +7304,17 @@ static const struct dis386 vex_len_table
/* VEX_LEN_0F3A20 */
{
- { "%XEvpinsrb", { XM, Vex, Edb, Ib }, PREFIX_DATA },
+ { "%XEvpinsrbY", { XM, Vex, Edb, Ib }, PREFIX_DATA },
},
/* VEX_LEN_0F3A21 */
{
- { "%XEvinsertps", { XM, Vex, EXd, Ib }, PREFIX_DATA },
+ { "%XEvinsertpsY", { XM, Vex, EXd, Ib }, PREFIX_DATA },
},
/* VEX_LEN_0F3A22 */
{
- { "%XEvpinsrK", { XM, Vex, Edq, Ib }, PREFIX_DATA },
+ { "%XEvpinsrYK", { XM, Vex, Edq, Ib }, PREFIX_DATA },
},
/* VEX_LEN_0F3A30 */
@@ -11125,7 +11127,12 @@ putop (instr_info *ins, const char *in_t
*ins->obufp++ = 's';
break;
case 'Y':
- if (l == 1 && last[0] == 'X')
+ if (l == 0)
+ {
+ if (ins->vex.mask_register_specifier)
+ ins->illegal_masking = true;
+ }
+ else if (l == 1 && last[0] == 'X')
{
if (!ins->need_vex)
abort ();
--- a/opcodes/i386-dis-evex.h
+++ b/opcodes/i386-dis-evex.h
@@ -278,7 +278,7 @@ static const struct dis386 evex_table[][
{ VEX_W_TABLE (EVEX_W_0FF3) },
{ VEX_W_TABLE (EVEX_W_0FF4) },
{ "%XEvpmaddwd", { XM, Vex, EXx }, PREFIX_DATA },
- { "%XEvpsadbw", { XM, Vex, EXx }, PREFIX_DATA },
+ { "%XEvpsadbwY", { XM, Vex, EXx }, PREFIX_DATA },
{ Bad_Opcode },
/* F8 */
{ "%XEvpsubb", { XM, Vex, EXx }, PREFIX_DATA },
@@ -540,10 +540,10 @@ static const struct dis386 evex_table[][
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
- { "%XEvaesenc", { XM, Vex, EXx }, PREFIX_DATA },
- { "%XEvaesenclast", { XM, Vex, EXx }, PREFIX_DATA },
- { "%XEvaesdec", { XM, Vex, EXx }, PREFIX_DATA },
- { "%XEvaesdeclast", { XM, Vex, EXx }, PREFIX_DATA },
+ { "%XEvaesencY", { XM, Vex, EXx }, PREFIX_DATA },
+ { "%XEvaesenclastY", { XM, Vex, EXx }, PREFIX_DATA },
+ { "%XEvaesdecY", { XM, Vex, EXx }, PREFIX_DATA },
+ { "%XEvaesdeclastY", { XM, Vex, EXx }, PREFIX_DATA },
/* E0 */
{ Bad_Opcode },
{ Bad_Opcode },
@@ -660,7 +660,7 @@ static const struct dis386 evex_table[][
{ Bad_Opcode },
{ VEX_W_TABLE (EVEX_W_0F3A42) },
{ EVEX_LEN_TABLE (EVEX_LEN_0F3A43) },
- { "%XEvpclmulqdq", { XM, Vex, EXx, PCLMUL }, PREFIX_DATA },
+ { "%XEvpclmulqdqY", { XM, Vex, EXx, PCLMUL }, PREFIX_DATA },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
@@ -998,7 +998,7 @@ static const struct dis386 evex_table[][
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
- { "vmovw", { XMScalar, Edw }, PREFIX_DATA },
+ { "vmovwY", { XMScalar, Edw }, PREFIX_DATA },
{ Bad_Opcode },
/* 70 */
{ Bad_Opcode },
--- a/opcodes/i386-dis-evex-mod.h
+++ b/opcodes/i386-dis-evex-mod.h
@@ -9,22 +9,22 @@
/* MOD_EVEX_0F3828_P_1 */
{
{ Bad_Opcode },
- { "vpmovm2%BW", { XM, MaskE }, 0 },
+ { "vpmovm2Y%BW", { XM, MaskE }, 0 },
},
/* MOD_EVEX_0F382A_P_1_W_1 */
{
{ Bad_Opcode },
- { "vpbroadcastmb2q", { XM, MaskE }, 0 },
+ { "vpbroadcastmb2qY", { XM, MaskE }, 0 },
},
/* MOD_EVEX_0F3838_P_1 */
{
{ Bad_Opcode },
- { "vpmovm2%DQ", { XM, MaskE }, 0 },
+ { "vpmovm2Y%DQ", { XM, MaskE }, 0 },
},
/* MOD_EVEX_0F383A_P_1_W_0 */
{
{ Bad_Opcode },
- { "vpbroadcastmw2d", { XM, MaskE }, 0 },
+ { "vpbroadcastmw2dY", { XM, MaskE }, 0 },
},
/* MOD_EVEX_0F385A */
{
--- a/opcodes/i386-dis-evex-prefix.h
+++ b/opcodes/i386-dis-evex-prefix.h
@@ -42,9 +42,9 @@
/* PREFIX_EVEX_0F7B */
{
{ Bad_Opcode },
- { "vcvtusi2ss{%LQ|}", { XMScalar, VexScalar, EXxEVexR, Edq }, 0 },
+ { "vcvtusi2ssY{%LQ|}", { XMScalar, VexScalar, EXxEVexR, Edq }, 0 },
{ VEX_W_TABLE (EVEX_W_0F7B_P_2) },
- { "vcvtusi2sd{%LQ|}", { XMScalar, VexScalar, EXxEVexR64, Edq }, 0 },
+ { "vcvtusi2sdY{%LQ|}", { XMScalar, VexScalar, EXxEVexR64, Edq }, 0 },
},
/* PREFIX_EVEX_0F7E */
{
@@ -166,7 +166,7 @@
/* PREFIX_EVEX_0F3829 */
{
{ Bad_Opcode },
- { "vpmov%BW2m", { MaskG, EXx }, 0 },
+ { "vpmov%BW2mY", { MaskG, EXx }, 0 },
{ VEX_W_TABLE (EVEX_W_0F3829_P_2) },
},
/* PREFIX_EVEX_0F382A */
@@ -220,7 +220,7 @@
/* PREFIX_EVEX_0F3839 */
{
{ Bad_Opcode },
- { "vpmov%DQ2m", { MaskG, EXx }, 0 },
+ { "vpmov%DQ2mY", { MaskG, EXx }, 0 },
{ "%XEvpmins%DQ", { XM, Vex, EXx }, 0 },
},
/* PREFIX_EVEX_0F383A */
@@ -248,7 +248,7 @@
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
- { "vp2intersect%DQ", { MaskG, Vex, EXx, EXxEVexS }, 0 },
+ { "vp2intersectY%DQ", { MaskG, Vex, EXx, EXxEVexS }, 0 },
},
/* PREFIX_EVEX_0F3872 */
{
@@ -357,7 +357,7 @@
/* PREFIX_EVEX_MAP5_2A */
{
{ Bad_Opcode },
- { "vcvtsi2sh{%LQ|}", { XMScalar, VexScalar, EXxEVexR, Edq }, 0 },
+ { "vcvtsi2shY{%LQ|}", { XMScalar, VexScalar, EXxEVexR, Edq }, 0 },
},
/* PREFIX_EVEX_MAP5_2C */
{
@@ -371,11 +371,11 @@
},
/* PREFIX_EVEX_MAP5_2E */
{
- { "vucomis%XH", { XMScalar, EXw, EXxEVexS }, 0 },
+ { "vucomisY%XH", { XMScalar, EXw, EXxEVexS }, 0 },
},
/* PREFIX_EVEX_MAP5_2F */
{
- { "vcomis%XH", { XMScalar, EXw, EXxEVexS }, 0 },
+ { "vcomisY%XH", { XMScalar, EXw, EXxEVexS }, 0 },
},
/* PREFIX_EVEX_MAP5_51 */
{
@@ -447,7 +447,7 @@
/* PREFIX_EVEX_MAP5_7B */
{
{ Bad_Opcode },
- { "vcvtusi2sh{%LQ|}", { XMScalar, VexScalar, EXxEVexR, Edq }, 0 },
+ { "vcvtusi2shY{%LQ|}", { XMScalar, VexScalar, EXxEVexR, Edq }, 0 },
{ "vcvtp%XH2qq", { XM, EXxmmqdh, EXxEVexR }, 0 },
},
/* PREFIX_EVEX_MAP5_7C */
--- a/opcodes/i386-dis-evex-w.h
+++ b/opcodes/i386-dis-evex-w.h
@@ -293,7 +293,7 @@
},
/* EVEX_W_0F382A_P_2 */
{
- { "%XEvmovntdqa", { XM, EXEvexXNoBcst }, 0 },
+ { "%XEvmovntdqaY", { XM, EXEvexXNoBcst }, 0 },
},
/* EVEX_W_0F382B */
{
--- a/opcodes/i386-dis-evex-reg.h
+++ b/opcodes/i386-dis-evex-reg.h
@@ -23,11 +23,11 @@
{ Bad_Opcode },
{ Bad_Opcode },
{ VEX_W_TABLE (EVEX_W_0F73_R_2) },
- { "%XEvpsrldq", { Vex, EXx, Ib }, PREFIX_DATA },
+ { "%XEvpsrldqY", { Vex, EXx, Ib }, PREFIX_DATA },
{ Bad_Opcode },
{ Bad_Opcode },
{ VEX_W_TABLE (EVEX_W_0F73_R_6) },
- { "%XEvpslldq", { Vex, EXx, Ib }, PREFIX_DATA },
+ { "%XEvpslldqY", { Vex, EXx, Ib }, PREFIX_DATA },
},
/* REG_EVEX_0F38C6_M_0_L_2 */
{
^ permalink raw reply [flat|nested] 6+ messages in thread
end of thread, other threads:[~2023-06-16 10:17 UTC | newest]
Thread overview: 6+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2023-06-16 10:14 [PATCH 0/5] x86/disasm: deal with invalid uses of AVX512 masking Jan Beulich
2023-06-16 10:15 ` [PATCH 1/5] x86: re-work EVEX-z-without-masking check Jan Beulich
2023-06-16 10:15 ` [PATCH 2/5] x86: flag EVEX.z set when destination is a mask register Jan Beulich
2023-06-16 10:16 ` [PATCH 3/5] x86: flag EVEX.z set when destination is memory Jan Beulich
2023-06-16 10:16 ` [PATCH 4/5] x86: flag EVEX masking when destination is GPR(-like) Jan Beulich
2023-06-16 10:17 ` [PATCH 5/5] x86: flag bad EVEX masking for miscellaneous insns Jan Beulich
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for read-only IMAP folder(s) and NNTP newsgroup(s).