{ { 0xf6 }, { 2, 2 }, T, R, pfx_66 }, /* adcx */
{ { 0xf6 }, { 2, 2 }, T, R, pfx_f3 }, /* adox */
{ { 0xf8 }, { 2, 2 }, F, W, pfx_66 }, /* movdir64b */
+ { { 0xf8 }, { 2, 2 }, F, W, pfx_f3 }, /* enqcmds */
+ { { 0xf8 }, { 2, 2 }, F, W, pfx_f2 }, /* enqcmd */
{ { 0xf9 }, { 2, 2 }, F, W }, /* movdiri */
};
#undef CND
$(call as-option-add,CFLAGS,CC,".equ \"x\"$$(comma)1",-DHAVE_AS_QUOTED_SYM)
$(call as-option-add,CFLAGS,CC,"invpcid (%rax)$$(comma)%rax",-DHAVE_AS_INVPCID)
$(call as-option-add,CFLAGS,CC,"movdiri %rax$$(comma)(%rax)",-DHAVE_AS_MOVDIR)
+$(call as-option-add,CFLAGS,CC,"enqcmd (%rax)$$(comma)%rax",-DHAVE_AS_ENQCMD)
# GAS's idea of true is -1; Clang's integrated assembler's idea of true is 1.
$(call as-option-add,CFLAGS,CC,\
} rmw;
enum {
blk_NONE,
+ blk_enqcmd,
blk_movdir,
} blk;
uint8_t modrm, modrm_mod, modrm_reg, modrm_rm;
uint64_t __attribute__ ((aligned(16))) xmm[2];
uint64_t __attribute__ ((aligned(32))) ymm[4];
uint64_t __attribute__ ((aligned(64))) zmm[8];
+ uint32_t data32[16];
} mmval_t;
/*
#define vcpu_has_rdpid() (ctxt->cpuid->feat.rdpid)
#define vcpu_has_movdiri() (ctxt->cpuid->feat.movdiri)
#define vcpu_has_movdir64b() (ctxt->cpuid->feat.movdir64b)
/*
 * Gate for emulating ENQCMD/ENQCMDS; reads the guest's cached CPUID feature
 * info (presumably CPUID.7.0:ECX.ENQCMD — confirm against the SDM / the
 * feat bitmap layout declared elsewhere in this tree).
 */
+#define vcpu_has_enqcmd() (ctxt->cpuid->feat.enqcmd)
#define vcpu_has_avx512_4vnniw() (ctxt->cpuid->feat.avx512_4vnniw)
#define vcpu_has_avx512_4fmaps() (ctxt->cpuid->feat.avx512_4fmaps)
#define vcpu_has_serialize() (ctxt->cpuid->feat.serialize)
state->simd_size = simd_none;
break;
+ case X86EMUL_OPC_F2(0x0f38, 0xf8): /* enqcmd r,m512 */
+ case X86EMUL_OPC_F3(0x0f38, 0xf8): /* enqcmds r,m512 */
+ host_and_vcpu_must_have(enqcmd);
+ generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(vex.pfx != vex_f2 && !mode_ring0(), EXC_GP, 0);
+ src.val = truncate_ea(*dst.reg);
+ generate_exception_if(!is_aligned(x86_seg_es, src.val, 64, ctxt, ops),
+ EXC_GP, 0);
+ fail_if(!ops->blk);
+ BUILD_BUG_ON(sizeof(*mmvalp) < 64);
+ if ( (rc = ops->read(ea.mem.seg, ea.mem.off, mmvalp, 64,
+ ctxt)) != X86EMUL_OKAY )
+ goto done;
+ if ( vex.pfx == vex_f2 ) /* enqcmd */
+ {
+ fail_if(!ops->read_msr);
+ if ( (rc = ops->read_msr(MSR_PASID, &msr_val,
+ ctxt)) != X86EMUL_OKAY )
+ goto done;
+ generate_exception_if(!(msr_val & PASID_VALID), EXC_GP, 0);
+ mmvalp->data32[0] = MASK_EXTR(msr_val, PASID_PASID_MASK);
+ }
+ mmvalp->data32[0] &= ~0x7ff00000;
+ state->blk = blk_enqcmd;
+ if ( (rc = ops->blk(x86_seg_es, src.val, mmvalp, 64, &_regs.eflags,
+ state, ctxt)) != X86EMUL_OKAY )
+ goto done;
+ state->simd_size = simd_none;
+ break;
+
case X86EMUL_OPC(0x0f38, 0xf9): /* movdiri mem,r */
host_and_vcpu_must_have(movdiri);
generate_exception_if(dst.type != OP_MEM, EXC_UD);
{
switch ( state->blk )
{
+ bool zf;
+
/*
* Throughout this switch(), memory clobbers are used to compensate
* that other operands may not properly express the (full) memory
* ranges covered.
*/
+ case blk_enqcmd:
+ ASSERT(bytes == 64);
+ if ( ((unsigned long)ptr & 0x3f) )
+ {
+ ASSERT_UNREACHABLE();
+ return X86EMUL_UNHANDLEABLE;
+ }
+ *eflags &= ~EFLAGS_MASK;
+#ifdef HAVE_AS_ENQCMD
+ asm ( "enqcmds (%[src]), %[dst]" ASM_FLAG_OUT(, "; setz %[zf]")
+ : [zf] ASM_FLAG_OUT("=@ccz", "=qm") (zf)
+ : [src] "r" (data), [dst] "r" (ptr) : "memory" );
+#else
+ /* enqcmds (%rsi), %rdi */
+ asm ( ".byte 0xf3, 0x0f, 0x38, 0xf8, 0x3e"
+ ASM_FLAG_OUT(, "; setz %[zf]")
+ : [zf] ASM_FLAG_OUT("=@ccz", "=qm") (zf)
+ : "S" (data), "D" (ptr) : "memory" );
+#endif
+ if ( zf )
+ *eflags |= X86_EFLAGS_ZF;
+ break;
+
case blk_movdir:
switch ( bytes )
{
return !mode_64bit();
case X86EMUL_OPC_66(0x0f38, 0xf8): /* MOVDIR64B */
+ case X86EMUL_OPC_F2(0x0f38, 0xf8): /* ENQCMD */
+ case X86EMUL_OPC_F3(0x0f38, 0xf8): /* ENQCMDS */
return true;
}