opcode_desc_t desc;
union vex vex;
union evex evex;
- int override_seg;
/*
 * Data operand effective address (usually computed from ModRM).
 */
struct operand ea;
#define lock_prefix (state->lock_prefix)
#define vex (state->vex)
#define evex (state->evex)
-#define override_seg (state->override_seg)
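/* With the state field and its accessor macro gone, the currently active
   segment override is tracked only by a local variable in x86_decode() below. */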
#define ea (state->ea)
static int
case 0xa0: case 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
case 0xa2: case 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
/* Source EA is not encoded via ModRM. */
+ ea.type = OP_MEM;
ea.mem.off = insn_fetch_bytes(ad_bytes);
break;
{
uint8_t b, d, sib, sib_index, sib_base;
unsigned int def_op_bytes, def_ad_bytes, opcode;
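/* Track any segment override prefix locally; x86_seg_none means no override was decoded. */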
+ enum x86_segment override_seg = x86_seg_none;
int rc = X86EMUL_OKAY;
memset(state, 0, sizeof(*state));
- override_seg = -1;
- ea.type = OP_MEM;
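/* Default to "no operand": the decode paths that actually produce a memory operand set OP_MEM themselves. */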
+ ea.type = OP_NONE;
ea.mem.seg = x86_seg_ds;
ea.reg = PTR_POISON;
state->regs = ctxt->regs;
else if ( ad_bytes == 2 )
{
/* 16-bit ModR/M decode. */
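/* Only memory forms are decoded here, hence the explicit OP_MEM. */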
+ ea.type = OP_MEM;
switch ( modrm_rm )
{
case 0:
else
{
/* 32/64-bit ModR/M decode. */
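/* Likewise a memory form; the SIB byte is decoded just below. */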
+ ea.type = OP_MEM;
if ( modrm_rm == 4 )
{
sib = insn_fetch_type(uint8_t);
}
}
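/*
 * The OP_MEM check is dropped: ea.mem.seg is only consumed for memory
 * accesses anyway, and code with implicit memory operands (such as the
 * zeroing loop further down) can now take the override from here.
 */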
- if ( override_seg != -1 && ea.type == OP_MEM )
+ if ( override_seg != x86_seg_none )
ea.mem.seg = override_seg;
/* Fetch the immediate operand, if present. */
generate_exception_if(limit < sizeof(long) ||
(limit & (limit - 1)), EXC_UD);
base &= ~(limit - 1);
- if ( override_seg == -1 )
- override_seg = x86_seg_ds;
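/* ea.mem.seg already defaults to DS and reflects any override prefix, so it can be used directly. */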
if ( ops->rep_stos )
{
unsigned long nr_reps = limit / sizeof(zero);
- rc = ops->rep_stos(&zero, override_seg, base, sizeof(zero),
+ rc = ops->rep_stos(&zero, ea.mem.seg, base, sizeof(zero),
&nr_reps, ctxt);
if ( rc == X86EMUL_OKAY )
{
}
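/* Zero whatever the rep_stos hook (if any) did not cover. */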
while ( limit )
{
- rc = ops->write(override_seg, base, &zero, sizeof(zero), ctxt);
+ rc = ops->write(ea.mem.seg, base, &zero, sizeof(zero), ctxt);
if ( rc != X86EMUL_OKAY )
goto done;
base += sizeof(zero);
#undef rex_prefix
#undef lock_prefix
#undef vex
-#undef override_seg
#undef ea
static void __init __maybe_unused build_assertions(void)