From: Wei Liu <wei.liu2@citrix.com>
To: Xen-devel <xen-devel@lists.xenproject.org>
Cc: Andrew Cooper <andrew.cooper3@citrix.com>,
Wei Liu <wei.liu2@citrix.com>, Jan Beulich <jbeulich@suse.com>
Subject: [PATCH v2 09/12] x86emul: use TRAP definitions in x86-defns.h
Date: Tue, 31 Jan 2017 11:08:06 +0000
Message-ID: <20170131110809.30001-10-wei.liu2@citrix.com>
In-Reply-To: <20170131110809.30001-1-wei.liu2@citrix.com>
And remove the local EXC_* definitions, which duplicated them.
No functional change.
Signed-off-by: Wei Liu <wei.liu2@citrix.com>
---
Cc: Jan Beulich <jbeulich@suse.com>
Cc: Andrew Cooper <andrew.cooper3@citrix.com>
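For reviewers' reference, below is a summary of the renaming applied throughout
the file. This is my own summary derived from the hunks that follow, not an
excerpt of x86-defns.h; the vector numbers in parentheses are the ones carried
by the removed EXC_* block:

/*
 * EXC_DE  (0)  -> TRAP_divide_error
 * EXC_DB  (1)  -> TRAP_debug
 * EXC_BP  (3)  -> TRAP_int3
 * EXC_OF  (4)  -> TRAP_overflow
 * EXC_BR  (5)  -> TRAP_bounds
 * EXC_UD  (6)  -> TRAP_invalid_op
 * EXC_NM  (7)  -> TRAP_no_device
 * EXC_NP  (11) -> TRAP_no_segment
 * EXC_SS  (12) -> TRAP_stack_error
 * EXC_GP  (13) -> TRAP_gp_fault
 * EXC_PF  (14) -> TRAP_page_fault
 * EXC_XM  (19) -> TRAP_simd_error
 * EXC_HAS_EC   -> TRAP_HAVE_EC
 */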
---
xen/arch/x86/x86_emulate/x86_emulate.c | 305 +++++++++++++++------------------
1 file changed, 142 insertions(+), 163 deletions(-)
diff --git a/xen/arch/x86/x86_emulate/x86_emulate.c b/xen/arch/x86/x86_emulate/x86_emulate.c
index d09b861622..ac86de7b5f 100644
--- a/xen/arch/x86/x86_emulate/x86_emulate.c
+++ b/xen/arch/x86/x86_emulate/x86_emulate.c
@@ -421,27 +421,6 @@ typedef union {
/* MXCSR bit definitions. */
#define MXCSR_MM (1U << 17)
-/* Exception definitions. */
-#define EXC_DE 0
-#define EXC_DB 1
-#define EXC_BP 3
-#define EXC_OF 4
-#define EXC_BR 5
-#define EXC_UD 6
-#define EXC_NM 7
-#define EXC_DF 8
-#define EXC_TS 10
-#define EXC_NP 11
-#define EXC_SS 12
-#define EXC_GP 13
-#define EXC_PF 14
-#define EXC_MF 16
-#define EXC_AC 17
-#define EXC_XM 19
-
-#define EXC_HAS_EC \
- ((1u << EXC_DF) | (1u << EXC_TS) | (1u << EXC_NP) | \
- (1u << EXC_SS) | (1u << EXC_GP) | (1u << EXC_PF) | (1u << EXC_AC))
/* Segment selector error code bits. */
#define ECODE_EXT (1 << 0)
@@ -642,7 +621,7 @@ do{ asm volatile ( \
state->ip += (_size); /* real hardware doesn't truncate */ \
generate_exception_if((uint8_t)(state->ip - \
ctxt->regs->r(ip)) > MAX_INST_LEN, \
- EXC_GP, 0); \
+ TRAP_gp_fault, 0); \
rc = ops->insn_fetch(x86_seg_cs, _ip, &_x, (_size), ctxt); \
if ( rc ) goto done; \
_x; \
@@ -671,7 +650,7 @@ do { \
static inline int mkec(uint8_t e, int32_t ec, ...)
{
- return (e < 32 && ((1u << e) & EXC_HAS_EC)) ? ec : X86_EVENT_NO_EC;
+ return (e < 32 && ((1u << e) & TRAP_HAVE_EC)) ? ec : X86_EVENT_NO_EC;
}
#define generate_exception_if(p, e, ec...) \
@@ -741,12 +720,12 @@ do { \
#define validate_far_branch(cs, ip) ({ \
if ( sizeof(ip) <= 4 ) { \
ASSERT(in_longmode(ctxt, ops) <= 0); \
- generate_exception_if((ip) > (cs)->limit, EXC_GP, 0); \
+ generate_exception_if((ip) > (cs)->limit, TRAP_gp_fault, 0); \
} else \
generate_exception_if(in_longmode(ctxt, ops) && \
(cs)->attr.fields.l \
? !is_canonical_address(ip) \
- : (ip) > (cs)->limit, EXC_GP, 0); \
+ : (ip) > (cs)->limit, TRAP_gp_fault, 0); \
})
#define commit_far_branch(cs, newip) ({ \
@@ -796,7 +775,7 @@ static int _get_fpu(
return rc;
generate_exception_if(!(cr4 & ((type == X86EMUL_FPU_xmm)
? X86_CR4_OSFXSR : X86_CR4_OSXSAVE)),
- EXC_UD);
+ TRAP_invalid_op);
}
rc = ops->read_cr(0, &cr0, ctxt);
@@ -809,13 +788,13 @@ static int _get_fpu(
}
if ( cr0 & X86_CR0_EM )
{
- generate_exception_if(type == X86EMUL_FPU_fpu, EXC_NM);
- generate_exception_if(type == X86EMUL_FPU_mmx, EXC_UD);
- generate_exception_if(type == X86EMUL_FPU_xmm, EXC_UD);
+ generate_exception_if(type == X86EMUL_FPU_fpu, TRAP_no_device);
+ generate_exception_if(type == X86EMUL_FPU_mmx, TRAP_invalid_op);
+ generate_exception_if(type == X86EMUL_FPU_xmm, TRAP_invalid_op);
}
generate_exception_if((cr0 & X86_CR0_TS) &&
(type != X86EMUL_FPU_wait || (cr0 & X86_CR0_MP)),
- EXC_NM);
+ TRAP_no_device);
}
done:
@@ -835,10 +814,10 @@ do { \
#define put_fpu(_fic) \
do { \
_put_fpu(); \
- if ( (_fic)->exn_raised == EXC_XM && ops->read_cr && \
+ if ( (_fic)->exn_raised == TRAP_simd_error && ops->read_cr && \
ops->read_cr(4, &cr4, ctxt) == X86EMUL_OKAY && \
!(cr4 & X86_CR4_OSXMMEXCPT) ) \
- (_fic)->exn_raised = EXC_UD; \
+ (_fic)->exn_raised = TRAP_invalid_op; \
generate_exception_if((_fic)->exn_raised >= 0, \
(_fic)->exn_raised); \
} while (0)
@@ -1194,7 +1173,7 @@ static int ioport_access_check(
return rc == X86EMUL_DONE ? X86EMUL_OKAY : rc;
/* Ensure the TSS has an io-bitmap-offset field. */
- generate_exception_if(tr.attr.fields.type != 0xb, EXC_GP, 0);
+ generate_exception_if(tr.attr.fields.type != 0xb, TRAP_gp_fault, 0);
switch ( rc = read_ulong(x86_seg_tr, 0x66, &iobmp, 2, ctxt, ops) )
{
@@ -1202,7 +1181,7 @@ static int ioport_access_check(
break;
case X86EMUL_EXCEPTION:
- generate_exception_if(!ctxt->event_pending, EXC_GP, 0);
+ generate_exception_if(!ctxt->event_pending, TRAP_gp_fault, 0);
/* fallthrough */
default:
@@ -1217,7 +1196,7 @@ static int ioport_access_check(
break;
case X86EMUL_EXCEPTION:
- generate_exception_if(!ctxt->event_pending, EXC_GP, 0);
+ generate_exception_if(!ctxt->event_pending, TRAP_gp_fault, 0);
/* fallthrough */
default:
@@ -1225,7 +1204,7 @@ static int ioport_access_check(
}
generate_exception_if(iobmp & (((1 << bytes) - 1) << (first_port & 7)),
- EXC_GP, 0);
+ TRAP_gp_fault, 0);
done:
return rc;
@@ -1322,7 +1301,7 @@ static bool vcpu_has(
#define vcpu_has_rdpid() vcpu_has( 7, ECX, 22, ctxt, ops)
#define vcpu_must_have(feat) \
- generate_exception_if(!vcpu_has_##feat(), EXC_UD)
+ generate_exception_if(!vcpu_has_##feat(), TRAP_invalid_op)
#ifdef __XEN__
/*
@@ -1332,7 +1311,7 @@ static bool vcpu_has(
* the actual operation.
*/
#define host_and_vcpu_must_have(feat) ({ \
- generate_exception_if(!cpu_has_##feat, EXC_UD); \
+ generate_exception_if(!cpu_has_##feat, TRAP_invalid_op); \
vcpu_must_have(feat); \
})
#else
@@ -1398,7 +1377,7 @@ protmode_load_seg(
uint8_t dpl, rpl;
int cpl = get_cpl(ctxt, ops);
uint32_t a_flag = 0x100;
- int rc, fault_type = EXC_GP;
+ int rc, fault_type = TRAP_gp_fault;
if ( cpl < 0 )
return X86EMUL_UNHANDLEABLE;
@@ -1527,7 +1506,7 @@ protmode_load_seg(
/* Segment present in memory? */
if ( !(desc.b & (1 << 15)) && seg != x86_seg_none )
{
- fault_type = seg != x86_seg_ss ? EXC_NP : EXC_SS;
+ fault_type = seg != x86_seg_ss ? TRAP_no_segment : TRAP_stack_error;
goto raise_exn;
}
@@ -1720,7 +1699,7 @@ static int inject_swint(enum x86_swint_type type,
struct x86_emulate_ctxt *ctxt,
const struct x86_emulate_ops *ops)
{
- int rc, error_code, fault_type = EXC_GP;
+ int rc, error_code, fault_type = TRAP_gp_fault;
/*
* Without hardware support, injecting software interrupts/exceptions is
@@ -1834,7 +1813,7 @@ static int inject_swint(enum x86_swint_type type,
/* Is this entry present? */
if ( !(idte.b & (1u << 15)) )
{
- fault_type = EXC_NP;
+ fault_type = TRAP_no_segment;
goto raise_exn;
}
}
@@ -1989,7 +1968,7 @@ x86_decode_onebyte(
case 0x9a: /* call (far, absolute) */
case 0xea: /* jmp (far, absolute) */
- generate_exception_if(mode_64bit(), EXC_UD);
+ generate_exception_if(mode_64bit(), TRAP_invalid_op);
imm1 = insn_fetch_bytes(op_bytes);
imm2 = insn_fetch_type(uint16_t);
@@ -2083,7 +2062,7 @@ x86_decode_twobyte(
}
/* fall through */
case 0x21: case 0x23: /* mov to/from dr */
- generate_exception_if(lock_prefix || ea.type != OP_REG, EXC_UD);
+ generate_exception_if(lock_prefix || ea.type != OP_REG, TRAP_invalid_op);
op_bytes = mode_64bit() ? 8 : 4;
break;
@@ -2274,7 +2253,7 @@ x86_decode(
/* fall through */
case 8:
/* VEX / XOP / EVEX */
- generate_exception_if(rex_prefix || vex.pfx, EXC_UD);
+ generate_exception_if(rex_prefix || vex.pfx, TRAP_invalid_op);
/*
* With operand size override disallowed (see above), op_bytes
* should not have changed from its default.
@@ -2665,7 +2644,7 @@ x86_emulate(
d = state.desc;
#define state (&state)
- generate_exception_if(state->not_64bit && mode_64bit(), EXC_UD);
+ generate_exception_if(state->not_64bit && mode_64bit(), TRAP_invalid_op);
if ( ea.type == OP_REG )
ea.reg = decode_register(modrm_rm, &_regs,
@@ -2744,12 +2723,12 @@ x86_emulate(
* CMPXCHG{8,16}B (MOV CRn is being handled elsewhere).
*/
generate_exception_if(lock_prefix && (ext != ext_0f || b != 0xc7),
- EXC_UD);
+ TRAP_invalid_op);
dst.type = OP_NONE;
break;
case DstReg:
- generate_exception_if(lock_prefix, EXC_UD);
+ generate_exception_if(lock_prefix, TRAP_invalid_op);
dst.type = OP_REG;
if ( d & ByteOp )
{
@@ -2804,7 +2783,7 @@ x86_emulate(
dst = ea;
if ( dst.type == OP_REG )
{
- generate_exception_if(lock_prefix, EXC_UD);
+ generate_exception_if(lock_prefix, TRAP_invalid_op);
switch ( dst.bytes )
{
case 1: dst.val = *(uint8_t *)dst.reg; break;
@@ -2824,7 +2803,7 @@ x86_emulate(
else
{
/* Lock prefix is allowed only on RMW instructions. */
- generate_exception_if(lock_prefix, EXC_UD);
+ generate_exception_if(lock_prefix, TRAP_invalid_op);
fail_if(!ops->write);
}
break;
@@ -2866,7 +2845,7 @@ x86_emulate(
break;
case 0x38 ... 0x3d: cmp: /* cmp */
- generate_exception_if(lock_prefix, EXC_UD);
+ generate_exception_if(lock_prefix, TRAP_invalid_op);
emulate_2op_SrcV("cmp", src, dst, _regs._eflags);
dst.type = OP_NONE;
break;
@@ -3003,14 +2982,14 @@ x86_emulate(
case 0x62: /* bound */ {
unsigned long src_val2;
int lb, ub, idx;
- generate_exception_if(src.type != OP_MEM, EXC_UD);
+ generate_exception_if(src.type != OP_MEM, TRAP_invalid_op);
if ( (rc = read_ulong(src.mem.seg, src.mem.off + op_bytes,
&src_val2, op_bytes, ctxt, ops)) )
goto done;
ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
- generate_exception_if((idx < lb) || (idx > ub), EXC_BR);
+ generate_exception_if((idx < lb) || (idx > ub), TRAP_bounds);
dst.type = OP_NONE;
break;
}
@@ -3048,7 +3027,7 @@ x86_emulate(
_regs._eflags &= ~X86_EFLAGS_ZF;
dst.type = OP_NONE;
}
- generate_exception_if(!in_protmode(ctxt, ops), EXC_UD);
+ generate_exception_if(!in_protmode(ctxt, ops), TRAP_invalid_op);
}
break;
@@ -3212,7 +3191,7 @@ x86_emulate(
dst.type = OP_NONE;
break;
}
- generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
+ generate_exception_if((modrm_reg & 7) != 0, TRAP_invalid_op);
case 0x88 ... 0x8b: /* mov */
case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
@@ -3221,7 +3200,7 @@ x86_emulate(
case 0x8c: /* mov Sreg,r/m */
seg = modrm_reg & 7; /* REX.R is ignored. */
- generate_exception_if(!is_x86_user_segment(seg), EXC_UD);
+ generate_exception_if(!is_x86_user_segment(seg), TRAP_invalid_op);
store_selector:
fail_if(ops->read_segment == NULL);
if ( (rc = ops->read_segment(seg, &sreg, ctxt)) != 0 )
@@ -3234,7 +3213,7 @@ x86_emulate(
case 0x8e: /* mov r/m,Sreg */
seg = modrm_reg & 7; /* REX.R is ignored. */
generate_exception_if(!is_x86_user_segment(seg) ||
- seg == x86_seg_cs, EXC_UD);
+ seg == x86_seg_cs, TRAP_invalid_op);
if ( (rc = load_seg(seg, src.val, 0, NULL, ctxt, ops)) != 0 )
goto done;
if ( seg == x86_seg_ss )
@@ -3243,12 +3222,12 @@ x86_emulate(
break;
case 0x8d: /* lea */
- generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(ea.type != OP_MEM, TRAP_invalid_op);
dst.val = ea.mem.off;
break;
case 0x8f: /* pop (sole member of Grp1a) */
- generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
+ generate_exception_if((modrm_reg & 7) != 0, TRAP_invalid_op);
/* 64-bit mode: POP defaults to a 64-bit operand. */
if ( mode_64bit() && (dst.bytes == 4) )
dst.bytes = 8;
@@ -3330,7 +3309,7 @@ x86_emulate(
if ( rc != X86EMUL_OKAY )
goto done;
}
- generate_exception_if(!(cr4 & X86_CR4_VME), EXC_GP, 0);
+ generate_exception_if(!(cr4 & X86_CR4_VME), TRAP_gp_fault, 0);
src.val = (_regs.flags & ~X86_EFLAGS_IF) | X86_EFLAGS_IOPL;
if ( _regs._eflags & X86_EFLAGS_VIF )
src.val |= X86_EFLAGS_IF;
@@ -3355,7 +3334,7 @@ x86_emulate(
}
generate_exception_if(!(cr4 & X86_CR4_VME) &&
MASK_EXTR(_regs._eflags, X86_EFLAGS_IOPL) != 3,
- EXC_GP, 0);
+ TRAP_gp_fault, 0);
}
mask |= X86_EFLAGS_IOPL;
if ( !mode_iopl() )
@@ -3375,7 +3354,7 @@ x86_emulate(
if ( dst.val & X86_EFLAGS_IF )
{
generate_exception_if(_regs._eflags & X86_EFLAGS_VIP,
- EXC_GP, 0);
+ TRAP_gp_fault, 0);
dst.val |= X86_EFLAGS_VIF;
}
else
@@ -3553,7 +3532,7 @@ x86_emulate(
case 0xc5: /* lds */
seg = (b & 1) * 3; /* es = 0, ds = 3 */
les:
- generate_exception_if(src.type != OP_MEM, EXC_UD);
+ generate_exception_if(src.type != OP_MEM, TRAP_invalid_op);
if ( (rc = read_ulong(src.mem.seg, src.mem.off + src.bytes,
&dst.val, 2, ctxt, ops)) != X86EMUL_OKAY )
goto done;
@@ -3625,7 +3604,7 @@ x86_emulate(
break;
case 0xcc: /* int3 */
- src.val = EXC_BP;
+ src.val = TRAP_int3;
swint_type = x86_swint_int3;
goto swint;
@@ -3640,7 +3619,7 @@ x86_emulate(
case 0xce: /* into */
if ( !(_regs._eflags & X86_EFLAGS_OF) )
break;
- src.val = EXC_OF;
+ src.val = TRAP_overflow;
swint_type = x86_swint_into;
goto swint;
@@ -3689,7 +3668,7 @@ x86_emulate(
{
uint8_t al = _regs.al;
- generate_exception_if(!base, EXC_DE);
+ generate_exception_if(!base, TRAP_divide_error);
_regs.ax = ((al / base) << 8) | (al % base);
}
_regs._eflags &= ~(X86_EFLAGS_SF|X86_EFLAGS_ZF|X86_EFLAGS_PF);
@@ -3805,7 +3784,7 @@ x86_emulate(
emulate_fpu_insn_stub(0xd9, modrm);
break;
default:
- generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(ea.type != OP_MEM, TRAP_invalid_op);
dst = ea;
switch ( modrm_reg & 7 )
{
@@ -3840,7 +3819,7 @@ x86_emulate(
dst.bytes = 2;
break;
default:
- generate_exception(EXC_UD);
+ generate_exception(TRAP_invalid_op);
}
/*
* Control instructions can't raise FPU exceptions, so we need
@@ -3869,7 +3848,7 @@ x86_emulate(
emulate_fpu_insn_stub(0xda, modrm);
break;
default:
- generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(ea.type != OP_MEM, TRAP_invalid_op);
if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &src.val,
4, ctxt)) != X86EMUL_OKAY )
goto done;
@@ -3927,7 +3906,7 @@ x86_emulate(
emulate_fpu_insn_stub(0xdb, modrm);
break;
default:
- generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(ea.type != OP_MEM, TRAP_invalid_op);
dst = ea;
switch ( modrm_reg & 7 )
{
@@ -3968,7 +3947,7 @@ x86_emulate(
dst.type = OP_NONE;
break;
default:
- generate_exception(EXC_UD);
+ generate_exception(TRAP_invalid_op);
}
if ( dst.type == OP_MEM && !fpu_check_write() )
dst.type = OP_NONE;
@@ -4041,7 +4020,7 @@ x86_emulate(
emulate_fpu_insn_stub(0xdd, modrm);
break;
default:
- generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(ea.type != OP_MEM, TRAP_invalid_op);
dst = ea;
switch ( modrm_reg & 7 )
{
@@ -4073,7 +4052,7 @@ x86_emulate(
dst.bytes = 2;
break;
default:
- generate_exception(EXC_UD);
+ generate_exception(TRAP_invalid_op);
}
/*
* Control instructions can't raise FPU exceptions, so we need
@@ -4102,7 +4081,7 @@ x86_emulate(
emulate_fpu_insn_stub(0xde, modrm);
break;
default:
- generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(ea.type != OP_MEM, TRAP_invalid_op);
switch ( modrm_reg & 7 )
{
case 0: /* fiadd m16i */
@@ -4158,7 +4137,7 @@ x86_emulate(
emulate_fpu_insn_stub(0xdf, modrm);
break;
default:
- generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(ea.type != OP_MEM, TRAP_invalid_op);
dst = ea;
switch ( modrm_reg & 7 )
{
@@ -4296,12 +4275,12 @@ x86_emulate(
break;
case 0xf1: /* int1 (icebp) */
- src.val = EXC_DB;
+ src.val = TRAP_debug;
swint_type = x86_swint_icebp;
goto swint;
case 0xf4: /* hlt */
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
ctxt->retire.hlt = true;
break;
@@ -4317,7 +4296,7 @@ x86_emulate(
unsigned long u[2], v;
case 0 ... 1: /* test */
- generate_exception_if(lock_prefix, EXC_UD);
+ generate_exception_if(lock_prefix, TRAP_invalid_op);
goto test;
case 2: /* not */
dst.val = ~dst.val;
@@ -4412,7 +4391,7 @@ x86_emulate(
v = (uint8_t)src.val;
generate_exception_if(
div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
- EXC_DE);
+ TRAP_divide_error);
dst.val = (uint8_t)u[0];
_regs.ah = u[1];
break;
@@ -4422,7 +4401,7 @@ x86_emulate(
v = (uint16_t)src.val;
generate_exception_if(
div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
- EXC_DE);
+ TRAP_divide_error);
dst.val = (uint16_t)u[0];
_regs.dx = u[1];
break;
@@ -4433,7 +4412,7 @@ x86_emulate(
v = (uint32_t)src.val;
generate_exception_if(
div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
- EXC_DE);
+ TRAP_divide_error);
dst.val = (uint32_t)u[0];
_regs.rdx = (uint32_t)u[1];
break;
@@ -4442,7 +4421,7 @@ x86_emulate(
u[0] = _regs.r(ax);
u[1] = _regs.r(dx);
v = src.val;
- generate_exception_if(div_dbl(u, v), EXC_DE);
+ generate_exception_if(div_dbl(u, v), TRAP_divide_error);
dst.val = u[0];
_regs.r(dx) = u[1];
break;
@@ -4457,7 +4436,7 @@ x86_emulate(
v = (int8_t)src.val;
generate_exception_if(
idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
- EXC_DE);
+ TRAP_divide_error);
dst.val = (int8_t)u[0];
_regs.ah = u[1];
break;
@@ -4467,7 +4446,7 @@ x86_emulate(
v = (int16_t)src.val;
generate_exception_if(
idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
- EXC_DE);
+ TRAP_divide_error);
dst.val = (int16_t)u[0];
_regs.dx = u[1];
break;
@@ -4478,7 +4457,7 @@ x86_emulate(
v = (int32_t)src.val;
generate_exception_if(
idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
- EXC_DE);
+ TRAP_divide_error);
dst.val = (int32_t)u[0];
_regs.rdx = (uint32_t)u[1];
break;
@@ -4487,7 +4466,7 @@ x86_emulate(
u[0] = _regs.r(ax);
u[1] = _regs.r(dx);
v = src.val;
- generate_exception_if(idiv_dbl(u, v), EXC_DE);
+ generate_exception_if(idiv_dbl(u, v), TRAP_divide_error);
dst.val = u[0];
_regs.r(dx) = u[1];
break;
@@ -4509,7 +4488,7 @@ x86_emulate(
_regs._eflags &= ~X86_EFLAGS_IF;
else
{
- generate_exception_if(!mode_vif(), EXC_GP, 0);
+ generate_exception_if(!mode_vif(), TRAP_gp_fault, 0);
_regs._eflags &= ~X86_EFLAGS_VIF;
}
break;
@@ -4525,7 +4504,7 @@ x86_emulate(
{
generate_exception_if((_regs._eflags & X86_EFLAGS_VIP) ||
!mode_vif(),
- EXC_GP, 0);
+ TRAP_gp_fault, 0);
if ( !(_regs._eflags & X86_EFLAGS_VIF) )
ctxt->retire.sti = true;
_regs._eflags |= X86_EFLAGS_VIF;
@@ -4541,7 +4520,7 @@ x86_emulate(
break;
case 0xfe: /* Grp4 */
- generate_exception_if((modrm_reg & 7) >= 2, EXC_UD);
+ generate_exception_if((modrm_reg & 7) >= 2, TRAP_invalid_op);
/* Fallthrough. */
case 0xff: /* Grp5 */
switch ( modrm_reg & 7 )
@@ -4569,7 +4548,7 @@ x86_emulate(
break;
case 3: /* call (far, absolute indirect) */
case 5: /* jmp (far, absolute indirect) */
- generate_exception_if(src.type != OP_MEM, EXC_UD);
+ generate_exception_if(src.type != OP_MEM, TRAP_invalid_op);
if ( (rc = read_ulong(src.mem.seg, src.mem.off + op_bytes,
&imm2, 2, ctxt, ops)) )
@@ -4581,20 +4560,20 @@ x86_emulate(
case 6: /* push */
goto push;
case 7:
- generate_exception(EXC_UD);
+ generate_exception(TRAP_invalid_op);
}
break;
case X86EMUL_OPC(0x0f, 0x00): /* Grp6 */
seg = (modrm_reg & 1) ? x86_seg_tr : x86_seg_ldtr;
- generate_exception_if(!in_protmode(ctxt, ops), EXC_UD);
+ generate_exception_if(!in_protmode(ctxt, ops), TRAP_invalid_op);
switch ( modrm_reg & 6 )
{
case 0: /* sldt / str */
- generate_exception_if(umip_active(ctxt, ops), EXC_GP, 0);
+ generate_exception_if(umip_active(ctxt, ops), TRAP_gp_fault, 0);
goto store_selector;
case 2: /* lldt / ltr */
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
if ( (rc = load_seg(seg, src.val, 0, NULL, ctxt, ops)) != 0 )
goto done;
break;
@@ -4612,7 +4591,7 @@ x86_emulate(
case X86EMUL_EXCEPTION:
if ( ctxt->event_pending )
{
- ASSERT(ctxt->event.vector == EXC_PF);
+ ASSERT(ctxt->event.vector == TRAP_page_fault);
default:
goto done;
}
@@ -4622,7 +4601,7 @@ x86_emulate(
}
break;
default:
- generate_exception_if(true, EXC_UD);
+ generate_exception_if(true, TRAP_invalid_op);
break;
}
break;
@@ -4635,7 +4614,7 @@ x86_emulate(
case 0xca: /* clac */
case 0xcb: /* stac */
vcpu_must_have(smap);
- generate_exception_if(vex.pfx || !mode_ring0(), EXC_UD);
+ generate_exception_if(vex.pfx || !mode_ring0(), TRAP_invalid_op);
_regs._eflags &= ~X86_EFLAGS_AC;
if ( modrm == 0xcb )
@@ -4644,41 +4623,41 @@ x86_emulate(
#ifdef __XEN__
case 0xd1: /* xsetbv */
- generate_exception_if(vex.pfx, EXC_UD);
+ generate_exception_if(vex.pfx, TRAP_invalid_op);
if ( !ops->read_cr || ops->read_cr(4, &cr4, ctxt) != X86EMUL_OKAY )
cr4 = 0;
- generate_exception_if(!(cr4 & X86_CR4_OSXSAVE), EXC_UD);
+ generate_exception_if(!(cr4 & X86_CR4_OSXSAVE), TRAP_invalid_op);
generate_exception_if(!mode_ring0() ||
handle_xsetbv(_regs._ecx,
_regs._eax | (_regs.rdx << 32)),
- EXC_GP, 0);
+ TRAP_gp_fault, 0);
goto complete_insn;
#endif
case 0xd4: /* vmfunc */
- generate_exception_if(vex.pfx, EXC_UD);
+ generate_exception_if(vex.pfx, TRAP_invalid_op);
fail_if(!ops->vmfunc);
if ( (rc = ops->vmfunc(ctxt)) != X86EMUL_OKAY )
goto done;
goto complete_insn;
case 0xd5: /* xend */
- generate_exception_if(vex.pfx, EXC_UD);
- generate_exception_if(!vcpu_has_rtm(), EXC_UD);
- generate_exception_if(vcpu_has_rtm(), EXC_GP, 0);
+ generate_exception_if(vex.pfx, TRAP_invalid_op);
+ generate_exception_if(!vcpu_has_rtm(), TRAP_invalid_op);
+ generate_exception_if(vcpu_has_rtm(), TRAP_gp_fault, 0);
break;
case 0xd6: /* xtest */
- generate_exception_if(vex.pfx, EXC_UD);
+ generate_exception_if(vex.pfx, TRAP_invalid_op);
generate_exception_if(!vcpu_has_rtm() && !vcpu_has_hle(),
- EXC_UD);
+ TRAP_invalid_op);
/* Neither HLE nor RTM can be active when we get here. */
_regs._eflags |= X86_EFLAGS_ZF;
goto complete_insn;
case 0xdf: /* invlpga */
- generate_exception_if(!in_protmode(ctxt, ops), EXC_UD);
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
+ generate_exception_if(!in_protmode(ctxt, ops), TRAP_invalid_op);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
fail_if(ops->invlpg == NULL);
if ( (rc = ops->invlpg(x86_seg_none, truncate_ea(_regs.r(ax)),
ctxt)) )
@@ -4706,7 +4685,7 @@ x86_emulate(
ops->cpuid(1, 0, &cpuid_leaf, ctxt) == X86EMUL_OKAY )
limit = ((cpuid_leaf.b >> 8) & 0xff) * 8;
generate_exception_if(limit < sizeof(long) ||
- (limit & (limit - 1)), EXC_UD);
+ (limit & (limit - 1)), TRAP_invalid_op);
base &= ~(limit - 1);
if ( ops->rep_stos )
{
@@ -4741,8 +4720,8 @@ x86_emulate(
{
case 0: /* sgdt */
case 1: /* sidt */
- generate_exception_if(ea.type != OP_MEM, EXC_UD);
- generate_exception_if(umip_active(ctxt, ops), EXC_GP, 0);
+ generate_exception_if(ea.type != OP_MEM, TRAP_invalid_op);
+ generate_exception_if(umip_active(ctxt, ops), TRAP_gp_fault, 0);
fail_if(!ops->read_segment || !ops->write);
if ( (rc = ops->read_segment(seg, &sreg, ctxt)) )
goto done;
@@ -4761,8 +4740,8 @@ x86_emulate(
break;
case 2: /* lgdt */
case 3: /* lidt */
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
- generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
+ generate_exception_if(ea.type != OP_MEM, TRAP_invalid_op);
fail_if(ops->write_segment == NULL);
memset(&sreg, 0, sizeof(sreg));
if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
@@ -4770,7 +4749,7 @@ x86_emulate(
(rc = read_ulong(ea.mem.seg, ea.mem.off+2,
&base, mode_64bit() ? 8 : 4, ctxt, ops)) )
goto done;
- generate_exception_if(!is_canonical_address(base), EXC_GP, 0);
+ generate_exception_if(!is_canonical_address(base), TRAP_gp_fault, 0);
sreg.base = base;
sreg.limit = limit;
if ( !mode_64bit() && op_bytes == 2 )
@@ -4779,7 +4758,7 @@ x86_emulate(
goto done;
break;
case 4: /* smsw */
- generate_exception_if(umip_active(ctxt, ops), EXC_GP, 0);
+ generate_exception_if(umip_active(ctxt, ops), TRAP_gp_fault, 0);
if ( ea.type == OP_MEM )
{
fail_if(!ops->write);
@@ -4796,7 +4775,7 @@ x86_emulate(
case 6: /* lmsw */
fail_if(ops->read_cr == NULL);
fail_if(ops->write_cr == NULL);
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
goto done;
if ( ea.type == OP_REG )
@@ -4810,8 +4789,8 @@ x86_emulate(
goto done;
break;
case 7: /* invlpg */
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
- generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
+ generate_exception_if(ea.type != OP_MEM, TRAP_invalid_op);
fail_if(ops->invlpg == NULL);
if ( (rc = ops->invlpg(ea.mem.seg, ea.mem.off, ctxt)) )
goto done;
@@ -4823,7 +4802,7 @@ x86_emulate(
}
case X86EMUL_OPC(0x0f, 0x02): /* lar */
- generate_exception_if(!in_protmode(ctxt, ops), EXC_UD);
+ generate_exception_if(!in_protmode(ctxt, ops), TRAP_invalid_op);
_regs._eflags &= ~X86_EFLAGS_ZF;
switch ( rc = protmode_load_seg(x86_seg_none, src.val, false, &sreg,
ctxt, ops) )
@@ -4854,7 +4833,7 @@ x86_emulate(
case X86EMUL_EXCEPTION:
if ( ctxt->event_pending )
{
- ASSERT(ctxt->event.vector == EXC_PF);
+ ASSERT(ctxt->event.vector == TRAP_page_fault);
default:
goto done;
}
@@ -4872,7 +4851,7 @@ x86_emulate(
break;
case X86EMUL_OPC(0x0f, 0x03): /* lsl */
- generate_exception_if(!in_protmode(ctxt, ops), EXC_UD);
+ generate_exception_if(!in_protmode(ctxt, ops), TRAP_invalid_op);
_regs._eflags &= ~X86_EFLAGS_ZF;
switch ( rc = protmode_load_seg(x86_seg_none, src.val, false, &sreg,
ctxt, ops) )
@@ -4900,7 +4879,7 @@ x86_emulate(
case X86EMUL_EXCEPTION:
if ( ctxt->event_pending )
{
- ASSERT(ctxt->event.vector == EXC_PF);
+ ASSERT(ctxt->event.vector == TRAP_page_fault);
default:
goto done;
}
@@ -4917,13 +4896,13 @@ x86_emulate(
case X86EMUL_OPC(0x0f, 0x05): /* syscall */ {
uint64_t msr_content;
- generate_exception_if(!in_protmode(ctxt, ops), EXC_UD);
+ generate_exception_if(!in_protmode(ctxt, ops), TRAP_invalid_op);
/* Inject #UD if syscall/sysret are disabled. */
fail_if(ops->read_msr == NULL);
if ( (rc = ops->read_msr(MSR_EFER, &msr_content, ctxt)) != 0 )
goto done;
- generate_exception_if((msr_content & EFER_SCE) == 0, EXC_UD);
+ generate_exception_if((msr_content & EFER_SCE) == 0, TRAP_invalid_op);
if ( (rc = ops->read_msr(MSR_STAR, &msr_content, ctxt)) != 0 )
goto done;
@@ -4991,7 +4970,7 @@ x86_emulate(
}
case X86EMUL_OPC(0x0f, 0x06): /* clts */
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
if ( (rc = ops->read_cr(0, &dst.val, ctxt)) != X86EMUL_OKAY ||
(rc = ops->write_cr(0, dst.val & ~X86_CR0_TS, ctxt)) != X86EMUL_OKAY )
@@ -5000,7 +4979,7 @@ x86_emulate(
case X86EMUL_OPC(0x0f, 0x08): /* invd */
case X86EMUL_OPC(0x0f, 0x09): /* wbinvd */
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
fail_if(ops->wbinvd == NULL);
if ( (rc = ops->wbinvd(ctxt)) != 0 )
goto done;
@@ -5009,7 +4988,7 @@ x86_emulate(
case X86EMUL_OPC(0x0f, 0x0b): /* ud2 */
case X86EMUL_OPC(0x0f, 0xb9): /* ud1 */
case X86EMUL_OPC(0x0f, 0xff): /* ud0 */
- generate_exception(EXC_UD);
+ generate_exception(TRAP_invalid_op);
case X86EMUL_OPC(0x0f, 0x0d): /* GrpP (prefetch) */
case X86EMUL_OPC(0x0f, 0x18): /* Grp16 (prefetch/nop) */
@@ -5098,7 +5077,7 @@ x86_emulate(
generate_exception_if(!(mxcsr & MXCSR_MM) &&
!is_aligned(ea.mem.seg, ea.mem.off, ea.bytes,
ctxt, ops),
- EXC_GP, 0);
+ TRAP_gp_fault, 0);
if ( !(b & 1) )
rc = ops->read(ea.mem.seg, ea.mem.off+0, mmvalp,
ea.bytes, ctxt);
@@ -5133,7 +5112,7 @@ x86_emulate(
case X86EMUL_OPC(0x0f, 0x21): /* mov dr,reg */
case X86EMUL_OPC(0x0f, 0x22): /* mov reg,cr */
case X86EMUL_OPC(0x0f, 0x23): /* mov reg,dr */
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
if ( b & 2 )
{
/* Write to CR/DR. */
@@ -5156,7 +5135,7 @@ x86_emulate(
break;
case X86EMUL_OPC(0x0f, 0x30): /* wrmsr */
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
fail_if(ops->write_msr == NULL);
if ( (rc = ops->write_msr(_regs._ecx,
((uint64_t)_regs.r(dx) << 32) | _regs._eax,
@@ -5172,7 +5151,7 @@ x86_emulate(
fail_if(ops->read_cr == NULL);
if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
goto done;
- generate_exception_if(cr4 & X86_CR4_TSD, EXC_GP, 0);
+ generate_exception_if(cr4 & X86_CR4_TSD, TRAP_gp_fault, 0);
}
fail_if(ops->read_msr == NULL);
if ( (rc = ops->read_msr(MSR_IA32_TSC, &val, ctxt)) != 0 )
@@ -5184,7 +5163,7 @@ x86_emulate(
case X86EMUL_OPC(0x0f, 0x32): /* rdmsr */ {
uint64_t val;
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
fail_if(ops->read_msr == NULL);
if ( (rc = ops->read_msr(_regs._ecx, &val, ctxt)) != 0 )
goto done;
@@ -5204,15 +5183,15 @@ x86_emulate(
int lm;
vcpu_must_have(sep);
- generate_exception_if(mode_ring0(), EXC_GP, 0);
- generate_exception_if(!in_protmode(ctxt, ops), EXC_GP, 0);
+ generate_exception_if(mode_ring0(), TRAP_gp_fault, 0);
+ generate_exception_if(!in_protmode(ctxt, ops), TRAP_gp_fault, 0);
fail_if(ops->read_msr == NULL);
if ( (rc = ops->read_msr(MSR_IA32_SYSENTER_CS, &msr_content, ctxt))
!= 0 )
goto done;
- generate_exception_if(!(msr_content & 0xfffc), EXC_GP, 0);
+ generate_exception_if(!(msr_content & 0xfffc), TRAP_gp_fault, 0);
lm = in_longmode(ctxt, ops);
if ( lm < 0 )
goto cannot_emulate;
@@ -5254,19 +5233,19 @@ x86_emulate(
uint64_t msr_content;
vcpu_must_have(sep);
- generate_exception_if(!mode_ring0(), EXC_GP, 0);
- generate_exception_if(!in_protmode(ctxt, ops), EXC_GP, 0);
+ generate_exception_if(!mode_ring0(), TRAP_gp_fault, 0);
+ generate_exception_if(!in_protmode(ctxt, ops), TRAP_gp_fault, 0);
fail_if(ops->read_msr == NULL);
if ( (rc = ops->read_msr(MSR_IA32_SYSENTER_CS, &msr_content, ctxt))
!= 0 )
goto done;
- generate_exception_if(!(msr_content & 0xfffc), EXC_GP, 0);
+ generate_exception_if(!(msr_content & 0xfffc), TRAP_gp_fault, 0);
generate_exception_if(op_bytes == 8 &&
(!is_canonical_address(_regs.r(dx)) ||
!is_canonical_address(_regs.r(cx))),
- EXC_GP, 0);
+ TRAP_gp_fault, 0);
cs.sel = (msr_content | 3) + /* SELECTOR_RPL_MASK */
(op_bytes == 8 ? 32 : 16);
@@ -5364,11 +5343,11 @@ x86_emulate(
switch ( b )
{
case 0x7e:
- generate_exception_if(vex.l, EXC_UD);
+ generate_exception_if(vex.l, TRAP_invalid_op);
ea.bytes = op_bytes;
break;
case 0xd6:
- generate_exception_if(vex.l, EXC_UD);
+ generate_exception_if(vex.l, TRAP_invalid_op);
ea.bytes = 8;
break;
}
@@ -5383,7 +5362,7 @@ x86_emulate(
generate_exception_if(!(mxcsr & MXCSR_MM) &&
!is_aligned(ea.mem.seg, ea.mem.off, ea.bytes,
ctxt, ops),
- EXC_GP, 0);
+ TRAP_gp_fault, 0);
if ( b == 0x6f )
rc = ops->read(ea.mem.seg, ea.mem.off+0, mmvalp,
ea.bytes, ctxt);
@@ -5435,7 +5414,7 @@ x86_emulate(
fail_if(ops->cpuid == NULL);
rc = ops->cpuid(_regs._eax, _regs._ecx, &cpuid_leaf, ctxt);
generate_exception_if(rc == X86EMUL_EXCEPTION,
- EXC_GP, 0); /* CPUID Faulting? */
+ TRAP_gp_fault, 0); /* CPUID Faulting? */
if ( rc != X86EMUL_OKAY )
goto done;
_regs.r(ax) = cpuid_leaf.a;
@@ -5445,7 +5424,7 @@ x86_emulate(
break;
case X86EMUL_OPC(0x0f, 0xa3): bt: /* bt */
- generate_exception_if(lock_prefix, EXC_UD);
+ generate_exception_if(lock_prefix, TRAP_invalid_op);
emulate_2op_SrcV_nobyte("bt", src, dst, _regs._eflags);
dst.type = OP_NONE;
break;
@@ -5456,7 +5435,7 @@ x86_emulate(
case X86EMUL_OPC(0x0f, 0xad): /* shrd %%cl,r,r/m */ {
uint8_t shift, width = dst.bytes << 3;
- generate_exception_if(lock_prefix, EXC_UD);
+ generate_exception_if(lock_prefix, TRAP_invalid_op);
if ( b & 1 )
shift = _regs.cl;
else
@@ -5498,14 +5477,14 @@ x86_emulate(
{
case 5: /* lfence */
fail_if(modrm_mod != 3);
- generate_exception_if(vex.pfx, EXC_UD);
+ generate_exception_if(vex.pfx, TRAP_invalid_op);
vcpu_must_have(sse2);
asm volatile ( "lfence" ::: "memory" );
break;
case 6:
if ( modrm_mod == 3 ) /* mfence */
{
- generate_exception_if(vex.pfx, EXC_UD);
+ generate_exception_if(vex.pfx, TRAP_invalid_op);
vcpu_must_have(sse2);
asm volatile ( "mfence" ::: "memory" );
break;
@@ -5520,7 +5499,7 @@ x86_emulate(
case 7:
if ( modrm_mod == 3 ) /* sfence */
{
- generate_exception_if(vex.pfx, EXC_UD);
+ generate_exception_if(vex.pfx, TRAP_invalid_op);
vcpu_must_have(sse);
asm volatile ( "sfence" ::: "memory" );
break;
@@ -5541,11 +5520,11 @@ x86_emulate(
case X86EMUL_OPC_F3(0x0f, 0xae): /* Grp15 */
fail_if(modrm_mod != 3);
- generate_exception_if((modrm_reg & 4) || !mode_64bit(), EXC_UD);
+ generate_exception_if((modrm_reg & 4) || !mode_64bit(), TRAP_invalid_op);
fail_if(!ops->read_cr);
if ( (rc = ops->read_cr(4, &cr4, ctxt)) != X86EMUL_OKAY )
goto done;
- generate_exception_if(!(cr4 & X86_CR4_FSGSBASE), EXC_UD);
+ generate_exception_if(!(cr4 & X86_CR4_FSGSBASE), TRAP_invalid_op);
seg = modrm_reg & 1 ? x86_seg_gs : x86_seg_fs;
fail_if(!ops->read_segment);
if ( (rc = ops->read_segment(seg, &sreg, ctxt)) != X86EMUL_OKAY )
@@ -5565,7 +5544,7 @@ x86_emulate(
{
sreg.base = *dst.reg;
generate_exception_if(!is_canonical_address(sreg.base),
- EXC_GP, 0);
+ TRAP_gp_fault, 0);
}
else
sreg.base = (uint32_t)*dst.reg;
@@ -5634,7 +5613,7 @@ x86_emulate(
case 5: goto bts;
case 6: goto btr;
case 7: goto btc;
- default: generate_exception(EXC_UD);
+ default: generate_exception(TRAP_invalid_op);
}
break;
@@ -5746,7 +5725,7 @@ x86_emulate(
#ifdef HAVE_GAS_RDRAND
case 6: /* rdrand */
- generate_exception_if(rep_prefix(), EXC_UD);
+ generate_exception_if(rep_prefix(), TRAP_invalid_op);
host_and_vcpu_must_have(rdrand);
dst = ea;
switch ( op_bytes )
@@ -5777,7 +5756,7 @@ x86_emulate(
{
uint64_t tsc_aux;
- generate_exception_if(ea.type != OP_REG, EXC_UD);
+ generate_exception_if(ea.type != OP_REG, TRAP_invalid_op);
vcpu_must_have(rdpid);
fail_if(!ops->read_msr);
if ( (rc = ops->read_msr(MSR_TSC_AUX, &tsc_aux,
@@ -5789,7 +5768,7 @@ x86_emulate(
break;
}
#ifdef HAVE_GAS_RDSEED
- generate_exception_if(rep_prefix(), EXC_UD);
+ generate_exception_if(rep_prefix(), TRAP_invalid_op);
host_and_vcpu_must_have(rdseed);
dst = ea;
switch ( op_bytes )
@@ -5819,14 +5798,14 @@ x86_emulate(
}
/* cmpxchg8b/cmpxchg16b */
- generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
+ generate_exception_if((modrm_reg & 7) != 1, TRAP_invalid_op);
fail_if(!ops->cmpxchg);
if ( rex_prefix & REX_W )
{
host_and_vcpu_must_have(cx16);
generate_exception_if(!is_aligned(ea.mem.seg, ea.mem.off, 16,
ctxt, ops),
- EXC_GP, 0);
+ TRAP_gp_fault, 0);
op_bytes = 16;
}
else
@@ -5976,7 +5955,7 @@ x86_emulate(
host_and_vcpu_must_have(bmi2);
else
host_and_vcpu_must_have(bmi1);
- generate_exception_if(vex.l, EXC_UD);
+ generate_exception_if(vex.l, TRAP_invalid_op);
buf[0] = 0xc4;
*pvex = vex;
@@ -6010,7 +5989,7 @@ x86_emulate(
goto cannot_emulate;
}
- generate_exception_if(vex.l, EXC_UD);
+ generate_exception_if(vex.l, TRAP_invalid_op);
buf[0] = 0xc4;
*pvex = vex;
@@ -6063,7 +6042,7 @@ x86_emulate(
case X86EMUL_OPC_VEX_F2(0x0f38, 0xf6): /* mulx r/m,r,r */
vcpu_must_have(bmi2);
- generate_exception_if(vex.l, EXC_UD);
+ generate_exception_if(vex.l, TRAP_invalid_op);
ea.reg = decode_vex_gpr(vex.reg, &_regs, ctxt);
if ( mode_64bit() && vex.w )
asm ( "mulq %3" : "=a" (*ea.reg), "=d" (dst.val)
@@ -6075,7 +6054,7 @@ x86_emulate(
case X86EMUL_OPC_VEX_F2(0x0f3a, 0xf0): /* rorx imm,r/m,r */
vcpu_must_have(bmi2);
- generate_exception_if(vex.l || vex.reg != 0xf, EXC_UD);
+ generate_exception_if(vex.l || vex.reg != 0xf, TRAP_invalid_op);
if ( ea.type == OP_REG )
src.val = *ea.reg;
else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off, &src.val, op_bytes,
@@ -6108,7 +6087,7 @@ x86_emulate(
uint8_t *buf = get_stub(stub);
typeof(vex) *pxop = container_of(buf + 1, typeof(vex), raw[0]);
- generate_exception_if(vex.l, EXC_UD);
+ generate_exception_if(vex.l, TRAP_invalid_op);
buf[0] = 0x8f;
*pxop = vex;
@@ -6142,7 +6121,7 @@ x86_emulate(
typeof(vex) *pxop = container_of(buf + 1, typeof(vex), raw[0]);
host_and_vcpu_must_have(tbm);
- generate_exception_if(vex.l || vex.reg != 0xf, EXC_UD);
+ generate_exception_if(vex.l || vex.reg != 0xf, TRAP_invalid_op);
if ( ea.type == OP_REG )
src.val = *ea.reg;
--
2.11.0