/******************************************************************************
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf( _f , ## _a )
#else
#include "kvm.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include "x86_emulate.h"
#include <linux/module.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
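/*
 * Illustrative note (not part of the original source): the fields above
 * occupy disjoint bit ranges, so a table entry decomposes by simple
 * masking.  For instance, ByteOp | DstMem | SrcReg | ModRM yields:
 *
 *	(d & DstMask) == DstMem		destination is memory
 *	(d & SrcMask) == SrcReg		source is a register
 *	(d & ByteOp)  != 0		8-bit operands
 *	(d & ModRM)   != 0		a ModRM byte follows the opcode
 */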
static u8 opcode_table[256] = {
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */,
	0, 0, 0, 0, 0, 0, 0, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, DstMem | SrcNone | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
	ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, SrcImm | ImplicitOps, 0, SrcImmByte | ImplicitOps, 0, 0, 0, 0,
	ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static u16 twobyte_table[256] = {
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	0, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/*
 * Tell the emulator that of the Group 7 instructions (sgdt, lidt, etc.) we
 * are interested only in invlpg and not in any of the rest.
 *
 * invlpg is a special instruction in that the data it references may not
 * be mapped.
 */
void kvm_emulator_want_group7_invlpg(void)
{
	twobyte_table[1] &= ~SrcMem;
}
EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg);
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"orl %"_LO32 _tmp",("_STK"); " \
	/* _sav &= ~msk; */ \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",%"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ((_dst).bytes) { \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"w %"_wx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _wy ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"l %"_lx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _ly ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_2op_8byte(_op, _src, _dst, \
					    _eflags, _qx, _qy); \
			break; \
		} \
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ((_dst).bytes) { \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"b %"_bx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _by ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		default: \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break; \
		} \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
	do { \
		unsigned long _tmp; \
		switch ((_dst).bytes) { \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"b %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"w %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"l %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_1op_8byte(_op, _dst, _eflags); \
			break; \
		} \
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"q %"_qx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK) ); \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_op"q %1; " \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK) ); \
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
			   (_size), ctxt->vcpu); \
	if (rc != 0) \
		goto done; \
	(_eip) += (_size); \
	(_type)_x; \
})
/* Access/update address held in a register, based on addressing mode. */
#define address_mask(reg) \
	((ad_bytes == sizeof(unsigned long)) ? \
	 (reg) : ((reg) & ((1UL << (ad_bytes << 3)) - 1)))
#define register_address(base, reg) \
	((base) + address_mask(reg))
#define register_address_increment(reg, inc) \
	do { \
		/* signed type ensures sign extension to long */ \
		long _inc = (inc); \
		if (ad_bytes == sizeof(unsigned long)) \
			(reg) += _inc; \
		else \
			(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
				(((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
	} while (0)
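/*
 * Worked example (illustration only): with ad_bytes == 2 on a 64-bit host,
 * address_mask() keeps only the low 16 bits, and the increment above wraps
 * within them: for reg == 0x12ffff and inc == 1 the low word wraps
 * 0xffff -> 0x0000 while the high bits are preserved, giving 0x120000.
 */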
#define JMP_REL(rel) \
	do { \
		_eip += (int)(rel); \
		_eip = ((op_bytes == 2) ? (uint16_t)_eip : (uint32_t)_eip); \
	} while (0)
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
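/*
 * Worked example (illustration only): with highbyte_regs set, modrm_reg 7
 * selects BH, i.e. byte 1 of regs[7 & 3] == regs[VCPU_REGS_RBX], matching
 * the legacy byte-register encoding AL,CL,DL,BL,AH,CH,DH,BH.
 */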
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
int
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	u8 b, sib, twobyte = 0, rex_prefix = 0;
	u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
	u16 d;
	unsigned long *override_base = NULL;
	unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
	int rc = 0;
	struct operand src, dst;
	unsigned long cr2 = ctxt->cr2;
	int mode = ctxt->mode;
	unsigned long modrm_ea;
	int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;
	int no_wb = 0;
	u64 msr_data;

	/* Shadow copy of register state. Committed on successful emulation. */
	unsigned long _regs[NR_VCPU_REGS];
	unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
	unsigned long modrm_val = 0;

	memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		op_bytes = ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		op_bytes = ad_bytes = 4;
		break;
	case X86EMUL_MODE_PROT64:
		op_bytes = 4;
		ad_bytes = 8;
		break;
	default:
		return -1;
	}
	/* Legacy prefixes. */
	for (i = 0; i < 8; i++) {
		switch (b = insn_fetch(u8, 1, _eip)) {
		case 0x66:	/* operand-size override */
			op_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				ad_bytes ^= 12;	/* switch between 4/8 bytes */
			else
				ad_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x2e:	/* CS override */
			override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			override_base = &ctxt->ss_base;
			break;
		case 0xf0:	/* LOCK */
			lock_prefix = 1;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			rep_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
	/* REX prefix. */
	if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
		rex_prefix = b;
		if (b & 8)
			op_bytes = 8;	/* REX.W */
		modrm_reg = (b & 4) << 1;	/* REX.R */
		index_reg = (b & 2) << 2;	/* REX.X */
		modrm_rm = base_reg = (b & 1) << 3;	/* REX.B */
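		/*
		 * Worked example (illustration only): REX byte 0x4d
		 * (0100.1101b) has W, R and B set, so op_bytes becomes 8 and
		 * both modrm_reg and modrm_rm start with bit 3 set, extending
		 * the reg/rm fields to reach r8-r15.
		 */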
		b = insn_fetch(u8, 1, _eip);
	}

	/* Opcode byte(s). */
	d = opcode_table[b];
	/* Two-byte opcode? */
	if (b == 0x0f) {
		twobyte = 1;
		b = insn_fetch(u8, 1, _eip);
		d = twobyte_table[b];
	}
	/* ModRM and SIB bytes. */
	if (d & ModRM) {
		modrm = insn_fetch(u8, 1, _eip);
		modrm_mod |= (modrm & 0xc0) >> 6;
		modrm_reg |= (modrm & 0x38) >> 3;
		modrm_rm |= (modrm & 0x07);
		modrm_ea = 0;
		use_modrm_ea = 1;

		if (modrm_mod == 3) {
			modrm_val = *(unsigned long *)
				decode_register(modrm_rm, _regs, d & ByteOp);
		unsigned bx = _regs[VCPU_REGS_RBX];
		unsigned bp = _regs[VCPU_REGS_RBP];
		unsigned si = _regs[VCPU_REGS_RSI];
		unsigned di = _regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (modrm_mod) {
		case 0:
			if (modrm_rm == 6)
				modrm_ea += insn_fetch(u16, 2, _eip);
			break;
		case 1:
			modrm_ea += insn_fetch(s8, 1, _eip);
			break;
		case 2:
			modrm_ea += insn_fetch(u16, 2, _eip);
			break;
		}
		if (modrm_rm == 2 || modrm_rm == 3 ||
		    (modrm_rm == 6 && modrm_mod != 0))
			override_base = &ctxt->ss_base;
		modrm_ea = (u16)modrm_ea;
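		/*
		 * Worked example (illustration only): mod = 01b, rm = 010b
		 * encodes [BP + SI + disp8]; BP-based forms default to the
		 * stack segment, which is why override_base is pointed at
		 * ss_base above.
		 */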
		/* 32/64-bit ModR/M decode. */
		sib = insn_fetch(u8, 1, _eip);
		index_reg |= (sib >> 3) & 7;
		base_reg |= sib & 7;
		scale = sib >> 6;

		modrm_ea += _regs[base_reg];
		modrm_ea += insn_fetch(s32, 4, _eip);
		modrm_ea += _regs[base_reg];
		modrm_ea += _regs[index_reg] << scale;
		modrm_ea += _regs[modrm_rm];
		else if (mode == X86EMUL_MODE_PROT64)
			rip_relative = 1;
		modrm_ea += _regs[modrm_rm];
		modrm_ea += insn_fetch(s32, 4, _eip);
		modrm_ea += insn_fetch(s8, 1, _eip);
		modrm_ea += insn_fetch(s32, 4, _eip);
		if (!override_base)
			override_base = &ctxt->ds_base;
		if (mode == X86EMUL_MODE_PROT64 &&
		    override_base != &ctxt->fs_base &&
		    override_base != &ctxt->gs_base)
			override_base = NULL;
		if (override_base)
			modrm_ea += *override_base;
	if (rip_relative) {
		modrm_ea += _eip;
		switch (d & SrcMask) {
		case SrcImmByte:
			modrm_ea += 1;
			break;
		case SrcImm:
			if (d & ByteOp)
				modrm_ea += 1;
			else if (op_bytes == 8)
				modrm_ea += 4;
			else
				modrm_ea += op_bytes;
		}
	}
	if (ad_bytes != 8)
		modrm_ea = (u32)modrm_ea;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (d & SrcMask) {
	case SrcReg:
		if (d & ByteOp) {
			src.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			src.val = src.orig_val = *(u8 *) src.ptr;
			src.bytes = 1;
		} else {
			src.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((src.bytes = op_bytes)) {
			case 2:
				src.val = src.orig_val = *(u16 *) src.ptr;
				break;
			case 4:
				src.val = src.orig_val = *(u32 *) src.ptr;
				break;
			case 8:
				src.val = src.orig_val = *(u64 *) src.ptr;
				break;
			}
		}
		break;
	case SrcMem:
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		src.ptr = (unsigned long *)cr2;
		if ((rc = ops->read_emulated((unsigned long)src.ptr,
					     &src.val, src.bytes, ctxt->vcpu)) != 0)
			goto done;
		src.orig_val = src.val;
		break;
	case SrcImm:
		src.ptr = (unsigned long *)_eip;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		/* NB. Immediates are sign-extended as necessary. */
		switch (src.bytes) {
		case 1:
			src.val = insn_fetch(s8, 1, _eip);
			break;
		case 2:
			src.val = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			src.val = insn_fetch(s32, 4, _eip);
			break;
		}
		break;
	case SrcImmByte:
		src.ptr = (unsigned long *)_eip;
		src.val = insn_fetch(s8, 1, _eip);
		break;
	}
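	/*
	 * Worked example (illustration only): opcode 0x83 (Grp1 with a
	 * sign-extended 8-bit immediate) fetching byte 0xff as an s8 yields
	 * src.val == (unsigned long)-1, so "add $-1" behaves identically at
	 * 16-, 32- and 64-bit operand sizes.
	 */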
	/* Decode and fetch the destination operand: register or memory. */
	switch (d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		goto special_insn;
	case DstReg:
		if ((d & ByteOp)
		    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
			dst.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			dst.val = *(u8 *) dst.ptr;
			dst.bytes = 1;
		} else {
			dst.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((dst.bytes = op_bytes)) {
			case 2:
				dst.val = *(u16 *)dst.ptr;
				break;
			case 4:
				dst.val = *(u32 *)dst.ptr;
				break;
			case 8:
				dst.val = *(u64 *)dst.ptr;
				break;
			}
		}
		break;
	case DstMem:
		dst.ptr = (unsigned long *)cr2;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (d & BitOp) {
			unsigned long mask = ~(dst.bytes * 8 - 1);

			dst.ptr = (void *)dst.ptr + (src.val & mask) / 8;
		}
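		/*
		 * Worked example (illustration only): a 32-bit bt with bit
		 * offset 100 advances dst.ptr by (100 & ~31) / 8 == 12 bytes;
		 * the in-word bit index 100 & 31 == 4 is recovered later by
		 * masking src.val.
		 */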
		if (!(d & Mov) &&	/* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)dst.ptr,
					      &dst.val, dst.bytes, ctxt->vcpu)) != 0))
			goto done;
		break;
	}
	dst.orig_val = dst.val;
	switch (b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", src, dst, _eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", src, dst, _eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", src, dst, _eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", src, dst, _eflags);
		break;
	case 0x20 ... 0x23:
	      and:		/* and */
		emulate_2op_SrcV("and", src, dst, _eflags);
		break;
	case 0x24:		/* and al imm8 */
		dst.ptr = &_regs[VCPU_REGS_RAX];
		dst.val = *(u8 *)dst.ptr;
		dst.bytes = 1;
		dst.orig_val = dst.val;
		goto and;
	case 0x25:		/* and ax imm16, or eax imm32 */
		dst.bytes = op_bytes;
		dst.ptr = &_regs[VCPU_REGS_RAX];
		if (op_bytes == 2)
			dst.val = *(u16 *)dst.ptr;
		else
			dst.val = *(u32 *)dst.ptr;
		dst.orig_val = dst.val;
		goto and;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", src, dst, _eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", src, dst, _eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		break;
	case 0x63:		/* movsxd */
		if (mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		dst.val = (s32) src.val;
		break;
	case 0x80 ... 0x83:	/* Grp1 */
		switch (modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
	      test:		/* test */
		emulate_2op_SrcV("test", src, dst, _eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (dst.bytes) {
		case 1: *(u8 *) src.ptr = (u8) dst.val; break;
		case 2: *(u16 *) src.ptr = (u16) dst.val; break;
		case 4: *src.ptr = (u32) dst.val; break; /* 64b reg: zero-extend */
		case 8: *src.ptr = dst.val; break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		dst.val = src.val;
		lock_prefix = 1;
		break;
	case 0xa0 ... 0xa1:	/* mov */
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		dst.val = src.val;
		_eip += ad_bytes;	/* skip src displacement */
		break;
	case 0xa2 ... 0xa3:	/* mov */
		dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip dst displacement */
		break;
	case 0x88 ... 0x8b:	/* mov */
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
		dst.val = src.val;
		break;
	case 0x8f:		/* pop (sole member of Grp1a) */
		/* 64-bit mode: POP always pops a 64-bit operand. */
		if (mode == X86EMUL_MODE_PROT64)
			dst.bytes = 8;
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					&dst.val, dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
		break;
	case 0xc0 ... 0xc1:
	      grp2:		/* Grp2 */
		switch (modrm_reg) {
		case 0:	/* rol */
			emulate_2op_SrcB("rol", src, dst, _eflags);
			break;
		case 1:	/* ror */
			emulate_2op_SrcB("ror", src, dst, _eflags);
			break;
		case 2:	/* rcl */
			emulate_2op_SrcB("rcl", src, dst, _eflags);
			break;
		case 3:	/* rcr */
			emulate_2op_SrcB("rcr", src, dst, _eflags);
			break;
		case 4:	/* sal/shl */
		case 6:	/* sal/shl */
			emulate_2op_SrcB("sal", src, dst, _eflags);
			break;
		case 5:	/* shr */
			emulate_2op_SrcB("shr", src, dst, _eflags);
			break;
		case 7:	/* sar */
			emulate_2op_SrcB("sar", src, dst, _eflags);
			break;
		}
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		src.val = 1;
		goto grp2;
	case 0xd2 ... 0xd3:	/* Grp2 */
		src.val = _regs[VCPU_REGS_RCX];
		goto grp2;
	case 0xe9:		/* jmp rel */
	case 0xeb:		/* jmp rel short */
		JMP_REL(src.val);
		no_wb = 1;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		switch (modrm_reg) {
		case 0 ... 1:	/* test */
			/*
			 * Special case in Grp3: test has an immediate
			 * source operand.
			 */
			src.ptr = (unsigned long *)_eip;
			src.bytes = (d & ByteOp) ? 1 : op_bytes;
			if (src.bytes == 8)
				src.bytes = 4;
			switch (src.bytes) {
			case 1:
				src.val = insn_fetch(s8, 1, _eip);
				break;
			case 2:
				src.val = insn_fetch(s16, 2, _eip);
				break;
			case 4:
				src.val = insn_fetch(s32, 4, _eip);
				break;
			}
			goto test;
		case 3:	/* neg */
			emulate_1op("neg", dst, _eflags);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		switch (modrm_reg) {
		case 0:	/* inc */
			emulate_1op("inc", dst, _eflags);
			break;
		case 1:	/* dec */
			emulate_1op("dec", dst, _eflags);
			break;
		case 6:	/* push */
			/* 64-bit mode: PUSH always pushes a 64-bit operand. */
			if (mode == X86EMUL_MODE_PROT64) {
				dst.bytes = 8;
				if ((rc = ops->read_std((unsigned long)dst.ptr,
							&dst.val, 8,
							ctxt->vcpu)) != 0)
					goto done;
			}
			register_address_increment(_regs[VCPU_REGS_RSP],
						   -dst.bytes);
			if ((rc = ops->write_std(
				     register_address(ctxt->ss_base,
						      _regs[VCPU_REGS_RSP]),
				     &dst.val, dst.bytes, ctxt->vcpu)) != 0)
				goto done;
			no_wb = 1;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	/* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
	switch (dst.bytes) {
	case 1:
		*(u8 *)dst.ptr = (u8)dst.val;
		break;
	case 2:
		*(u16 *)dst.ptr = (u16)dst.val;
		break;
	case 4:
		*dst.ptr = (u32)dst.val;
		break;	/* 64b: zero-ext */
	case 8:
		*dst.ptr = dst.val;
		break;
	}
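	/*
	 * Worked example (illustration only): in 64-bit mode a 4-byte write
	 * of 0x1 to the RAX slot stores 0x0000000000000001, clearing bits
	 * 63:32 -- the architectural zero-extension noted above.
	 */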
	if (lock_prefix)
		rc = ops->cmpxchg_emulated((unsigned long)dst.ptr,
					   &dst.orig_val,
					   &dst.val, dst.bytes,
					   ctxt->vcpu);
	else
		rc = ops->write_emulated((unsigned long)dst.ptr,
					 &dst.val, dst.bytes,
					 ctxt->vcpu);
	if (rc != 0)
		goto done;
	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
	ctxt->eflags = _eflags;
	ctxt->vcpu->rip = _eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
special_insn:
	if (twobyte)
		goto twobyte_special_insn;
	switch (b) {
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,			/* in */
				(d & ByteOp) ? 1 : op_bytes,	/* size */
				rep_prefix ?
				address_mask(_regs[VCPU_REGS_RCX]) : 1,	/* count */
				(_eflags & EFLG_DF),	/* down */
				register_address(ctxt->es_base,
						 _regs[VCPU_REGS_RDI]),	/* address */
				rep_prefix,
				_regs[VCPU_REGS_RDX]	/* port */
				) == 0)
			return -1;
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,			/* in */
				(d & ByteOp) ? 1 : op_bytes,	/* size */
				rep_prefix ?
				address_mask(_regs[VCPU_REGS_RCX]) : 1,	/* count */
				(_eflags & EFLG_DF),	/* down */
				register_address(override_base ?
						 *override_base : ctxt->ds_base,
						 _regs[VCPU_REGS_RSI]),	/* address */
				rep_prefix,
				_regs[VCPU_REGS_RDX]	/* port */
				) == 0)
			return -1;
		return 0;
	if (rep_prefix) {
		if (_regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->rip = _eip;
			goto done;
		}
		_regs[VCPU_REGS_RCX]--;
		_eip = ctxt->vcpu->rip;
	}
	case 0xa4 ... 0xa5:	/* movs */
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)register_address(ctxt->es_base,
							    _regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
				override_base ? *override_base : ctxt->ds_base,
				_regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		register_address_increment(_regs[VCPU_REGS_RDI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		DPRINTF("Urk! I don't handle CMPS.\n");
		goto cannot_emulate;
	case 0xaa ... 0xab:	/* stos */
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)cr2;
		dst.val = _regs[VCPU_REGS_RAX];
		register_address_increment(_regs[VCPU_REGS_RDI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes,
					     ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xf4:		/* hlt */
		ctxt->vcpu->halt_request = 1;
		goto done;
	case 0xc3:		/* ret */
		goto pop_instruction;
	case 0x58 ... 0x5f:	/* pop reg */
		dst.ptr = (unsigned long *)&_regs[b & 0x7];

	      pop_instruction:
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
				_regs[VCPU_REGS_RSP]), dst.ptr, op_bytes,
				ctxt->vcpu)) != 0)
			goto done;

		register_address_increment(_regs[VCPU_REGS_RSP], op_bytes);
		no_wb = 1;	/* Disable writeback. */
		break;
twobyte_insn:
	switch (b) {
	case 0x01:	/* lgdt, lidt, lmsw */
		/* Disable writeback. */
		no_wb = 1;
		switch (modrm_reg) {
			u16 size;
			unsigned long address;

		case 2:	/* lgdt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3:	/* lidt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lidt(ctxt->vcpu, size, address);
			break;
		case 4:	/* smsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&_regs[modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6:	/* lmsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
			break;
		case 7:	/* invlpg */
			emulate_invlpg(ctxt->vcpu, cr2);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x21:	/* mov from dr to reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
		break;
	case 0x23:	/* mov from reg to dr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
		break;
	case 0x40 ... 0x4f:	/* cmov */
		dst.val = dst.orig_val = src.val;
		no_wb = 1;
		/*
		 * First, assume we're decoding an even cmov opcode
		 * (lsb == 0).
		 */
		switch ((b & 15) >> 1) {
		case 0:	/* cmovo */
			no_wb = (_eflags & EFLG_OF) ? 0 : 1;
			break;
		case 1:	/* cmovb/cmovc/cmovnae */
			no_wb = (_eflags & EFLG_CF) ? 0 : 1;
			break;
		case 2:	/* cmovz/cmove */
			no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
			break;
		case 3:	/* cmovbe/cmovna */
			no_wb = (_eflags & (EFLG_CF | EFLG_ZF)) ? 0 : 1;
			break;
		case 4:	/* cmovs */
			no_wb = (_eflags & EFLG_SF) ? 0 : 1;
			break;
		case 5:	/* cmovp/cmovpe */
			no_wb = (_eflags & EFLG_PF) ? 0 : 1;
			break;
		case 7:	/* cmovle/cmovng */
			no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
			/* fall through */
		case 6:	/* cmovl/cmovnge */
			no_wb &= (!(_eflags & EFLG_SF) !=
				  !(_eflags & EFLG_OF)) ? 0 : 1;
			break;
		}
		/* Odd cmov opcodes (lsb == 1) have inverted sense. */
		no_wb ^= b & 1;
		break;
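		/*
		 * Worked example (illustration only): 0x44 (cmovz) writes
		 * back only when ZF is set; its odd sibling 0x45 (cmovnz)
		 * simply flips the no_wb decision computed above.
		 */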
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		src.orig_val = src.val;
		src.val = _regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		if (_eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			dst.val = src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		}
		break;
	case 0xa3:
	      bt:		/* bt */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
		break;
	case 0xb3:
	      btr:		/* btr */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
		break;
	case 0xbb:
	      btc:		/* btc */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
		break;
	case 0xba:		/* Grp8 */
		switch (modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbe ... 0xbf:	/* movsx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
		break;
	}
twobyte_special_insn:
	/* Disable writeback. */
	no_wb = 1;
	switch (b) {
	case 0x09:		/* wbinvd */
		break;
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		break;
	case 0x06:		/* clts */
		emulate_clts(ctxt->vcpu);
		break;
	case 0x20:		/* mov cr, reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		_regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
		break;
	case 0x22:		/* mov reg, cr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
		break;
	case 0x30:		/* wrmsr */
		msr_data = (u32)_regs[VCPU_REGS_RAX]
			| ((u64)_regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		}
		rc = X86EMUL_CONTINUE;
		break;
	case 0x32:		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		} else {
			_regs[VCPU_REGS_RAX] = (u32)msr_data;
			_regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		{
			u64 old, new;

			if ((rc = ops->read_emulated(cr2, &old, 8, ctxt->vcpu))
			    != 0)
				goto done;
			if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
			    ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = (u32) (old >> 0);
				_regs[VCPU_REGS_RDX] = (u32) (old >> 32);
				_eflags &= ~EFLG_ZF;
			} else {
				new = ((u64)_regs[VCPU_REGS_RCX] << 32)
					| (u32) _regs[VCPU_REGS_RBX];
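				/*
				 * Illustrative note: the emulated cmpxchg
				 * below stores ECX:EBX only if the 8-byte
				 * memory operand still equals 'old'.  A
				 * minimal sketch of cmpxchg8b semantics in
				 * plain C:
				 *
				 *	if (*m64 == old) { *m64 = new; ZF = 1; }
				 *	else { EDX:EAX = *m64; ZF = 0; }
				 */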
				if ((rc = ops->cmpxchg_emulated(cr2, &old,
						&new, 8, ctxt->vcpu)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
	}

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", b);
	return -1;
}
#include <asm/uaccess.h>

int
x86_emulate_read_std(unsigned long addr,
		     unsigned long *val,
		     unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	int rc;

	if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
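		/*
		 * Worked example (illustration only): copy_from_user()
		 * returns the number of bytes left uncopied, so for
		 * bytes == 8 and rc == 3 the first faulting byte is
		 * addr + 8 - 3 == addr + 5, as reported below.
		 */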
		propagate_page_fault(addr + bytes - rc, 0);	/* read fault */
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}
int
x86_emulate_write_std(unsigned long addr,
		      unsigned long val,
		      unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	int rc;

	if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, PGERR_write_access);
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}