Lines matching refs:ctxt

198 		int (*execute)(struct x86_emulate_ctxt *ctxt);
207 int (*check_perm)(struct x86_emulate_ctxt *ctxt);
246 static void writeback_registers(struct x86_emulate_ctxt *ctxt)
248 unsigned long dirty = ctxt->regs_dirty;
252 ctxt->ops->write_gpr(ctxt, reg, ctxt->_regs[reg]);
255 static void invalidate_registers(struct x86_emulate_ctxt *ctxt)
257 ctxt->regs_dirty = 0;
258 ctxt->regs_valid = 0;
291 static int fastop(struct x86_emulate_ctxt *ctxt, fastop_t fop);
466 static int emulator_check_intercept(struct x86_emulate_ctxt *ctxt,
472 .rep_prefix = ctxt->rep_prefix,
473 .modrm_mod = ctxt->modrm_mod,
474 .modrm_reg = ctxt->modrm_reg,
475 .modrm_rm = ctxt->modrm_rm,
476 .src_val = ctxt->src.val64,
477 .dst_val = ctxt->dst.val64,
478 .src_bytes = ctxt->src.bytes,
479 .dst_bytes = ctxt->dst.bytes,
480 .ad_bytes = ctxt->ad_bytes,
481 .next_rip = ctxt->eip,
484 return ctxt->ops->intercept(ctxt, &info, stage);
511 static inline unsigned long ad_mask(struct x86_emulate_ctxt *ctxt)
513 return (1UL << (ctxt->ad_bytes << 3)) - 1;
516 static ulong stack_mask(struct x86_emulate_ctxt *ctxt)
521 if (ctxt->mode == X86EMUL_MODE_PROT64)
523 ctxt->ops->get_segment(ctxt, &sel, &ss, NULL, VCPU_SREG_SS);
527 static int stack_size(struct x86_emulate_ctxt *ctxt)
529 return (__fls(stack_mask(ctxt)) + 1) >> 3;
534 address_mask(struct x86_emulate_ctxt *ctxt, unsigned long reg)
536 if (ctxt->ad_bytes == sizeof(unsigned long))
539 return reg & ad_mask(ctxt);
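
The mask arithmetic at lines 513, 529 and 536 above is worth seeing in isolation. Below is a minimal, self-contained sketch of the same math (not the kernel code; names are hypothetical):

#include <stdio.h>

/* 16- or 32-bit address-size mask; the 64-bit case is special-cased in the
 * emulator (line 536 above) because 1UL << 64 would overflow. */
static unsigned long ad_mask(int ad_bytes)
{
	return (1UL << (ad_bytes << 3)) - 1;           /* 2 -> 0xffff, 4 -> 0xffffffff */
}

/* Stack width in bytes recovered from the highest set bit of the SS limit
 * mask, mirroring (__fls(stack_mask) + 1) >> 3 at line 529 above. */
static int stack_size(unsigned long long mask)
{
	return ((63 - __builtin_clzll(mask)) + 1) >> 3;
}

int main(void)
{
	printf("%#lx %d\n", ad_mask(2), stack_size(ad_mask(2)));   /* 0xffff 2 */
	printf("%#lx %d\n", ad_mask(4), stack_size(ad_mask(4)));   /* 0xffffffff 4 */
	return 0;
}
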
543 register_address(struct x86_emulate_ctxt *ctxt, int reg)
545 return address_mask(ctxt, reg_read(ctxt, reg));
554 register_address_increment(struct x86_emulate_ctxt *ctxt, int reg, int inc)
556 ulong *preg = reg_rmw(ctxt, reg);
558 assign_register(preg, *preg + inc, ctxt->ad_bytes);
561 static void rsp_increment(struct x86_emulate_ctxt *ctxt, int inc)
563 masked_increment(reg_rmw(ctxt, VCPU_REGS_RSP), stack_mask(ctxt), inc);
573 static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
575 if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
578 return ctxt->ops->get_cached_segment_base(ctxt, seg);
581 static int emulate_exception(struct x86_emulate_ctxt *ctxt, int vec,
584 if (KVM_EMULATOR_BUG_ON(vec > 0x1f, ctxt))
587 ctxt->exception.vector = vec;
588 ctxt->exception.error_code = error;
589 ctxt->exception.error_code_valid = valid;
593 static int emulate_db(struct x86_emulate_ctxt *ctxt)
595 return emulate_exception(ctxt, DB_VECTOR, 0, false);
598 static int emulate_gp(struct x86_emulate_ctxt *ctxt, int err)
600 return emulate_exception(ctxt, GP_VECTOR, err, true);
603 static int emulate_ss(struct x86_emulate_ctxt *ctxt, int err)
605 return emulate_exception(ctxt, SS_VECTOR, err, true);
608 static int emulate_ud(struct x86_emulate_ctxt *ctxt)
610 return emulate_exception(ctxt, UD_VECTOR, 0, false);
613 static int emulate_ts(struct x86_emulate_ctxt *ctxt, int err)
615 return emulate_exception(ctxt, TS_VECTOR, err, true);
618 static int emulate_de(struct x86_emulate_ctxt *ctxt)
620 return emulate_exception(ctxt, DE_VECTOR, 0, false);
623 static int emulate_nm(struct x86_emulate_ctxt *ctxt)
625 return emulate_exception(ctxt, NM_VECTOR, 0, false);
628 static u16 get_segment_selector(struct x86_emulate_ctxt *ctxt, unsigned seg)
633 ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg);
637 static void set_segment_selector(struct x86_emulate_ctxt *ctxt, u16 selector,
644 ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg);
645 ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg);
648 static inline u8 ctxt_virt_addr_bits(struct x86_emulate_ctxt *ctxt)
650 return (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_LA57) ? 57 : 48;
654 struct x86_emulate_ctxt *ctxt)
656 return !__is_canonical_address(la, ctxt_virt_addr_bits(ctxt));
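
Lines 650 and 656 above pair the LA57 check with a canonicality test. A self-contained sketch of what such a test looks like (an assumption about __is_canonical_address, which is defined elsewhere): an address is canonical when sign-extending it from the implemented width (48 bits, or 57 with LA57) reproduces it unchanged.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Canonical iff the bits above (vaddr_bits - 1) are all copies of that bit,
 * i.e. sign-extending from vaddr_bits reproduces the address. */
static bool is_canonical(uint64_t la, unsigned int vaddr_bits)
{
	unsigned int shift = 64 - vaddr_bits;

	return (uint64_t)((int64_t)(la << shift) >> shift) == la;
}

int main(void)
{
	printf("%d\n", is_canonical(0x00007fffffffffffULL, 48));   /* 1 */
	printf("%d\n", is_canonical(0x0000800000000000ULL, 48));   /* 0 */
	printf("%d\n", is_canonical(0x0000800000000000ULL, 57));   /* 1 */
	return 0;
}
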
668 static unsigned insn_alignment(struct x86_emulate_ctxt *ctxt, unsigned size)
670 u64 alignment = ctxt->d & AlignMask;
687 static __always_inline int __linearize(struct x86_emulate_ctxt *ctxt,
700 la = seg_base(ctxt, addr.seg) + addr.ea;
704 *linear = la = ctxt->ops->get_untagged_addr(ctxt, la, flags);
705 va_bits = ctxt_virt_addr_bits(ctxt);
715 usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL,
720 if ((((ctxt->mode != X86EMUL_MODE_REAL) && (desc.type & 8)) || !(desc.type & 2)) &&
744 if (la & (insn_alignment(ctxt, size) - 1))
745 return emulate_gp(ctxt, 0);
749 return emulate_ss(ctxt, 0);
751 return emulate_gp(ctxt, 0);
754 static int linearize(struct x86_emulate_ctxt *ctxt,
760 return __linearize(ctxt, addr, &max_size, size, ctxt->mode, linear,
764 static inline int assign_eip(struct x86_emulate_ctxt *ctxt, ulong dst)
772 if (ctxt->op_bytes != sizeof(unsigned long))
773 addr.ea = dst & ((1UL << (ctxt->op_bytes << 3)) - 1);
774 rc = __linearize(ctxt, addr, &max_size, 1, ctxt->mode, &linear,
777 ctxt->_eip = addr.ea;
781 static inline int emulator_recalc_and_set_mode(struct x86_emulate_ctxt *ctxt)
788 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
790 if (!(ctxt->ops->get_cr(ctxt, 0) & X86_CR0_PE)) {
794 ctxt->mode = X86EMUL_MODE_REAL;
798 if (ctxt->eflags & X86_EFLAGS_VM) {
802 ctxt->mode = X86EMUL_MODE_VM86;
806 if (!ctxt->ops->get_segment(ctxt, &selector, &cs, &base3, VCPU_SREG_CS))
812 ctxt->mode = X86EMUL_MODE_PROT64;
815 ctxt->mode = X86EMUL_MODE_PROT32;
817 ctxt->mode = X86EMUL_MODE_PROT16;
821 ctxt->mode = cs.d ? X86EMUL_MODE_PROT32 : X86EMUL_MODE_PROT16;
827 static inline int assign_eip_near(struct x86_emulate_ctxt *ctxt, ulong dst)
829 return assign_eip(ctxt, dst);
832 static int assign_eip_far(struct x86_emulate_ctxt *ctxt, ulong dst)
834 int rc = emulator_recalc_and_set_mode(ctxt);
839 return assign_eip(ctxt, dst);
842 static inline int jmp_rel(struct x86_emulate_ctxt *ctxt, int rel)
844 return assign_eip_near(ctxt, ctxt->_eip + rel);
847 static int linear_read_system(struct x86_emulate_ctxt *ctxt, ulong linear,
850 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, true);
853 static int linear_write_system(struct x86_emulate_ctxt *ctxt,
857 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, true);
860 static int segmented_read_std(struct x86_emulate_ctxt *ctxt,
868 rc = linearize(ctxt, addr, size, false, &linear);
871 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, false);
874 static int segmented_write_std(struct x86_emulate_ctxt *ctxt,
882 rc = linearize(ctxt, addr, size, true, &linear);
885 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, false);
892 static int __do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt, int op_size)
897 int cur_size = ctxt->fetch.end - ctxt->fetch.data;
899 .ea = ctxt->eip + cur_size };
911 rc = __linearize(ctxt, addr, &max_size, 0, ctxt->mode, &linear,
926 return emulate_gp(ctxt, 0);
928 rc = ctxt->ops->fetch(ctxt, linear, ctxt->fetch.end,
929 size, &ctxt->exception);
932 ctxt->fetch.end += size;
936 static __always_inline int do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt,
939 unsigned done_size = ctxt->fetch.end - ctxt->fetch.ptr;
942 return __do_insn_fetch_bytes(ctxt, size - done_size);
954 ctxt->_eip += sizeof(_type); \
955 memcpy(&_x, ctxt->fetch.ptr, sizeof(_type)); \
956 ctxt->fetch.ptr += sizeof(_type); \
965 ctxt->_eip += (_size); \
966 memcpy(_arr, ctxt->fetch.ptr, _size); \
967 ctxt->fetch.ptr += (_size); \
975 static void *decode_register(struct x86_emulate_ctxt *ctxt, u8 modrm_reg,
979 int highbyte_regs = (ctxt->rex_prefix == 0) && byteop;
982 p = (unsigned char *)reg_rmw(ctxt, modrm_reg & 3) + 1;
984 p = reg_rmw(ctxt, modrm_reg);
988 static int read_descriptor(struct x86_emulate_ctxt *ctxt,
997 rc = segmented_read_std(ctxt, addr, size, 2);
1001 rc = segmented_read_std(ctxt, addr, address, op_bytes);
1049 static int em_bsf_c(struct x86_emulate_ctxt *ctxt)
1052 if (ctxt->src.val == 0)
1053 ctxt->dst.type = OP_NONE;
1054 return fastop(ctxt, em_bsf);
1057 static int em_bsr_c(struct x86_emulate_ctxt *ctxt)
1060 if (ctxt->src.val == 0)
1061 ctxt->dst.type = OP_NONE;
1062 return fastop(ctxt, em_bsr);
1094 static int em_fninit(struct x86_emulate_ctxt *ctxt)
1096 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1097 return emulate_nm(ctxt);
1105 static int em_fnstcw(struct x86_emulate_ctxt *ctxt)
1109 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1110 return emulate_nm(ctxt);
1116 ctxt->dst.val = fcw;
1121 static int em_fnstsw(struct x86_emulate_ctxt *ctxt)
1125 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1126 return emulate_nm(ctxt);
1132 ctxt->dst.val = fsw;
1137 static void decode_register_operand(struct x86_emulate_ctxt *ctxt,
1142 if (ctxt->d & ModRM)
1143 reg = ctxt->modrm_reg;
1145 reg = (ctxt->b & 7) | ((ctxt->rex_prefix & 1) << 3);
1147 if (ctxt->d & Sse) {
1154 if (ctxt->d & Mmx) {
1163 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
1164 op->addr.reg = decode_register(ctxt, reg, ctxt->d & ByteOp);
1170 static void adjust_modrm_seg(struct x86_emulate_ctxt *ctxt, int base_reg)
1173 ctxt->modrm_seg = VCPU_SREG_SS;
1176 static int decode_modrm(struct x86_emulate_ctxt *ctxt,
1184 ctxt->modrm_reg = ((ctxt->rex_prefix << 1) & 8); /* REX.R */
1185 index_reg = (ctxt->rex_prefix << 2) & 8; /* REX.X */
1186 base_reg = (ctxt->rex_prefix << 3) & 8; /* REX.B */
1188 ctxt->modrm_mod = (ctxt->modrm & 0xc0) >> 6;
1189 ctxt->modrm_reg |= (ctxt->modrm & 0x38) >> 3;
1190 ctxt->modrm_rm = base_reg | (ctxt->modrm & 0x07);
1191 ctxt->modrm_seg = VCPU_SREG_DS;
1193 if (ctxt->modrm_mod == 3 || (ctxt->d & NoMod)) {
1195 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
1196 op->addr.reg = decode_register(ctxt, ctxt->modrm_rm,
1197 ctxt->d & ByteOp);
1198 if (ctxt->d & Sse) {
1201 op->addr.xmm = ctxt->modrm_rm;
1202 kvm_read_sse_reg(ctxt->modrm_rm, &op->vec_val);
1205 if (ctxt->d & Mmx) {
1208 op->addr.mm = ctxt->modrm_rm & 7;
1217 if (ctxt->ad_bytes == 2) {
1218 unsigned bx = reg_read(ctxt, VCPU_REGS_RBX);
1219 unsigned bp = reg_read(ctxt, VCPU_REGS_RBP);
1220 unsigned si = reg_read(ctxt, VCPU_REGS_RSI);
1221 unsigned di = reg_read(ctxt, VCPU_REGS_RDI);
1224 switch (ctxt->modrm_mod) {
1226 if (ctxt->modrm_rm == 6)
1227 modrm_ea += insn_fetch(u16, ctxt);
1230 modrm_ea += insn_fetch(s8, ctxt);
1233 modrm_ea += insn_fetch(u16, ctxt);
1236 switch (ctxt->modrm_rm) {
1256 if (ctxt->modrm_mod != 0)
1263 if (ctxt->modrm_rm == 2 || ctxt->modrm_rm == 3 ||
1264 (ctxt->modrm_rm == 6 && ctxt->modrm_mod != 0))
1265 ctxt->modrm_seg = VCPU_SREG_SS;
1269 if ((ctxt->modrm_rm & 7) == 4) {
1270 sib = insn_fetch(u8, ctxt);
1275 if ((base_reg & 7) == 5 && ctxt->modrm_mod == 0)
1276 modrm_ea += insn_fetch(s32, ctxt);
1278 modrm_ea += reg_read(ctxt, base_reg);
1279 adjust_modrm_seg(ctxt, base_reg);
1281 if ((ctxt->d & IncSP) &&
1283 modrm_ea += ctxt->op_bytes;
1286 modrm_ea += reg_read(ctxt, index_reg) << scale;
1287 } else if ((ctxt->modrm_rm & 7) == 5 && ctxt->modrm_mod == 0) {
1288 modrm_ea += insn_fetch(s32, ctxt);
1289 if (ctxt->mode == X86EMUL_MODE_PROT64)
1290 ctxt->rip_relative = 1;
1292 base_reg = ctxt->modrm_rm;
1293 modrm_ea += reg_read(ctxt, base_reg);
1294 adjust_modrm_seg(ctxt, base_reg);
1296 switch (ctxt->modrm_mod) {
1298 modrm_ea += insn_fetch(s8, ctxt);
1301 modrm_ea += insn_fetch(s32, ctxt);
1306 if (ctxt->ad_bytes != 8)
1307 ctxt->memop.addr.mem.ea = (u32)ctxt->memop.addr.mem.ea;
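
As a side note on the REX handling visible at lines 1184-1190 above (and the REX.W operand-size selection at lines 4873-4874 further down), the sketch below shows how the prefix's R and B bits extend the 3-bit ModRM fields to 4 bits. It is a standalone illustration with hypothetical names, not the decoder itself.

#include <stdio.h>

struct modrm_fields { unsigned mod, reg, rm; };

static struct modrm_fields decode_modrm_byte(unsigned char rex, unsigned char modrm)
{
	struct modrm_fields f;

	f.mod = modrm >> 6;
	f.reg = ((rex << 1) & 8) | ((modrm >> 3) & 7);   /* REX.R extends reg */
	f.rm  = ((rex << 3) & 8) | (modrm & 7);          /* REX.B extends rm  */
	return f;
}

int main(void)
{
	/* 4c 89 e5 = mov %r12,%rbp: REX=0x4c (W+R), ModRM=0xe5 -> reg 12 (r12), rm 5 (rbp) */
	struct modrm_fields f = decode_modrm_byte(0x4c, 0xe5);

	printf("mod=%u reg=%u rm=%u\n", f.mod, f.reg, f.rm);
	return 0;
}
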
1313 static int decode_abs(struct x86_emulate_ctxt *ctxt,
1319 switch (ctxt->ad_bytes) {
1321 op->addr.mem.ea = insn_fetch(u16, ctxt);
1324 op->addr.mem.ea = insn_fetch(u32, ctxt);
1327 op->addr.mem.ea = insn_fetch(u64, ctxt);
1334 static void fetch_bit_operand(struct x86_emulate_ctxt *ctxt)
1338 if (ctxt->dst.type == OP_MEM && ctxt->src.type == OP_REG) {
1339 mask = ~((long)ctxt->dst.bytes * 8 - 1);
1341 if (ctxt->src.bytes == 2)
1342 sv = (s16)ctxt->src.val & (s16)mask;
1343 else if (ctxt->src.bytes == 4)
1344 sv = (s32)ctxt->src.val & (s32)mask;
1346 sv = (s64)ctxt->src.val & (s64)mask;
1348 ctxt->dst.addr.mem.ea = address_mask(ctxt,
1349 ctxt->dst.addr.mem.ea + (sv >> 3));
1353 ctxt->src.val &= (ctxt->dst.bytes << 3) - 1;
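
The bit-addressing math at lines 1339-1353 above splits the bit offset of a BT-style instruction with a memory destination into a byte displacement and a bit index within the operand. A minimal standalone sketch of that split (hypothetical helper, simplified to non-negative offsets):

#include <stdio.h>

static void split_bit_offset(long long bitoff, int op_bytes,
			     long long *byte_disp, unsigned int *bit_in_op)
{
	long long mask = ~((long long)op_bytes * 8 - 1);   /* e.g. ~31 for 4-byte operands */
	long long sv = bitoff & mask;                      /* operand-aligned bit offset   */

	*byte_disp = sv >> 3;                              /* added to the effective address */
	*bit_in_op = bitoff & (op_bytes * 8 - 1);          /* kept as the in-operand index   */
}

int main(void)
{
	long long disp;
	unsigned int bit;

	split_bit_offset(100, 4, &disp, &bit);   /* 100 = 3*32 + 4 -> disp 12, bit 4 */
	printf("%lld %u\n", disp, bit);
	return 0;
}
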
1356 static int read_emulated(struct x86_emulate_ctxt *ctxt,
1360 struct read_cache *mc = &ctxt->mem_read;
1365 if (KVM_EMULATOR_BUG_ON((mc->end + size) >= sizeof(mc->data), ctxt))
1368 rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, size,
1369 &ctxt->exception);
1381 static int segmented_read(struct x86_emulate_ctxt *ctxt,
1389 rc = linearize(ctxt, addr, size, false, &linear);
1392 return read_emulated(ctxt, linear, data, size);
1395 static int segmented_write(struct x86_emulate_ctxt *ctxt,
1403 rc = linearize(ctxt, addr, size, true, &linear);
1406 return ctxt->ops->write_emulated(ctxt, linear, data, size,
1407 &ctxt->exception);
1410 static int segmented_cmpxchg(struct x86_emulate_ctxt *ctxt,
1418 rc = linearize(ctxt, addr, size, true, &linear);
1421 return ctxt->ops->cmpxchg_emulated(ctxt, linear, orig_data, data,
1422 size, &ctxt->exception);
1425 static int pio_in_emulated(struct x86_emulate_ctxt *ctxt,
1429 struct read_cache *rc = &ctxt->io_read;
1433 unsigned int count = ctxt->rep_prefix ?
1434 address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) : 1;
1435 in_page = (ctxt->eflags & X86_EFLAGS_DF) ?
1436 offset_in_page(reg_read(ctxt, VCPU_REGS_RDI)) :
1437 PAGE_SIZE - offset_in_page(reg_read(ctxt, VCPU_REGS_RDI));
1442 if (!ctxt->ops->pio_in_emulated(ctxt, size, port, rc->data, n))
1447 if (ctxt->rep_prefix && (ctxt->d & String) &&
1448 !(ctxt->eflags & X86_EFLAGS_DF)) {
1449 ctxt->dst.data = rc->data + rc->pos;
1450 ctxt->dst.type = OP_MEM_STR;
1451 ctxt->dst.count = (rc->end - rc->pos) / size;
1460 static int read_interrupt_descriptor(struct x86_emulate_ctxt *ctxt,
1466 ctxt->ops->get_idt(ctxt, &dt);
1469 return emulate_gp(ctxt, index << 3 | 0x2);
1472 return linear_read_system(ctxt, addr, desc, sizeof(*desc));
1475 static void get_descriptor_table_ptr(struct x86_emulate_ctxt *ctxt,
1478 const struct x86_emulate_ops *ops = ctxt->ops;
1486 if (!ops->get_segment(ctxt, &sel, &desc, &base3,
1493 ops->get_gdt(ctxt, dt);
1496 static int get_descriptor_ptr(struct x86_emulate_ctxt *ctxt,
1503 get_descriptor_table_ptr(ctxt, selector, &dt);
1506 return emulate_gp(ctxt, selector & 0xfffc);
1514 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1525 static int read_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1531 rc = get_descriptor_ptr(ctxt, selector, desc_addr_p);
1535 return linear_read_system(ctxt, *desc_addr_p, desc, sizeof(*desc));
1539 static int write_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1545 rc = get_descriptor_ptr(ctxt, selector, &addr);
1549 return linear_write_system(ctxt, addr, desc, sizeof(*desc));
1552 static int __load_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1569 if (ctxt->mode == X86EMUL_MODE_REAL) {
1572 ctxt->ops->get_segment(ctxt, &dummy, &seg_desc, NULL, seg);
1575 } else if (seg <= VCPU_SREG_GS && ctxt->mode == X86EMUL_MODE_VM86) {
1598 if (ctxt->mode != X86EMUL_MODE_PROT64 || rpl != cpl)
1602 * ctxt->ops->set_segment expects the CPL to be in
1617 ret = read_segment_descriptor(ctxt, selector, &seg_desc, &desc_addr);
1688 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1726 ret = write_segment_descriptor(ctxt, selector,
1731 } else if (ctxt->mode == X86EMUL_MODE_PROT64) {
1732 ret = linear_read_system(ctxt, desc_addr+8, &base3, sizeof(base3));
1736 ((u64)base3 << 32), ctxt))
1737 return emulate_gp(ctxt, err_code);
1743 ret = ctxt->ops->cmpxchg_emulated(ctxt, desc_addr, &old_desc, &seg_desc,
1744 sizeof(seg_desc), &ctxt->exception);
1749 ctxt->ops->set_segment(ctxt, selector, &seg_desc, base3, seg);
1754 return emulate_exception(ctxt, err_vec, err_code, true);
1757 static int load_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1760 u8 cpl = ctxt->ops->cpl(ctxt);
1773 ctxt->mode == X86EMUL_MODE_PROT64)
1774 return emulate_exception(ctxt, GP_VECTOR, 0, true);
1776 return __load_segment_descriptor(ctxt, selector, seg, cpl,
1785 static int writeback(struct x86_emulate_ctxt *ctxt, struct operand *op)
1792 if (ctxt->lock_prefix)
1793 return segmented_cmpxchg(ctxt,
1799 return segmented_write(ctxt,
1804 return segmented_write(ctxt,
1823 static int emulate_push(struct x86_emulate_ctxt *ctxt, const void *data, int len)
1827 rsp_increment(ctxt, -len);
1828 addr.ea = reg_read(ctxt, VCPU_REGS_RSP) & stack_mask(ctxt);
1831 return segmented_write(ctxt, addr, data, len);
1834 static int em_push(struct x86_emulate_ctxt *ctxt)
1837 ctxt->dst.type = OP_NONE;
1838 return emulate_push(ctxt, &ctxt->src.val, ctxt->op_bytes);
1841 static int emulate_pop(struct x86_emulate_ctxt *ctxt,
1847 addr.ea = reg_read(ctxt, VCPU_REGS_RSP) & stack_mask(ctxt);
1849 rc = segmented_read(ctxt, addr, dest, len);
1853 rsp_increment(ctxt, len);
1857 static int em_pop(struct x86_emulate_ctxt *ctxt)
1859 return emulate_pop(ctxt, &ctxt->dst.val, ctxt->op_bytes);
1862 static int emulate_popf(struct x86_emulate_ctxt *ctxt,
1868 int iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT;
1869 int cpl = ctxt->ops->cpl(ctxt);
1871 rc = emulate_pop(ctxt, &val, len);
1880 switch(ctxt->mode) {
1891 return emulate_gp(ctxt, 0);
1900 (ctxt->eflags & ~change_mask) | (val & change_mask);
1905 static int em_popf(struct x86_emulate_ctxt *ctxt)
1907 ctxt->dst.type = OP_REG;
1908 ctxt->dst.addr.reg = &ctxt->eflags;
1909 ctxt->dst.bytes = ctxt->op_bytes;
1910 return emulate_popf(ctxt, &ctxt->dst.val, ctxt->op_bytes);
1913 static int em_enter(struct x86_emulate_ctxt *ctxt)
1916 unsigned frame_size = ctxt->src.val;
1917 unsigned nesting_level = ctxt->src2.val & 31;
1923 rbp = reg_read(ctxt, VCPU_REGS_RBP);
1924 rc = emulate_push(ctxt, &rbp, stack_size(ctxt));
1927 assign_masked(reg_rmw(ctxt, VCPU_REGS_RBP), reg_read(ctxt, VCPU_REGS_RSP),
1928 stack_mask(ctxt));
1929 assign_masked(reg_rmw(ctxt, VCPU_REGS_RSP),
1930 reg_read(ctxt, VCPU_REGS_RSP) - frame_size,
1931 stack_mask(ctxt));
1935 static int em_leave(struct x86_emulate_ctxt *ctxt)
1937 assign_masked(reg_rmw(ctxt, VCPU_REGS_RSP), reg_read(ctxt, VCPU_REGS_RBP),
1938 stack_mask(ctxt));
1939 return emulate_pop(ctxt, reg_rmw(ctxt, VCPU_REGS_RBP), ctxt->op_bytes);
1942 static int em_push_sreg(struct x86_emulate_ctxt *ctxt)
1944 int seg = ctxt->src2.val;
1946 ctxt->src.val = get_segment_selector(ctxt, seg);
1947 if (ctxt->op_bytes == 4) {
1948 rsp_increment(ctxt, -2);
1949 ctxt->op_bytes = 2;
1952 return em_push(ctxt);
1955 static int em_pop_sreg(struct x86_emulate_ctxt *ctxt)
1957 int seg = ctxt->src2.val;
1961 rc = emulate_pop(ctxt, &selector, 2);
1966 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS;
1967 if (ctxt->op_bytes > 2)
1968 rsp_increment(ctxt, ctxt->op_bytes - 2);
1970 rc = load_segment_descriptor(ctxt, (u16)selector, seg);
1974 static int em_pusha(struct x86_emulate_ctxt *ctxt)
1976 unsigned long old_esp = reg_read(ctxt, VCPU_REGS_RSP);
1982 (ctxt->src.val = old_esp) : (ctxt->src.val = reg_read(ctxt, reg));
1984 rc = em_push(ctxt);
1994 static int em_pushf(struct x86_emulate_ctxt *ctxt)
1996 ctxt->src.val = (unsigned long)ctxt->eflags & ~X86_EFLAGS_VM;
1997 return em_push(ctxt);
2000 static int em_popa(struct x86_emulate_ctxt *ctxt)
2008 rsp_increment(ctxt, ctxt->op_bytes);
2012 rc = emulate_pop(ctxt, &val, ctxt->op_bytes);
2015 assign_register(reg_rmw(ctxt, reg), val, ctxt->op_bytes);
2021 static int __emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq)
2023 const struct x86_emulate_ops *ops = ctxt->ops;
2031 ctxt->src.val = ctxt->eflags;
2032 rc = em_push(ctxt);
2036 ctxt->eflags &= ~(X86_EFLAGS_IF | X86_EFLAGS_TF | X86_EFLAGS_AC);
2038 ctxt->src.val = get_segment_selector(ctxt, VCPU_SREG_CS);
2039 rc = em_push(ctxt);
2043 ctxt->src.val = ctxt->_eip;
2044 rc = em_push(ctxt);
2048 ops->get_idt(ctxt, &dt);
2053 rc = linear_read_system(ctxt, cs_addr, &cs, 2);
2057 rc = linear_read_system(ctxt, eip_addr, &eip, 2);
2061 rc = load_segment_descriptor(ctxt, cs, VCPU_SREG_CS);
2065 ctxt->_eip = eip;
2070 int emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq)
2074 invalidate_registers(ctxt);
2075 rc = __emulate_int_real(ctxt, irq);
2077 writeback_registers(ctxt);
2081 static int emulate_int(struct x86_emulate_ctxt *ctxt, int irq)
2083 switch(ctxt->mode) {
2085 return __emulate_int_real(ctxt, irq);
2096 static int emulate_iret_real(struct x86_emulate_ctxt *ctxt)
2113 rc = emulate_pop(ctxt, &temp_eip, ctxt->op_bytes);
2119 return emulate_gp(ctxt, 0);
2121 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes);
2126 rc = emulate_pop(ctxt, &temp_eflags, ctxt->op_bytes);
2131 rc = load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS);
2136 ctxt->_eip = temp_eip;
2138 if (ctxt->op_bytes == 4)
2139 ctxt->eflags = ((temp_eflags & mask) | (ctxt->eflags & vm86_mask));
2140 else if (ctxt->op_bytes == 2) {
2141 ctxt->eflags &= ~0xffff;
2142 ctxt->eflags |= temp_eflags;
2145 ctxt->eflags &= ~EFLG_RESERVED_ZEROS_MASK; /* Clear reserved zeros */
2146 ctxt->eflags |= X86_EFLAGS_FIXED;
2147 ctxt->ops->set_nmi_mask(ctxt, false);
2152 static int em_iret(struct x86_emulate_ctxt *ctxt)
2154 switch(ctxt->mode) {
2156 return emulate_iret_real(ctxt);
2167 static int em_jmp_far(struct x86_emulate_ctxt *ctxt)
2172 u8 cpl = ctxt->ops->cpl(ctxt);
2174 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
2176 rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl,
2182 rc = assign_eip_far(ctxt, ctxt->src.val);
2190 static int em_jmp_abs(struct x86_emulate_ctxt *ctxt)
2192 return assign_eip_near(ctxt, ctxt->src.val);
2195 static int em_call_near_abs(struct x86_emulate_ctxt *ctxt)
2200 old_eip = ctxt->_eip;
2201 rc = assign_eip_near(ctxt, ctxt->src.val);
2204 ctxt->src.val = old_eip;
2205 rc = em_push(ctxt);
2209 static int em_cmpxchg8b(struct x86_emulate_ctxt *ctxt)
2211 u64 old = ctxt->dst.orig_val64;
2213 if (ctxt->dst.bytes == 16)
2216 if (((u32) (old >> 0) != (u32) reg_read(ctxt, VCPU_REGS_RAX)) ||
2217 ((u32) (old >> 32) != (u32) reg_read(ctxt, VCPU_REGS_RDX))) {
2218 *reg_write(ctxt, VCPU_REGS_RAX) = (u32) (old >> 0);
2219 *reg_write(ctxt, VCPU_REGS_RDX) = (u32) (old >> 32);
2220 ctxt->eflags &= ~X86_EFLAGS_ZF;
2222 ctxt->dst.val64 = ((u64)reg_read(ctxt, VCPU_REGS_RCX) << 32) |
2223 (u32) reg_read(ctxt, VCPU_REGS_RBX);
2225 ctxt->eflags |= X86_EFLAGS_ZF;
2230 static int em_ret(struct x86_emulate_ctxt *ctxt)
2235 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes);
2239 return assign_eip_near(ctxt, eip);
2242 static int em_ret_far(struct x86_emulate_ctxt *ctxt)
2247 int cpl = ctxt->ops->cpl(ctxt);
2250 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes);
2253 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes);
2256 rc = __load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS, cpl,
2261 rc = assign_eip_far(ctxt, eip);
2269 static int em_ret_far_imm(struct x86_emulate_ctxt *ctxt)
2273 rc = em_ret_far(ctxt);
2276 rsp_increment(ctxt, ctxt->src.val);
2280 static int em_cmpxchg(struct x86_emulate_ctxt *ctxt)
2283 ctxt->dst.orig_val = ctxt->dst.val;
2284 ctxt->dst.val = reg_read(ctxt, VCPU_REGS_RAX);
2285 ctxt->src.orig_val = ctxt->src.val;
2286 ctxt->src.val = ctxt->dst.orig_val;
2287 fastop(ctxt, em_cmp);
2289 if (ctxt->eflags & X86_EFLAGS_ZF) {
2291 ctxt->src.type = OP_NONE;
2292 ctxt->dst.val = ctxt->src.orig_val;
2295 ctxt->src.type = OP_REG;
2296 ctxt->src.addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX);
2297 ctxt->src.val = ctxt->dst.orig_val;
2299 ctxt->dst.val = ctxt->dst.orig_val;
2304 static int em_lseg(struct x86_emulate_ctxt *ctxt)
2306 int seg = ctxt->src2.val;
2310 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
2312 rc = load_segment_descriptor(ctxt, sel, seg);
2316 ctxt->dst.val = ctxt->src.val;
2320 static int em_rsm(struct x86_emulate_ctxt *ctxt)
2322 if (!ctxt->ops->is_smm(ctxt))
2323 return emulate_ud(ctxt);
2325 if (ctxt->ops->leave_smm(ctxt))
2326 ctxt->ops->triple_fault(ctxt);
2328 return emulator_recalc_and_set_mode(ctxt);
2357 static bool vendor_intel(struct x86_emulate_ctxt *ctxt)
2362 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
2366 static bool em_syscall_is_enabled(struct x86_emulate_ctxt *ctxt)
2368 const struct x86_emulate_ops *ops = ctxt->ops;
2375 if (ctxt->mode == X86EMUL_MODE_PROT64)
2380 ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
2401 static int em_syscall(struct x86_emulate_ctxt *ctxt)
2403 const struct x86_emulate_ops *ops = ctxt->ops;
2410 if (ctxt->mode == X86EMUL_MODE_REAL ||
2411 ctxt->mode == X86EMUL_MODE_VM86)
2412 return emulate_ud(ctxt);
2414 if (!(em_syscall_is_enabled(ctxt)))
2415 return emulate_ud(ctxt);
2417 ops->get_msr(ctxt, MSR_EFER, &efer);
2419 return emulate_ud(ctxt);
2422 ops->get_msr(ctxt, MSR_STAR, &msr_data);
2431 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2432 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2434 *reg_write(ctxt, VCPU_REGS_RCX) = ctxt->_eip;
2437 *reg_write(ctxt, VCPU_REGS_R11) = ctxt->eflags;
2439 ops->get_msr(ctxt,
2440 ctxt->mode == X86EMUL_MODE_PROT64 ?
2442 ctxt->_eip = msr_data;
2444 ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data);
2445 ctxt->eflags &= ~msr_data;
2446 ctxt->eflags |= X86_EFLAGS_FIXED;
2450 ops->get_msr(ctxt, MSR_STAR, &msr_data);
2451 ctxt->_eip = (u32)msr_data;
2453 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF);
2456 ctxt->tf = (ctxt->eflags & X86_EFLAGS_TF) != 0;
2460 static int em_sysenter(struct x86_emulate_ctxt *ctxt)
2462 const struct x86_emulate_ops *ops = ctxt->ops;
2468 ops->get_msr(ctxt, MSR_EFER, &efer);
2470 if (ctxt->mode == X86EMUL_MODE_REAL)
2471 return emulate_gp(ctxt, 0);
2477 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA)
2478 && !vendor_intel(ctxt))
2479 return emulate_ud(ctxt);
2482 if (ctxt->mode == X86EMUL_MODE_PROT64)
2485 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
2487 return emulate_gp(ctxt, 0);
2490 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF);
2498 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2499 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2501 ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data);
2502 ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data;
2504 ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data);
2505 *reg_write(ctxt, VCPU_REGS_RSP) = (efer & EFER_LMA) ? msr_data :
2508 ctxt->mode = X86EMUL_MODE_PROT64;
2513 static int em_sysexit(struct x86_emulate_ctxt *ctxt)
2515 const struct x86_emulate_ops *ops = ctxt->ops;
2522 if (ctxt->mode == X86EMUL_MODE_REAL ||
2523 ctxt->mode == X86EMUL_MODE_VM86)
2524 return emulate_gp(ctxt, 0);
2528 if ((ctxt->rex_prefix & 0x8) != 0x0)
2533 rcx = reg_read(ctxt, VCPU_REGS_RCX);
2534 rdx = reg_read(ctxt, VCPU_REGS_RDX);
2538 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
2543 return emulate_gp(ctxt, 0);
2551 return emulate_gp(ctxt, 0);
2555 if (emul_is_noncanonical_address(rcx, ctxt) ||
2556 emul_is_noncanonical_address(rdx, ctxt))
2557 return emulate_gp(ctxt, 0);
2563 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2564 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2566 ctxt->_eip = rdx;
2567 ctxt->mode = usermode;
2568 *reg_write(ctxt, VCPU_REGS_RSP) = rcx;
2573 static bool emulator_bad_iopl(struct x86_emulate_ctxt *ctxt)
2576 if (ctxt->mode == X86EMUL_MODE_REAL)
2578 if (ctxt->mode == X86EMUL_MODE_VM86)
2580 iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT;
2581 return ctxt->ops->cpl(ctxt) > iopl;
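
For reference, the IOPL comparison at lines 2580-2581 above reads the two-bit I/O privilege level out of EFLAGS bits 13:12 and treats the access as implicitly permitted only when CPL <= IOPL; otherwise the TSS I/O bitmap is consulted (lines 2587 onward). A small sketch with the constants inlined:

#include <stdbool.h>
#include <stdio.h>

#define EFLAGS_IOPL_BIT   12
#define EFLAGS_IOPL_MASK  (3u << EFLAGS_IOPL_BIT)   /* EFLAGS bits 13:12 */

/* Mirrors line 2581 above: CPL above IOPL means the port access is not
 * implicitly permitted. */
static bool bad_iopl(unsigned int eflags, unsigned int cpl)
{
	return cpl > ((eflags & EFLAGS_IOPL_MASK) >> EFLAGS_IOPL_BIT);
}

int main(void)
{
	printf("%d\n", bad_iopl(0x00000002, 3));   /* IOPL=0, CPL=3 -> 1 */
	printf("%d\n", bad_iopl(0x00003002, 3));   /* IOPL=3, CPL=3 -> 0 */
	return 0;
}
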
2587 static bool emulator_io_port_access_allowed(struct x86_emulate_ctxt *ctxt,
2590 const struct x86_emulate_ops *ops = ctxt->ops;
2606 ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR);
2615 r = ops->read_std(ctxt, base + 102, &io_bitmap_ptr, 2, NULL, true);
2620 r = ops->read_std(ctxt, base + io_bitmap_ptr + port/8, &perm, 2, NULL, true);
2628 static bool emulator_io_permitted(struct x86_emulate_ctxt *ctxt,
2631 if (ctxt->perm_ok)
2634 if (emulator_bad_iopl(ctxt))
2635 if (!emulator_io_port_access_allowed(ctxt, port, len))
2638 ctxt->perm_ok = true;
2643 static void string_registers_quirk(struct x86_emulate_ctxt *ctxt)
2650 if (ctxt->ad_bytes != 4 || !vendor_intel(ctxt))
2653 *reg_write(ctxt, VCPU_REGS_RCX) = 0;
2655 switch (ctxt->b) {
2658 *reg_rmw(ctxt, VCPU_REGS_RSI) &= (u32)-1;
2662 *reg_rmw(ctxt, VCPU_REGS_RDI) &= (u32)-1;
2667 static void save_state_to_tss16(struct x86_emulate_ctxt *ctxt,
2670 tss->ip = ctxt->_eip;
2671 tss->flag = ctxt->eflags;
2672 tss->ax = reg_read(ctxt, VCPU_REGS_RAX);
2673 tss->cx = reg_read(ctxt, VCPU_REGS_RCX);
2674 tss->dx = reg_read(ctxt, VCPU_REGS_RDX);
2675 tss->bx = reg_read(ctxt, VCPU_REGS_RBX);
2676 tss->sp = reg_read(ctxt, VCPU_REGS_RSP);
2677 tss->bp = reg_read(ctxt, VCPU_REGS_RBP);
2678 tss->si = reg_read(ctxt, VCPU_REGS_RSI);
2679 tss->di = reg_read(ctxt, VCPU_REGS_RDI);
2681 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES);
2682 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS);
2683 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS);
2684 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS);
2685 tss->ldt = get_segment_selector(ctxt, VCPU_SREG_LDTR);
2688 static int load_state_from_tss16(struct x86_emulate_ctxt *ctxt,
2694 ctxt->_eip = tss->ip;
2695 ctxt->eflags = tss->flag | 2;
2696 *reg_write(ctxt, VCPU_REGS_RAX) = tss->ax;
2697 *reg_write(ctxt, VCPU_REGS_RCX) = tss->cx;
2698 *reg_write(ctxt, VCPU_REGS_RDX) = tss->dx;
2699 *reg_write(ctxt, VCPU_REGS_RBX) = tss->bx;
2700 *reg_write(ctxt, VCPU_REGS_RSP) = tss->sp;
2701 *reg_write(ctxt, VCPU_REGS_RBP) = tss->bp;
2702 *reg_write(ctxt, VCPU_REGS_RSI) = tss->si;
2703 *reg_write(ctxt, VCPU_REGS_RDI) = tss->di;
2709 set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR);
2710 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
2711 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
2712 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
2713 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
2721 ret = __load_segment_descriptor(ctxt, tss->ldt, VCPU_SREG_LDTR, cpl,
2725 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl,
2729 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl,
2733 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl,
2737 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl,
2745 static int task_switch_16(struct x86_emulate_ctxt *ctxt, u16 old_tss_sel,
2752 ret = linear_read_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg));
2756 save_state_to_tss16(ctxt, &tss_seg);
2758 ret = linear_write_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg));
2762 ret = linear_read_system(ctxt, new_tss_base, &tss_seg, sizeof(tss_seg));
2769 ret = linear_write_system(ctxt, new_tss_base,
2776 return load_state_from_tss16(ctxt, &tss_seg);
2779 static void save_state_to_tss32(struct x86_emulate_ctxt *ctxt,
2783 tss->eip = ctxt->_eip;
2784 tss->eflags = ctxt->eflags;
2785 tss->eax = reg_read(ctxt, VCPU_REGS_RAX);
2786 tss->ecx = reg_read(ctxt, VCPU_REGS_RCX);
2787 tss->edx = reg_read(ctxt, VCPU_REGS_RDX);
2788 tss->ebx = reg_read(ctxt, VCPU_REGS_RBX);
2789 tss->esp = reg_read(ctxt, VCPU_REGS_RSP);
2790 tss->ebp = reg_read(ctxt, VCPU_REGS_RBP);
2791 tss->esi = reg_read(ctxt, VCPU_REGS_RSI);
2792 tss->edi = reg_read(ctxt, VCPU_REGS_RDI);
2794 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES);
2795 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS);
2796 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS);
2797 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS);
2798 tss->fs = get_segment_selector(ctxt, VCPU_SREG_FS);
2799 tss->gs = get_segment_selector(ctxt, VCPU_SREG_GS);
2802 static int load_state_from_tss32(struct x86_emulate_ctxt *ctxt,
2808 if (ctxt->ops->set_cr(ctxt, 3, tss->cr3))
2809 return emulate_gp(ctxt, 0);
2810 ctxt->_eip = tss->eip;
2811 ctxt->eflags = tss->eflags | 2;
2814 *reg_write(ctxt, VCPU_REGS_RAX) = tss->eax;
2815 *reg_write(ctxt, VCPU_REGS_RCX) = tss->ecx;
2816 *reg_write(ctxt, VCPU_REGS_RDX) = tss->edx;
2817 *reg_write(ctxt, VCPU_REGS_RBX) = tss->ebx;
2818 *reg_write(ctxt, VCPU_REGS_RSP) = tss->esp;
2819 *reg_write(ctxt, VCPU_REGS_RBP) = tss->ebp;
2820 *reg_write(ctxt, VCPU_REGS_RSI) = tss->esi;
2821 *reg_write(ctxt, VCPU_REGS_RDI) = tss->edi;
2828 set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR);
2829 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
2830 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
2831 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
2832 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
2833 set_segment_selector(ctxt, tss->fs, VCPU_SREG_FS);
2834 set_segment_selector(ctxt, tss->gs, VCPU_SREG_GS);
2841 if (ctxt->eflags & X86_EFLAGS_VM) {
2842 ctxt->mode = X86EMUL_MODE_VM86;
2845 ctxt->mode = X86EMUL_MODE_PROT32;
2853 ret = __load_segment_descriptor(ctxt, tss->ldt_selector, VCPU_SREG_LDTR,
2857 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl,
2861 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl,
2865 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl,
2869 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl,
2873 ret = __load_segment_descriptor(ctxt, tss->fs, VCPU_SREG_FS, cpl,
2877 ret = __load_segment_descriptor(ctxt, tss->gs, VCPU_SREG_GS, cpl,
2883 static int task_switch_32(struct x86_emulate_ctxt *ctxt, u16 old_tss_sel,
2892 ret = linear_read_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg));
2896 save_state_to_tss32(ctxt, &tss_seg);
2899 ret = linear_write_system(ctxt, old_tss_base + eip_offset, &tss_seg.eip,
2904 ret = linear_read_system(ctxt, new_tss_base, &tss_seg, sizeof(tss_seg));
2911 ret = linear_write_system(ctxt, new_tss_base,
2918 return load_state_from_tss32(ctxt, &tss_seg);
2921 static int emulator_do_task_switch(struct x86_emulate_ctxt *ctxt,
2925 const struct x86_emulate_ops *ops = ctxt->ops;
2928 u16 old_tss_sel = get_segment_selector(ctxt, VCPU_SREG_TR);
2930 ops->get_cached_segment_base(ctxt, VCPU_SREG_TR);
2936 ret = read_segment_descriptor(ctxt, tss_selector, &next_tss_desc, &desc_addr);
2939 ret = read_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc, &desc_addr);
2959 ret = read_interrupt_descriptor(ctxt, idt_index,
2965 if ((tss_selector & 3) > dpl || ops->cpl(ctxt) > dpl)
2966 return emulate_gp(ctxt, (idt_index << 3) | 0x2);
2974 return emulate_ts(ctxt, tss_selector & 0xfffc);
2979 write_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc);
2983 ctxt->eflags = ctxt->eflags & ~X86_EFLAGS_NT;
2991 ret = task_switch_32(ctxt, old_tss_sel, old_tss_base, &next_tss_desc);
2993 ret = task_switch_16(ctxt, old_tss_sel,
2999 ctxt->eflags = ctxt->eflags | X86_EFLAGS_NT;
3003 write_segment_descriptor(ctxt, tss_selector, &next_tss_desc);
3006 ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS);
3007 ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR);
3010 ctxt->op_bytes = ctxt->ad_bytes = (next_tss_desc.type & 8) ? 4 : 2;
3011 ctxt->lock_prefix = 0;
3012 ctxt->src.val = (unsigned long) error_code;
3013 ret = em_push(ctxt);
3016 dr7 = ops->get_dr(ctxt, 7);
3017 ops->set_dr(ctxt, 7, dr7 & ~(DR_LOCAL_ENABLE_MASK | DR_LOCAL_SLOWDOWN));
3022 int emulator_task_switch(struct x86_emulate_ctxt *ctxt,
3028 invalidate_registers(ctxt);
3029 ctxt->_eip = ctxt->eip;
3030 ctxt->dst.type = OP_NONE;
3032 rc = emulator_do_task_switch(ctxt, tss_selector, idt_index, reason,
3036 ctxt->eip = ctxt->_eip;
3037 writeback_registers(ctxt);
3043 static void string_addr_inc(struct x86_emulate_ctxt *ctxt, int reg,
3046 int df = (ctxt->eflags & X86_EFLAGS_DF) ? -op->count : op->count;
3048 register_address_increment(ctxt, reg, df * op->bytes);
3049 op->addr.mem.ea = register_address(ctxt, reg);
3052 static int em_das(struct x86_emulate_ctxt *ctxt)
3057 cf = ctxt->eflags & X86_EFLAGS_CF;
3058 al = ctxt->dst.val;
3063 af = ctxt->eflags & X86_EFLAGS_AF;
3076 ctxt->dst.val = al;
3078 ctxt->src.type = OP_IMM;
3079 ctxt->src.val = 0;
3080 ctxt->src.bytes = 1;
3081 fastop(ctxt, em_or);
3082 ctxt->eflags &= ~(X86_EFLAGS_AF | X86_EFLAGS_CF);
3084 ctxt->eflags |= X86_EFLAGS_CF;
3086 ctxt->eflags |= X86_EFLAGS_AF;
3090 static int em_aam(struct x86_emulate_ctxt *ctxt)
3094 if (ctxt->src.val == 0)
3095 return emulate_de(ctxt);
3097 al = ctxt->dst.val & 0xff;
3098 ah = al / ctxt->src.val;
3099 al %= ctxt->src.val;
3101 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al | (ah << 8);
3104 ctxt->src.type = OP_IMM;
3105 ctxt->src.val = 0;
3106 ctxt->src.bytes = 1;
3107 fastop(ctxt, em_or);
3112 static int em_aad(struct x86_emulate_ctxt *ctxt)
3114 u8 al = ctxt->dst.val & 0xff;
3115 u8 ah = (ctxt->dst.val >> 8) & 0xff;
3117 al = (al + (ah * ctxt->src.val)) & 0xff;
3119 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al;
3122 ctxt->src.type = OP_IMM;
3123 ctxt->src.val = 0;
3124 ctxt->src.bytes = 1;
3125 fastop(ctxt, em_or);
3130 static int em_call(struct x86_emulate_ctxt *ctxt)
3133 long rel = ctxt->src.val;
3135 ctxt->src.val = (unsigned long)ctxt->_eip;
3136 rc = jmp_rel(ctxt, rel);
3139 return em_push(ctxt);
3142 static int em_call_far(struct x86_emulate_ctxt *ctxt)
3148 const struct x86_emulate_ops *ops = ctxt->ops;
3149 int cpl = ctxt->ops->cpl(ctxt);
3150 enum x86emul_mode prev_mode = ctxt->mode;
3152 old_eip = ctxt->_eip;
3153 ops->get_segment(ctxt, &old_cs, &old_desc, NULL, VCPU_SREG_CS);
3155 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
3156 rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl,
3161 rc = assign_eip_far(ctxt, ctxt->src.val);
3165 ctxt->src.val = old_cs;
3166 rc = em_push(ctxt);
3170 ctxt->src.val = old_eip;
3171 rc = em_push(ctxt);
3180 ops->set_segment(ctxt, old_cs, &old_desc, 0, VCPU_SREG_CS);
3181 ctxt->mode = prev_mode;
3186 static int em_ret_near_imm(struct x86_emulate_ctxt *ctxt)
3191 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes);
3194 rc = assign_eip_near(ctxt, eip);
3197 rsp_increment(ctxt, ctxt->src.val);
3201 static int em_xchg(struct x86_emulate_ctxt *ctxt)
3204 ctxt->src.val = ctxt->dst.val;
3205 write_register_operand(&ctxt->src);
3208 ctxt->dst.val = ctxt->src.orig_val;
3209 ctxt->lock_prefix = 1;
3213 static int em_imul_3op(struct x86_emulate_ctxt *ctxt)
3215 ctxt->dst.val = ctxt->src2.val;
3216 return fastop(ctxt, em_imul);
3219 static int em_cwd(struct x86_emulate_ctxt *ctxt)
3221 ctxt->dst.type = OP_REG;
3222 ctxt->dst.bytes = ctxt->src.bytes;
3223 ctxt->dst.addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX);
3224 ctxt->dst.val = ~((ctxt->src.val >> (ctxt->src.bytes * 8 - 1)) - 1);
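
The expression at line 3224 above is the CWD/CDQ/CQO sign-fill trick: shift the operand's sign bit down to bit 0, subtract 1 and invert, giving all-ones for a negative source and zero otherwise. A standalone sketch:

#include <stdint.h>
#include <stdio.h>

/* Returns ~0 if the sign bit of the low `bytes` of src is set, else 0. */
static uint64_t sign_fill(uint64_t src, int bytes)
{
	return ~((src >> (bytes * 8 - 1)) - 1);
}

int main(void)
{
	printf("%llx\n", (unsigned long long)sign_fill(0x8000, 2));   /* ffffffffffffffff */
	printf("%llx\n", (unsigned long long)sign_fill(0x7fff, 2));   /* 0 */
	return 0;
}
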
3229 static int em_rdpid(struct x86_emulate_ctxt *ctxt)
3233 if (!ctxt->ops->guest_has_rdpid(ctxt))
3234 return emulate_ud(ctxt);
3236 ctxt->ops->get_msr(ctxt, MSR_TSC_AUX, &tsc_aux);
3237 ctxt->dst.val = tsc_aux;
3241 static int em_rdtsc(struct x86_emulate_ctxt *ctxt)
3245 ctxt->ops->get_msr(ctxt, MSR_IA32_TSC, &tsc);
3246 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)tsc;
3247 *reg_write(ctxt, VCPU_REGS_RDX) = tsc >> 32;
3251 static int em_rdpmc(struct x86_emulate_ctxt *ctxt)
3255 if (ctxt->ops->read_pmc(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &pmc))
3256 return emulate_gp(ctxt, 0);
3257 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)pmc;
3258 *reg_write(ctxt, VCPU_REGS_RDX) = pmc >> 32;
3262 static int em_mov(struct x86_emulate_ctxt *ctxt)
3264 memcpy(ctxt->dst.valptr, ctxt->src.valptr, sizeof(ctxt->src.valptr));
3268 static int em_movbe(struct x86_emulate_ctxt *ctxt)
3272 if (!ctxt->ops->guest_has_movbe(ctxt))
3273 return emulate_ud(ctxt);
3275 switch (ctxt->op_bytes) {
3285 tmp = (u16)ctxt->src.val;
3286 ctxt->dst.val &= ~0xffffUL;
3287 ctxt->dst.val |= (unsigned long)swab16(tmp);
3290 ctxt->dst.val = swab32((u32)ctxt->src.val);
3293 ctxt->dst.val = swab64(ctxt->src.val);
3301 static int em_cr_write(struct x86_emulate_ctxt *ctxt)
3303 int cr_num = ctxt->modrm_reg;
3306 if (ctxt->ops->set_cr(ctxt, cr_num, ctxt->src.val))
3307 return emulate_gp(ctxt, 0);
3310 ctxt->dst.type = OP_NONE;
3317 r = emulator_recalc_and_set_mode(ctxt);
3325 static int em_dr_write(struct x86_emulate_ctxt *ctxt)
3329 if (ctxt->mode == X86EMUL_MODE_PROT64)
3330 val = ctxt->src.val & ~0ULL;
3332 val = ctxt->src.val & ~0U;
3335 if (ctxt->ops->set_dr(ctxt, ctxt->modrm_reg, val) < 0)
3336 return emulate_gp(ctxt, 0);
3339 ctxt->dst.type = OP_NONE;
3343 static int em_wrmsr(struct x86_emulate_ctxt *ctxt)
3345 u64 msr_index = reg_read(ctxt, VCPU_REGS_RCX);
3349 msr_data = (u32)reg_read(ctxt, VCPU_REGS_RAX)
3350 | ((u64)reg_read(ctxt, VCPU_REGS_RDX) << 32);
3351 r = ctxt->ops->set_msr_with_filter(ctxt, msr_index, msr_data);
3354 return emulate_gp(ctxt, 0);
3359 static int em_rdmsr(struct x86_emulate_ctxt *ctxt)
3361 u64 msr_index = reg_read(ctxt, VCPU_REGS_RCX);
3365 r = ctxt->ops->get_msr_with_filter(ctxt, msr_index, &msr_data);
3368 return emulate_gp(ctxt, 0);
3371 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)msr_data;
3372 *reg_write(ctxt, VCPU_REGS_RDX) = msr_data >> 32;
3377 static int em_store_sreg(struct x86_emulate_ctxt *ctxt, int segment)
3380 (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3381 ctxt->ops->cpl(ctxt) > 0)
3382 return emulate_gp(ctxt, 0);
3384 ctxt->dst.val = get_segment_selector(ctxt, segment);
3385 if (ctxt->dst.bytes == 4 && ctxt->dst.type == OP_MEM)
3386 ctxt->dst.bytes = 2;
3390 static int em_mov_rm_sreg(struct x86_emulate_ctxt *ctxt)
3392 if (ctxt->modrm_reg > VCPU_SREG_GS)
3393 return emulate_ud(ctxt);
3395 return em_store_sreg(ctxt, ctxt->modrm_reg);
3398 static int em_mov_sreg_rm(struct x86_emulate_ctxt *ctxt)
3400 u16 sel = ctxt->src.val;
3402 if (ctxt->modrm_reg == VCPU_SREG_CS || ctxt->modrm_reg > VCPU_SREG_GS)
3403 return emulate_ud(ctxt);
3405 if (ctxt->modrm_reg == VCPU_SREG_SS)
3406 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS;
3409 ctxt->dst.type = OP_NONE;
3410 return load_segment_descriptor(ctxt, sel, ctxt->modrm_reg);
3413 static int em_sldt(struct x86_emulate_ctxt *ctxt)
3415 return em_store_sreg(ctxt, VCPU_SREG_LDTR);
3418 static int em_lldt(struct x86_emulate_ctxt *ctxt)
3420 u16 sel = ctxt->src.val;
3423 ctxt->dst.type = OP_NONE;
3424 return load_segment_descriptor(ctxt, sel, VCPU_SREG_LDTR);
3427 static int em_str(struct x86_emulate_ctxt *ctxt)
3429 return em_store_sreg(ctxt, VCPU_SREG_TR);
3432 static int em_ltr(struct x86_emulate_ctxt *ctxt)
3434 u16 sel = ctxt->src.val;
3437 ctxt->dst.type = OP_NONE;
3438 return load_segment_descriptor(ctxt, sel, VCPU_SREG_TR);
3441 static int em_invlpg(struct x86_emulate_ctxt *ctxt)
3447 rc = __linearize(ctxt, ctxt->src.addr.mem, &max_size, 1, ctxt->mode,
3450 ctxt->ops->invlpg(ctxt, linear);
3452 ctxt->dst.type = OP_NONE;
3456 static int em_clts(struct x86_emulate_ctxt *ctxt)
3460 cr0 = ctxt->ops->get_cr(ctxt, 0);
3462 ctxt->ops->set_cr(ctxt, 0, cr0);
3466 static int em_hypercall(struct x86_emulate_ctxt *ctxt)
3468 int rc = ctxt->ops->fix_hypercall(ctxt);
3474 ctxt->_eip = ctxt->eip;
3476 ctxt->dst.type = OP_NONE;
3480 static int emulate_store_desc_ptr(struct x86_emulate_ctxt *ctxt,
3481 void (*get)(struct x86_emulate_ctxt *ctxt,
3486 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3487 ctxt->ops->cpl(ctxt) > 0)
3488 return emulate_gp(ctxt, 0);
3490 if (ctxt->mode == X86EMUL_MODE_PROT64)
3491 ctxt->op_bytes = 8;
3492 get(ctxt, &desc_ptr);
3493 if (ctxt->op_bytes == 2) {
3494 ctxt->op_bytes = 4;
3498 ctxt->dst.type = OP_NONE;
3499 return segmented_write_std(ctxt, ctxt->dst.addr.mem,
3500 &desc_ptr, 2 + ctxt->op_bytes);
3503 static int em_sgdt(struct x86_emulate_ctxt *ctxt)
3505 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_gdt);
3508 static int em_sidt(struct x86_emulate_ctxt *ctxt)
3510 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_idt);
3513 static int em_lgdt_lidt(struct x86_emulate_ctxt *ctxt, bool lgdt)
3518 if (ctxt->mode == X86EMUL_MODE_PROT64)
3519 ctxt->op_bytes = 8;
3520 rc = read_descriptor(ctxt, ctxt->src.addr.mem,
3522 ctxt->op_bytes);
3525 if (ctxt->mode == X86EMUL_MODE_PROT64 &&
3526 emul_is_noncanonical_address(desc_ptr.address, ctxt))
3527 return emulate_gp(ctxt, 0);
3529 ctxt->ops->set_gdt(ctxt, &desc_ptr);
3531 ctxt->ops->set_idt(ctxt, &desc_ptr);
3533 ctxt->dst.type = OP_NONE;
3537 static int em_lgdt(struct x86_emulate_ctxt *ctxt)
3539 return em_lgdt_lidt(ctxt, true);
3542 static int em_lidt(struct x86_emulate_ctxt *ctxt)
3544 return em_lgdt_lidt(ctxt, false);
3547 static int em_smsw(struct x86_emulate_ctxt *ctxt)
3549 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3550 ctxt->ops->cpl(ctxt) > 0)
3551 return emulate_gp(ctxt, 0);
3553 if (ctxt->dst.type == OP_MEM)
3554 ctxt->dst.bytes = 2;
3555 ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0);
3559 static int em_lmsw(struct x86_emulate_ctxt *ctxt)
3561 ctxt->ops->set_cr(ctxt, 0, (ctxt->ops->get_cr(ctxt, 0) & ~0x0eul)
3562 | (ctxt->src.val & 0x0f));
3563 ctxt->dst.type = OP_NONE;
3567 static int em_loop(struct x86_emulate_ctxt *ctxt)
3571 register_address_increment(ctxt, VCPU_REGS_RCX, -1);
3572 if ((address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) != 0) &&
3573 (ctxt->b == 0xe2 || test_cc(ctxt->b ^ 0x5, ctxt->eflags)))
3574 rc = jmp_rel(ctxt, ctxt->src.val);
3579 static int em_jcxz(struct x86_emulate_ctxt *ctxt)
3583 if (address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) == 0)
3584 rc = jmp_rel(ctxt, ctxt->src.val);
3589 static int em_in(struct x86_emulate_ctxt *ctxt)
3591 if (!pio_in_emulated(ctxt, ctxt->dst.bytes, ctxt->src.val,
3592 &ctxt->dst.val))
3598 static int em_out(struct x86_emulate_ctxt *ctxt)
3600 ctxt->ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val,
3601 &ctxt->src.val, 1);
3603 ctxt->dst.type = OP_NONE;
3607 static int em_cli(struct x86_emulate_ctxt *ctxt)
3609 if (emulator_bad_iopl(ctxt))
3610 return emulate_gp(ctxt, 0);
3612 ctxt->eflags &= ~X86_EFLAGS_IF;
3616 static int em_sti(struct x86_emulate_ctxt *ctxt)
3618 if (emulator_bad_iopl(ctxt))
3619 return emulate_gp(ctxt, 0);
3621 ctxt->interruptibility = KVM_X86_SHADOW_INT_STI;
3622 ctxt->eflags |= X86_EFLAGS_IF;
3626 static int em_cpuid(struct x86_emulate_ctxt *ctxt)
3631 ctxt->ops->get_msr(ctxt, MSR_MISC_FEATURES_ENABLES, &msr);
3633 ctxt->ops->cpl(ctxt)) {
3634 return emulate_gp(ctxt, 0);
3637 eax = reg_read(ctxt, VCPU_REGS_RAX);
3638 ecx = reg_read(ctxt, VCPU_REGS_RCX);
3639 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false);
3640 *reg_write(ctxt, VCPU_REGS_RAX) = eax;
3641 *reg_write(ctxt, VCPU_REGS_RBX) = ebx;
3642 *reg_write(ctxt, VCPU_REGS_RCX) = ecx;
3643 *reg_write(ctxt, VCPU_REGS_RDX) = edx;
3647 static int em_sahf(struct x86_emulate_ctxt *ctxt)
3653 flags &= *reg_rmw(ctxt, VCPU_REGS_RAX) >> 8;
3655 ctxt->eflags &= ~0xffUL;
3656 ctxt->eflags |= flags | X86_EFLAGS_FIXED;
3660 static int em_lahf(struct x86_emulate_ctxt *ctxt)
3662 *reg_rmw(ctxt, VCPU_REGS_RAX) &= ~0xff00UL;
3663 *reg_rmw(ctxt, VCPU_REGS_RAX) |= (ctxt->eflags & 0xff) << 8;
3667 static int em_bswap(struct x86_emulate_ctxt *ctxt)
3669 switch (ctxt->op_bytes) {
3672 asm("bswap %0" : "+r"(ctxt->dst.val));
3676 asm("bswap %0" : "+r"(*(u32 *)&ctxt->dst.val));
3682 static int em_clflush(struct x86_emulate_ctxt *ctxt)
3688 static int em_clflushopt(struct x86_emulate_ctxt *ctxt)
3694 static int em_movsxd(struct x86_emulate_ctxt *ctxt)
3696 ctxt->dst.val = (s32) ctxt->src.val;
3700 static int check_fxsr(struct x86_emulate_ctxt *ctxt)
3702 if (!ctxt->ops->guest_has_fxsr(ctxt))
3703 return emulate_ud(ctxt);
3705 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
3706 return emulate_nm(ctxt);
3712 if (ctxt->mode >= X86EMUL_MODE_PROT64)
3727 static inline size_t fxstate_size(struct x86_emulate_ctxt *ctxt)
3730 if (ctxt->mode == X86EMUL_MODE_PROT64)
3733 cr4_osfxsr = ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR;
3755 static int em_fxsave(struct x86_emulate_ctxt *ctxt)
3760 rc = check_fxsr(ctxt);
3773 return segmented_write_std(ctxt, ctxt->memop.addr.mem, &fx_state,
3774 fxstate_size(ctxt));
3797 static int em_fxrstor(struct x86_emulate_ctxt *ctxt)
3803 rc = check_fxsr(ctxt);
3807 size = fxstate_size(ctxt);
3808 rc = segmented_read_std(ctxt, ctxt->memop.addr.mem, &fx_state, size);
3821 rc = emulate_gp(ctxt, 0);
3834 static int em_xsetbv(struct x86_emulate_ctxt *ctxt)
3838 if (!(ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSXSAVE))
3839 return emulate_ud(ctxt);
3841 eax = reg_read(ctxt, VCPU_REGS_RAX);
3842 edx = reg_read(ctxt, VCPU_REGS_RDX);
3843 ecx = reg_read(ctxt, VCPU_REGS_RCX);
3845 if (ctxt->ops->set_xcr(ctxt, ecx, ((u64)edx << 32) | eax))
3846 return emulate_gp(ctxt, 0);
3863 static int check_cr_access(struct x86_emulate_ctxt *ctxt)
3865 if (!valid_cr(ctxt->modrm_reg))
3866 return emulate_ud(ctxt);
3871 static int check_dr_read(struct x86_emulate_ctxt *ctxt)
3873 int dr = ctxt->modrm_reg;
3877 return emulate_ud(ctxt);
3879 cr4 = ctxt->ops->get_cr(ctxt, 4);
3881 return emulate_ud(ctxt);
3883 if (ctxt->ops->get_dr(ctxt, 7) & DR7_GD) {
3886 dr6 = ctxt->ops->get_dr(ctxt, 6);
3889 ctxt->ops->set_dr(ctxt, 6, dr6);
3890 return emulate_db(ctxt);
3896 static int check_dr_write(struct x86_emulate_ctxt *ctxt)
3898 u64 new_val = ctxt->src.val64;
3899 int dr = ctxt->modrm_reg;
3902 return emulate_gp(ctxt, 0);
3904 return check_dr_read(ctxt);
3907 static int check_svme(struct x86_emulate_ctxt *ctxt)
3911 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
3914 return emulate_ud(ctxt);
3919 static int check_svme_pa(struct x86_emulate_ctxt *ctxt)
3921 u64 rax = reg_read(ctxt, VCPU_REGS_RAX);
3925 return emulate_gp(ctxt, 0);
3927 return check_svme(ctxt);
3930 static int check_rdtsc(struct x86_emulate_ctxt *ctxt)
3932 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
3934 if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt))
3935 return emulate_gp(ctxt, 0);
3940 static int check_rdpmc(struct x86_emulate_ctxt *ctxt)
3942 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
3943 u64 rcx = reg_read(ctxt, VCPU_REGS_RCX);
3957 if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) ||
3958 ctxt->ops->check_rdpmc_early(ctxt, rcx))
3959 return emulate_gp(ctxt, 0);
3964 static int check_perm_in(struct x86_emulate_ctxt *ctxt)
3966 ctxt->dst.bytes = min(ctxt->dst.bytes, 4u);
3967 if (!emulator_io_permitted(ctxt, ctxt->src.val, ctxt->dst.bytes))
3968 return emulate_gp(ctxt, 0);
3973 static int check_perm_out(struct x86_emulate_ctxt *ctxt)
3975 ctxt->src.bytes = min(ctxt->src.bytes, 4u);
3976 if (!emulator_io_permitted(ctxt, ctxt->dst.val, ctxt->src.bytes))
3977 return emulate_gp(ctxt, 0);
4540 static unsigned imm_size(struct x86_emulate_ctxt *ctxt)
4544 size = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4550 static int decode_imm(struct x86_emulate_ctxt *ctxt, struct operand *op,
4557 op->addr.mem.ea = ctxt->_eip;
4561 op->val = insn_fetch(s8, ctxt);
4564 op->val = insn_fetch(s16, ctxt);
4567 op->val = insn_fetch(s32, ctxt);
4570 op->val = insn_fetch(s64, ctxt);
4590 static int decode_operand(struct x86_emulate_ctxt *ctxt, struct operand *op,
4597 decode_register_operand(ctxt, op);
4600 rc = decode_imm(ctxt, op, 1, false);
4603 ctxt->memop.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4605 *op = ctxt->memop;
4606 ctxt->memopp = op;
4607 if (ctxt->d & BitOp)
4608 fetch_bit_operand(ctxt);
4612 ctxt->memop.bytes = (ctxt->op_bytes == 8) ? 16 : 8;
4616 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4617 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX);
4623 op->bytes = (ctxt->d & ByteOp) ? 2 : ctxt->op_bytes;
4624 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX);
4629 if (ctxt->d & ByteOp) {
4634 op->bytes = ctxt->op_bytes;
4635 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX);
4641 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4643 register_address(ctxt, VCPU_REGS_RDI);
4651 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX);
4657 op->val = reg_read(ctxt, VCPU_REGS_RCX) & 0xff;
4660 rc = decode_imm(ctxt, op, 1, true);
4668 rc = decode_imm(ctxt, op, imm_size(ctxt), true);
4671 rc = decode_imm(ctxt, op, ctxt->op_bytes, true);
4674 ctxt->memop.bytes = 1;
4675 if (ctxt->memop.type == OP_REG) {
4676 ctxt->memop.addr.reg = decode_register(ctxt,
4677 ctxt->modrm_rm, true);
4678 fetch_register_operand(&ctxt->memop);
4682 ctxt->memop.bytes = 2;
4685 ctxt->memop.bytes = 4;
4688 rc = decode_imm(ctxt, op, 2, false);
4691 rc = decode_imm(ctxt, op, imm_size(ctxt), false);
4695 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4697 register_address(ctxt, VCPU_REGS_RSI);
4698 op->addr.mem.seg = ctxt->seg_override;
4704 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4706 address_mask(ctxt,
4707 reg_read(ctxt, VCPU_REGS_RBX) +
4708 (reg_read(ctxt, VCPU_REGS_RAX) & 0xff));
4709 op->addr.mem.seg = ctxt->seg_override;
4714 op->addr.mem.ea = ctxt->_eip;
4715 op->bytes = ctxt->op_bytes + 2;
4716 insn_fetch_arr(op->valptr, op->bytes, ctxt);
4719 ctxt->memop.bytes = ctxt->op_bytes + 2;
4756 int x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len, int emulation_type)
4759 int mode = ctxt->mode;
4767 ctxt->memop.type = OP_NONE;
4768 ctxt->memopp = NULL;
4769 ctxt->_eip = ctxt->eip;
4770 ctxt->fetch.ptr = ctxt->fetch.data;
4771 ctxt->fetch.end = ctxt->fetch.data + insn_len;
4772 ctxt->opcode_len = 1;
4773 ctxt->intercept = x86_intercept_none;
4775 memcpy(ctxt->fetch.data, insn, insn_len);
4777 rc = __do_insn_fetch_bytes(ctxt, 1);
4786 ctxt->ops->get_segment(ctxt, &dummy, &desc, NULL, VCPU_SREG_CS);
4806 ctxt->op_bytes = def_op_bytes;
4807 ctxt->ad_bytes = def_ad_bytes;
4811 switch (ctxt->b = insn_fetch(u8, ctxt)) {
4815 ctxt->op_bytes = def_op_bytes ^ 6;
4820 ctxt->ad_bytes = def_ad_bytes ^ 12;
4823 ctxt->ad_bytes = def_ad_bytes ^ 6;
4827 ctxt->seg_override = VCPU_SREG_ES;
4831 ctxt->seg_override = VCPU_SREG_CS;
4835 ctxt->seg_override = VCPU_SREG_SS;
4839 ctxt->seg_override = VCPU_SREG_DS;
4843 ctxt->seg_override = VCPU_SREG_FS;
4847 ctxt->seg_override = VCPU_SREG_GS;
4852 ctxt->rex_prefix = ctxt->b;
4855 ctxt->lock_prefix = 1;
4859 ctxt->rep_prefix = ctxt->b;
4867 ctxt->rex_prefix = 0;
4873 if (ctxt->rex_prefix & 8)
4874 ctxt->op_bytes = 8; /* REX.W */
4877 opcode = opcode_table[ctxt->b];
4879 if (ctxt->b == 0x0f) {
4880 ctxt->opcode_len = 2;
4881 ctxt->b = insn_fetch(u8, ctxt);
4882 opcode = twobyte_table[ctxt->b];
4885 if (ctxt->b == 0x38) {
4886 ctxt->opcode_len = 3;
4887 ctxt->b = insn_fetch(u8, ctxt);
4888 opcode = opcode_map_0f_38[ctxt->b];
4891 ctxt->d = opcode.flags;
4893 if (ctxt->d & ModRM)
4894 ctxt->modrm = insn_fetch(u8, ctxt);
4897 if (ctxt->opcode_len == 1 && (ctxt->b == 0xc5 || ctxt->b == 0xc4) &&
4898 (mode == X86EMUL_MODE_PROT64 || (ctxt->modrm & 0xc0) == 0xc0)) {
4899 ctxt->d = NotImpl;
4902 while (ctxt->d & GroupMask) {
4903 switch (ctxt->d & GroupMask) {
4905 goffset = (ctxt->modrm >> 3) & 7;
4909 goffset = (ctxt->modrm >> 3) & 7;
4910 if ((ctxt->modrm >> 6) == 3)
4916 goffset = ctxt->modrm & 7;
4920 if (ctxt->rep_prefix && op_prefix)
4922 simd_prefix = op_prefix ? 0x66 : ctxt->rep_prefix;
4931 if (ctxt->modrm > 0xbf) {
4934 ctxt->modrm - 0xc0, size);
4938 opcode = opcode.u.esc->op[(ctxt->modrm >> 3) & 7];
4942 if ((ctxt->modrm >> 6) == 3)
4948 if (ctxt->mode == X86EMUL_MODE_PROT64)
4957 ctxt->d &= ~(u64)GroupMask;
4958 ctxt->d |= opcode.flags;
4961 ctxt->is_branch = opcode.flags & IsBranch;
4964 if (ctxt->d == 0)
4967 ctxt->execute = opcode.u.execute;
4970 likely(!(ctxt->d & EmulateOnUD)))
4973 if (unlikely(ctxt->d &
4980 ctxt->check_perm = opcode.check_perm;
4981 ctxt->intercept = opcode.intercept;
4983 if (ctxt->d & NotImpl)
4987 if (ctxt->op_bytes == 4 && (ctxt->d & Stack))
4988 ctxt->op_bytes = 8;
4989 else if (ctxt->d & NearBranch)
4990 ctxt->op_bytes = 8;
4993 if (ctxt->d & Op3264) {
4995 ctxt->op_bytes = 8;
4997 ctxt->op_bytes = 4;
5000 if ((ctxt->d & No16) && ctxt->op_bytes == 2)
5001 ctxt->op_bytes = 4;
5003 if (ctxt->d & Sse)
5004 ctxt->op_bytes = 16;
5005 else if (ctxt->d & Mmx)
5006 ctxt->op_bytes = 8;
5010 if (ctxt->d & ModRM) {
5011 rc = decode_modrm(ctxt, &ctxt->memop);
5014 ctxt->seg_override = ctxt->modrm_seg;
5016 } else if (ctxt->d & MemAbs)
5017 rc = decode_abs(ctxt, &ctxt->memop);
5022 ctxt->seg_override = VCPU_SREG_DS;
5024 ctxt->memop.addr.mem.seg = ctxt->seg_override;
5030 rc = decode_operand(ctxt, &ctxt->src, (ctxt->d >> SrcShift) & OpMask);
5038 rc = decode_operand(ctxt, &ctxt->src2, (ctxt->d >> Src2Shift) & OpMask);
5043 rc = decode_operand(ctxt, &ctxt->dst, (ctxt->d >> DstShift) & OpMask);
5045 if (ctxt->rip_relative && likely(ctxt->memopp))
5046 ctxt->memopp->addr.mem.ea = address_mask(ctxt,
5047 ctxt->memopp->addr.mem.ea + ctxt->_eip);
5051 ctxt->have_exception = true;
5055 bool x86_page_table_writing_insn(struct x86_emulate_ctxt *ctxt)
5057 return ctxt->d & PageTable;
5060 static bool string_insn_completed(struct x86_emulate_ctxt *ctxt)
5069 if (((ctxt->b == 0xa6) || (ctxt->b == 0xa7) ||
5070 (ctxt->b == 0xae) || (ctxt->b == 0xaf))
5071 && (((ctxt->rep_prefix == REPE_PREFIX) &&
5072 ((ctxt->eflags & X86_EFLAGS_ZF) == 0))
5073 || ((ctxt->rep_prefix == REPNE_PREFIX) &&
5074 ((ctxt->eflags & X86_EFLAGS_ZF) == X86_EFLAGS_ZF))))
5080 static int flush_pending_x87_faults(struct x86_emulate_ctxt *ctxt)
5089 return emulate_exception(ctxt, MF_VECTOR, 0, false);
5100 static int fastop(struct x86_emulate_ctxt *ctxt, fastop_t fop)
5102 ulong flags = (ctxt->eflags & EFLAGS_MASK) | X86_EFLAGS_IF;
5104 if (!(ctxt->d & ByteOp))
5105 fop += __ffs(ctxt->dst.bytes) * FASTOP_SIZE;
5108 : "+a"(ctxt->dst.val), "+d"(ctxt->src.val), [flags]"+D"(flags),
5110 : "c"(ctxt->src2.val));
5112 ctxt->eflags = (ctxt->eflags & ~EFLAGS_MASK) | (flags & EFLAGS_MASK);
5114 return emulate_de(ctxt);
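
Lines 5102 and 5112 above show fastop() loading the guest's status flags into the host EFLAGS around the dispatched arithmetic stub and merging only the status bits back afterwards. A sketch of that merge; the exact EFLAGS_MASK value is not visible in this listing, so the mask below (CF|PF|AF|ZF|SF|OF) is an assumption:

#include <stdio.h>

#define STATUS_FLAGS 0x8d5ul   /* assumed: OF|SF|ZF|AF|PF|CF */

static unsigned long merge_status_flags(unsigned long guest_eflags,
					 unsigned long computed_flags)
{
	return (guest_eflags & ~STATUS_FLAGS) | (computed_flags & STATUS_FLAGS);
}

int main(void)
{
	/* guest had IF set and ZF clear; the stub computed ZF|CF */
	printf("%#lx\n", merge_status_flags(0x202, 0x41));   /* 0x243 */
	return 0;
}
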
5118 void init_decode_cache(struct x86_emulate_ctxt *ctxt)
5121 ctxt->rip_relative = false;
5122 ctxt->rex_prefix = 0;
5123 ctxt->lock_prefix = 0;
5124 ctxt->rep_prefix = 0;
5125 ctxt->regs_valid = 0;
5126 ctxt->regs_dirty = 0;
5128 ctxt->io_read.pos = 0;
5129 ctxt->io_read.end = 0;
5130 ctxt->mem_read.end = 0;
5133 int x86_emulate_insn(struct x86_emulate_ctxt *ctxt)
5135 const struct x86_emulate_ops *ops = ctxt->ops;
5137 int saved_dst_type = ctxt->dst.type;
5138 bool is_guest_mode = ctxt->ops->is_guest_mode(ctxt);
5140 ctxt->mem_read.pos = 0;
5143 if (ctxt->lock_prefix && (!(ctxt->d & Lock) || ctxt->dst.type != OP_MEM)) {
5144 rc = emulate_ud(ctxt);
5148 if ((ctxt->d & SrcMask) == SrcMemFAddr && ctxt->src.type != OP_MEM) {
5149 rc = emulate_ud(ctxt);
5153 if (unlikely(ctxt->d &
5155 if ((ctxt->mode == X86EMUL_MODE_PROT64 && (ctxt->d & No64)) ||
5156 (ctxt->d & Undefined)) {
5157 rc = emulate_ud(ctxt);
5161 if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM)))
5162 || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) {
5163 rc = emulate_ud(ctxt);
5167 if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) {
5168 rc = emulate_nm(ctxt);
5172 if (ctxt->d & Mmx) {
5173 rc = flush_pending_x87_faults(ctxt);
5180 fetch_possible_mmx_operand(&ctxt->src);
5181 fetch_possible_mmx_operand(&ctxt->src2);
5182 if (!(ctxt->d & Mov))
5183 fetch_possible_mmx_operand(&ctxt->dst);
5186 if (unlikely(is_guest_mode) && ctxt->intercept) {
5187 rc = emulator_check_intercept(ctxt, ctxt->intercept,
5194 if ((ctxt->d & Prot) && ctxt->mode < X86EMUL_MODE_PROT16) {
5195 rc = emulate_ud(ctxt);
5200 if ((ctxt->d & Priv) && ops->cpl(ctxt)) {
5201 if (ctxt->d & PrivUD)
5202 rc = emulate_ud(ctxt);
5204 rc = emulate_gp(ctxt, 0);
5209 if (ctxt->d & CheckPerm) {
5210 rc = ctxt->check_perm(ctxt);
5215 if (unlikely(is_guest_mode) && (ctxt->d & Intercept)) {
5216 rc = emulator_check_intercept(ctxt, ctxt->intercept,
5222 if (ctxt->rep_prefix && (ctxt->d & String)) {
5224 if (address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) == 0) {
5225 string_registers_quirk(ctxt);
5226 ctxt->eip = ctxt->_eip;
5227 ctxt->eflags &= ~X86_EFLAGS_RF;
5233 if ((ctxt->src.type == OP_MEM) && !(ctxt->d & NoAccess)) {
5234 rc = segmented_read(ctxt, ctxt->src.addr.mem,
5235 ctxt->src.valptr, ctxt->src.bytes);
5238 ctxt->src.orig_val64 = ctxt->src.val64;
5241 if (ctxt->src2.type == OP_MEM) {
5242 rc = segmented_read(ctxt, ctxt->src2.addr.mem,
5243 &ctxt->src2.val, ctxt->src2.bytes);
5248 if ((ctxt->d & DstMask) == ImplicitOps)
5252 if ((ctxt->dst.type == OP_MEM) && !(ctxt->d & Mov)) {
5254 rc = segmented_read(ctxt, ctxt->dst.addr.mem,
5255 &ctxt->dst.val, ctxt->dst.bytes);
5257 if (!(ctxt->d & NoWrite) &&
5259 ctxt->exception.vector == PF_VECTOR)
5260 ctxt->exception.error_code |= PFERR_WRITE_MASK;
5265 ctxt->dst.orig_val64 = ctxt->dst.val64;
5269 if (unlikely(is_guest_mode) && (ctxt->d & Intercept)) {
5270 rc = emulator_check_intercept(ctxt, ctxt->intercept,
5276 if (ctxt->rep_prefix && (ctxt->d & String))
5277 ctxt->eflags |= X86_EFLAGS_RF;
5279 ctxt->eflags &= ~X86_EFLAGS_RF;
5281 if (ctxt->execute) {
5282 if (ctxt->d & Fastop)
5283 rc = fastop(ctxt, ctxt->fop);
5285 rc = ctxt->execute(ctxt);
5291 if (ctxt->opcode_len == 2)
5293 else if (ctxt->opcode_len == 3)
5296 switch (ctxt->b) {
5298 if (test_cc(ctxt->b, ctxt->eflags))
5299 rc = jmp_rel(ctxt, ctxt->src.val);
5302 ctxt->dst.val = ctxt->src.addr.mem.ea;
5305 if (ctxt->dst.addr.reg == reg_rmw(ctxt, VCPU_REGS_RAX))
5306 ctxt->dst.type = OP_NONE;
5308 rc = em_xchg(ctxt);
5311 switch (ctxt->op_bytes) {
5312 case 2: ctxt->dst.val = (s8)ctxt->dst.val; break;
5313 case 4: ctxt->dst.val = (s16)ctxt->dst.val; break;
5314 case 8: ctxt->dst.val = (s32)ctxt->dst.val; break;
5318 rc = emulate_int(ctxt, 3);
5321 rc = emulate_int(ctxt, ctxt->src.val);
5324 if (ctxt->eflags & X86_EFLAGS_OF)
5325 rc = emulate_int(ctxt, 4);
5329 rc = jmp_rel(ctxt, ctxt->src.val);
5330 ctxt->dst.type = OP_NONE; /* Disable writeback. */
5333 ctxt->ops->halt(ctxt);
5337 ctxt->eflags ^= X86_EFLAGS_CF;
5340 ctxt->eflags &= ~X86_EFLAGS_CF;
5343 ctxt->eflags |= X86_EFLAGS_CF;
5346 ctxt->eflags &= ~X86_EFLAGS_DF;
5349 ctxt->eflags |= X86_EFLAGS_DF;
5359 if (ctxt->d & SrcWrite) {
5360 BUG_ON(ctxt->src.type == OP_MEM || ctxt->src.type == OP_MEM_STR);
5361 rc = writeback(ctxt, &ctxt->src);
5365 if (!(ctxt->d & NoWrite)) {
5366 rc = writeback(ctxt, &ctxt->dst);
5375 ctxt->dst.type = saved_dst_type;
5377 if ((ctxt->d & SrcMask) == SrcSI)
5378 string_addr_inc(ctxt, VCPU_REGS_RSI, &ctxt->src);
5380 if ((ctxt->d & DstMask) == DstDI)
5381 string_addr_inc(ctxt, VCPU_REGS_RDI, &ctxt->dst);
5383 if (ctxt->rep_prefix && (ctxt->d & String)) {
5385 struct read_cache *r = &ctxt->io_read;
5386 if ((ctxt->d & SrcMask) == SrcSI)
5387 count = ctxt->src.count;
5389 count = ctxt->dst.count;
5390 register_address_increment(ctxt, VCPU_REGS_RCX, -count);
5392 if (!string_insn_completed(ctxt)) {
5397 if ((r->end != 0 || reg_read(ctxt, VCPU_REGS_RCX) & 0x3ff) &&
5404 ctxt->mem_read.end = 0;
5405 writeback_registers(ctxt);
5410 ctxt->eflags &= ~X86_EFLAGS_RF;
5413 ctxt->eip = ctxt->_eip;
5414 if (ctxt->mode != X86EMUL_MODE_PROT64)
5415 ctxt->eip = (u32)ctxt->_eip;
5419 if (KVM_EMULATOR_BUG_ON(ctxt->exception.vector > 0x1f, ctxt))
5421 ctxt->have_exception = true;
5427 writeback_registers(ctxt);
5432 switch (ctxt->b) {
5434 (ctxt->ops->wbinvd)(ctxt);
5442 ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg);
5445 ctxt->dst.val = ops->get_dr(ctxt, ctxt->modrm_reg);
5448 if (test_cc(ctxt->b, ctxt->eflags))
5449 ctxt->dst.val = ctxt->src.val;
5450 else if (ctxt->op_bytes != 4)
5451 ctxt->dst.type = OP_NONE; /* no writeback */
5454 if (test_cc(ctxt->b, ctxt->eflags))
5455 rc = jmp_rel(ctxt, ctxt->src.val);
5458 ctxt->dst.val = test_cc(ctxt->b, ctxt->eflags);
5461 ctxt->dst.bytes = ctxt->op_bytes;
5462 ctxt->dst.val = (ctxt->src.bytes == 1) ? (u8) ctxt->src.val
5463 : (u16) ctxt->src.val;
5466 ctxt->dst.bytes = ctxt->op_bytes;
5467 ctxt->dst.val = (ctxt->src.bytes == 1) ? (s8) ctxt->src.val :
5468 (s16) ctxt->src.val;
5485 void emulator_invalidate_register_cache(struct x86_emulate_ctxt *ctxt)
5487 invalidate_registers(ctxt);
5490 void emulator_writeback_register_cache(struct x86_emulate_ctxt *ctxt)
5492 writeback_registers(ctxt);
5495 bool emulator_can_use_gpa(struct x86_emulate_ctxt *ctxt)
5497 if (ctxt->rep_prefix && (ctxt->d & String))
5500 if (ctxt->d & TwoMemOp)