Lines Matching refs:ctxt (arch/x86/kvm/emulate.c)
213 int (*execute)(struct x86_emulate_ctxt *ctxt);
222 int (*check_perm)(struct x86_emulate_ctxt *ctxt);
261 static ulong reg_read(struct x86_emulate_ctxt *ctxt, unsigned nr) in reg_read() argument
263 if (!(ctxt->regs_valid & (1 << nr))) { in reg_read()
264 ctxt->regs_valid |= 1 << nr; in reg_read()
265 ctxt->_regs[nr] = ctxt->ops->read_gpr(ctxt, nr); in reg_read()
267 return ctxt->_regs[nr]; in reg_read()
270 static ulong *reg_write(struct x86_emulate_ctxt *ctxt, unsigned nr) in reg_write() argument
272 ctxt->regs_valid |= 1 << nr; in reg_write()
273 ctxt->regs_dirty |= 1 << nr; in reg_write()
274 return &ctxt->_regs[nr]; in reg_write()
277 static ulong *reg_rmw(struct x86_emulate_ctxt *ctxt, unsigned nr) in reg_rmw() argument
279 reg_read(ctxt, nr); in reg_rmw()
280 return reg_write(ctxt, nr); in reg_rmw()
283 static void writeback_registers(struct x86_emulate_ctxt *ctxt) in writeback_registers() argument
287 for_each_set_bit(reg, (ulong *)&ctxt->regs_dirty, 16) in writeback_registers()
288 ctxt->ops->write_gpr(ctxt, reg, ctxt->_regs[reg]); in writeback_registers()
291 static void invalidate_registers(struct x86_emulate_ctxt *ctxt) in invalidate_registers() argument
293 ctxt->regs_dirty = 0; in invalidate_registers()
294 ctxt->regs_valid = 0; in invalidate_registers()
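
The five helpers above are the emulator's lazy GPR cache: reg_read() pulls a register from the vcpu on first use and marks it valid, reg_write() marks it dirty, reg_rmw() combines the two for read-modify-write operands, and writeback_registers() flushes only the dirty entries back through ops->write_gpr(). A minimal self-contained sketch of the same pattern, with a toy context and a plain backing array standing in for the real x86_emulate_ctxt and its ops table (all names below are illustrative, not the kernel's):

	#include <stdio.h>

	#define NR_REGS 16

	struct toy_ctxt {
		unsigned long regs[NR_REGS];    /* cached copies (_regs) */
		unsigned int  regs_valid;       /* bit n: regs[n] is fresh */
		unsigned int  regs_dirty;       /* bit n: regs[n] needs writeback */
		unsigned long backing[NR_REGS]; /* stands in for vcpu state */
	};

	static unsigned long reg_read(struct toy_ctxt *c, unsigned nr)
	{
		if (!(c->regs_valid & (1u << nr))) {
			c->regs_valid |= 1u << nr;
			c->regs[nr] = c->backing[nr]; /* ops->read_gpr() */
		}
		return c->regs[nr];
	}

	static unsigned long *reg_write(struct toy_ctxt *c, unsigned nr)
	{
		c->regs_valid |= 1u << nr;
		c->regs_dirty |= 1u << nr;
		return &c->regs[nr];
	}

	/* read-modify-write: fetch current value, hand out a writable slot */
	static unsigned long *reg_rmw(struct toy_ctxt *c, unsigned nr)
	{
		reg_read(c, nr);
		return reg_write(c, nr);
	}

	static void writeback_registers(struct toy_ctxt *c)
	{
		for (unsigned nr = 0; nr < NR_REGS; nr++)
			if (c->regs_dirty & (1u << nr))
				c->backing[nr] = c->regs[nr]; /* ops->write_gpr() */
	}

	int main(void)
	{
		struct toy_ctxt c = { .backing = { [3] = 42 } };

		*reg_rmw(&c, 3) += 1;            /* touch only register 3 */
		writeback_registers(&c);
		printf("%lu\n", c.backing[3]);   /* prints 43 */
		return 0;
	}

The point of the valid/dirty split is that an instruction touching one or two registers costs one or two read_gpr()/write_gpr() round trips rather than sixteen; invalidate_registers() resets both masks so stale cached values are never reused across emulated instructions.
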
310 static int fastop(struct x86_emulate_ctxt *ctxt, void (*fop)(struct fastop *));
437 static int emulator_check_intercept(struct x86_emulate_ctxt *ctxt, in emulator_check_intercept() argument
443 .rep_prefix = ctxt->rep_prefix, in emulator_check_intercept()
444 .modrm_mod = ctxt->modrm_mod, in emulator_check_intercept()
445 .modrm_reg = ctxt->modrm_reg, in emulator_check_intercept()
446 .modrm_rm = ctxt->modrm_rm, in emulator_check_intercept()
447 .src_val = ctxt->src.val64, in emulator_check_intercept()
448 .dst_val = ctxt->dst.val64, in emulator_check_intercept()
449 .src_bytes = ctxt->src.bytes, in emulator_check_intercept()
450 .dst_bytes = ctxt->dst.bytes, in emulator_check_intercept()
451 .ad_bytes = ctxt->ad_bytes, in emulator_check_intercept()
452 .next_rip = ctxt->eip, in emulator_check_intercept()
455 return ctxt->ops->intercept(ctxt, &info, stage); in emulator_check_intercept()
482 static inline unsigned long ad_mask(struct x86_emulate_ctxt *ctxt) in ad_mask() argument
484 return (1UL << (ctxt->ad_bytes << 3)) - 1; in ad_mask()
487 static ulong stack_mask(struct x86_emulate_ctxt *ctxt) in stack_mask() argument
492 if (ctxt->mode == X86EMUL_MODE_PROT64) in stack_mask()
494 ctxt->ops->get_segment(ctxt, &sel, &ss, NULL, VCPU_SREG_SS); in stack_mask()
498 static int stack_size(struct x86_emulate_ctxt *ctxt) in stack_size() argument
500 return (__fls(stack_mask(ctxt)) + 1) >> 3; in stack_size()
505 address_mask(struct x86_emulate_ctxt *ctxt, unsigned long reg) in address_mask() argument
507 if (ctxt->ad_bytes == sizeof(unsigned long)) in address_mask()
510 return reg & ad_mask(ctxt); in address_mask()
514 register_address(struct x86_emulate_ctxt *ctxt, int reg) in register_address() argument
516 return address_mask(ctxt, reg_read(ctxt, reg)); in register_address()
525 register_address_increment(struct x86_emulate_ctxt *ctxt, int reg, int inc) in register_address_increment() argument
527 ulong *preg = reg_rmw(ctxt, reg); in register_address_increment()
529 assign_register(preg, *preg + inc, ctxt->ad_bytes); in register_address_increment()
532 static void rsp_increment(struct x86_emulate_ctxt *ctxt, int inc) in rsp_increment() argument
534 masked_increment(reg_rmw(ctxt, VCPU_REGS_RSP), stack_mask(ctxt), inc); in rsp_increment()
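
ad_mask() converts the address-size attribute (ctxt->ad_bytes is 2, 4 or 8) into a mask: (1UL << (ad_bytes << 3)) - 1 gives 0xffff for 16-bit and 0xffffffff for 32-bit addressing, and address_mask() truncates a register value to that width, which is how SI/DI/SP wrap in 16- and 32-bit code. A worked example, assuming a 64-bit unsigned long (the ad_bytes == sizeof(unsigned long) early-out matters there, since 1UL << 64 is undefined and the full width needs no masking anyway):

	#include <assert.h>

	static unsigned long ad_mask(int ad_bytes)
	{
		return (1UL << (ad_bytes << 3)) - 1; /* 2 -> 0xffff, 4 -> 0xffffffff */
	}

	static unsigned long address_mask(int ad_bytes, unsigned long reg)
	{
		if (ad_bytes == sizeof(unsigned long)) /* full width: no mask */
			return reg;
		return reg & ad_mask(ad_bytes);
	}

	int main(void)
	{
		assert(ad_mask(2) == 0xffffUL);
		assert(ad_mask(4) == 0xffffffffUL);
		/* a 16-bit effective address wraps: 0x12345 is seen as 0x2345 */
		assert(address_mask(2, 0x12345UL) == 0x2345UL);
		return 0;
	}
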
544 static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg) in seg_base() argument
546 if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS) in seg_base()
549 return ctxt->ops->get_cached_segment_base(ctxt, seg); in seg_base()
552 static int emulate_exception(struct x86_emulate_ctxt *ctxt, int vec, in emulate_exception() argument
556 ctxt->exception.vector = vec; in emulate_exception()
557 ctxt->exception.error_code = error; in emulate_exception()
558 ctxt->exception.error_code_valid = valid; in emulate_exception()
562 static int emulate_db(struct x86_emulate_ctxt *ctxt) in emulate_db() argument
564 return emulate_exception(ctxt, DB_VECTOR, 0, false); in emulate_db()
567 static int emulate_gp(struct x86_emulate_ctxt *ctxt, int err) in emulate_gp() argument
569 return emulate_exception(ctxt, GP_VECTOR, err, true); in emulate_gp()
572 static int emulate_ss(struct x86_emulate_ctxt *ctxt, int err) in emulate_ss() argument
574 return emulate_exception(ctxt, SS_VECTOR, err, true); in emulate_ss()
577 static int emulate_ud(struct x86_emulate_ctxt *ctxt) in emulate_ud() argument
579 return emulate_exception(ctxt, UD_VECTOR, 0, false); in emulate_ud()
582 static int emulate_ts(struct x86_emulate_ctxt *ctxt, int err) in emulate_ts() argument
584 return emulate_exception(ctxt, TS_VECTOR, err, true); in emulate_ts()
587 static int emulate_de(struct x86_emulate_ctxt *ctxt) in emulate_de() argument
589 return emulate_exception(ctxt, DE_VECTOR, 0, false); in emulate_de()
592 static int emulate_nm(struct x86_emulate_ctxt *ctxt) in emulate_nm() argument
594 return emulate_exception(ctxt, NM_VECTOR, 0, false); in emulate_nm()
597 static u16 get_segment_selector(struct x86_emulate_ctxt *ctxt, unsigned seg) in get_segment_selector() argument
602 ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg); in get_segment_selector()
606 static void set_segment_selector(struct x86_emulate_ctxt *ctxt, u16 selector, in set_segment_selector() argument
613 ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg); in set_segment_selector()
614 ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg); in set_segment_selector()
625 static bool insn_aligned(struct x86_emulate_ctxt *ctxt, unsigned size) in insn_aligned() argument
630 if (ctxt->d & Aligned) in insn_aligned()
632 else if (ctxt->d & Unaligned) in insn_aligned()
634 else if (ctxt->d & Avx) in insn_aligned()
640 static __always_inline int __linearize(struct x86_emulate_ctxt *ctxt, in __linearize() argument
652 la = seg_base(ctxt, addr.seg) + addr.ea; in __linearize()
666 usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL, in __linearize()
671 if ((((ctxt->mode != X86EMUL_MODE_REAL) && (desc.type & 8)) in __linearize()
695 if (insn_aligned(ctxt, size) && ((la & (size - 1)) != 0)) in __linearize()
696 return emulate_gp(ctxt, 0); in __linearize()
700 return emulate_ss(ctxt, 0); in __linearize()
702 return emulate_gp(ctxt, 0); in __linearize()
705 static int linearize(struct x86_emulate_ctxt *ctxt, in linearize() argument
711 return __linearize(ctxt, addr, &max_size, size, write, false, in linearize()
712 ctxt->mode, linear); in linearize()
715 static inline int assign_eip(struct x86_emulate_ctxt *ctxt, ulong dst, in assign_eip() argument
724 if (ctxt->op_bytes != sizeof(unsigned long)) in assign_eip()
725 addr.ea = dst & ((1UL << (ctxt->op_bytes << 3)) - 1); in assign_eip()
726 rc = __linearize(ctxt, addr, &max_size, 1, false, true, mode, &linear); in assign_eip()
728 ctxt->_eip = addr.ea; in assign_eip()
732 static inline int assign_eip_near(struct x86_emulate_ctxt *ctxt, ulong dst) in assign_eip_near() argument
734 return assign_eip(ctxt, dst, ctxt->mode); in assign_eip_near()
737 static int assign_eip_far(struct x86_emulate_ctxt *ctxt, ulong dst, in assign_eip_far() argument
740 enum x86emul_mode mode = ctxt->mode; in assign_eip_far()
744 if (ctxt->mode >= X86EMUL_MODE_PROT16) { in assign_eip_far()
748 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in assign_eip_far()
757 rc = assign_eip(ctxt, dst, mode); in assign_eip_far()
759 ctxt->mode = mode; in assign_eip_far()
763 static inline int jmp_rel(struct x86_emulate_ctxt *ctxt, int rel) in jmp_rel() argument
765 return assign_eip_near(ctxt, ctxt->_eip + rel); in jmp_rel()
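
assign_eip() applies the same width rule to branch targets: the new IP is computed at full precision (jmp_rel() adds the displacement to ctxt->_eip first) and only then truncated to the operand size, so a 16-bit near jump wraps at 64K. A small sketch of just that arithmetic (the real function also re-validates the target through __linearize(), omitted here):

	#include <assert.h>

	/* Truncate a branch target to the current operand size. */
	static unsigned long assign_ip(unsigned long dst, int op_bytes)
	{
		if (op_bytes != sizeof(unsigned long))
			dst &= (1UL << (op_bytes << 3)) - 1;
		return dst;
	}

	int main(void)
	{
		unsigned long ip = 0xfff0;  /* as in jmp_rel(): add, then mask */
		long rel = 0x20;

		assert(assign_ip(ip + rel, 2) == 0x0010);  /* 16-bit wrap */
		assert(assign_ip(ip + rel, 4) == 0x10010); /* 32-bit: no wrap */
		return 0;
	}
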
768 static int segmented_read_std(struct x86_emulate_ctxt *ctxt, in segmented_read_std() argument
776 rc = linearize(ctxt, addr, size, false, &linear); in segmented_read_std()
779 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception); in segmented_read_std()
786 static int __do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt, int op_size) in __do_insn_fetch_bytes() argument
791 int cur_size = ctxt->fetch.end - ctxt->fetch.data; in __do_insn_fetch_bytes()
793 .ea = ctxt->eip + cur_size }; in __do_insn_fetch_bytes()
805 rc = __linearize(ctxt, addr, &max_size, 0, false, true, ctxt->mode, in __do_insn_fetch_bytes()
820 return emulate_gp(ctxt, 0); in __do_insn_fetch_bytes()
822 rc = ctxt->ops->fetch(ctxt, linear, ctxt->fetch.end, in __do_insn_fetch_bytes()
823 size, &ctxt->exception); in __do_insn_fetch_bytes()
826 ctxt->fetch.end += size; in __do_insn_fetch_bytes()
830 static __always_inline int do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt, in do_insn_fetch_bytes() argument
833 unsigned done_size = ctxt->fetch.end - ctxt->fetch.ptr; in do_insn_fetch_bytes()
836 return __do_insn_fetch_bytes(ctxt, size - done_size); in do_insn_fetch_bytes()
848 ctxt->_eip += sizeof(_type); \
849 _x = *(_type __aligned(1) *) ctxt->fetch.ptr; \
850 ctxt->fetch.ptr += sizeof(_type); \
859 ctxt->_eip += (_size); \
860 memcpy(_arr, ctxt->fetch.ptr, _size); \
861 ctxt->fetch.ptr += (_size); \
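
The insn_fetch()/insn_fetch_arr() macros consume bytes that __do_insn_fetch_bytes() has already pulled into the fetch window (fetch.data .. fetch.end), advancing both the window pointer and the decode-time _eip. A toy version of that consumption step, using GNU C statement expressions as the kernel macros do, and skipping the fault handling and refill logic:

	#include <stdio.h>
	#include <stdint.h>
	#include <string.h>

	struct fetch_cache {
		uint8_t data[15];     /* an x86 insn is at most 15 bytes */
		uint8_t *ptr;         /* next byte handed to the decoder */
		uint8_t *end;         /* one past the last prefetched byte */
		unsigned long ip;     /* decode-time instruction pointer */
	};

	#define insn_fetch(type, fc) ({                  \
		type _x;                                  \
		memcpy(&_x, (fc)->ptr, sizeof(type));     \
		(fc)->ptr += sizeof(type);                \
		(fc)->ip  += sizeof(type);                \
		_x;                                       \
	})

	int main(void)
	{
		struct fetch_cache f = { .data = { 0xb8, 0x78, 0x56, 0x34, 0x12 } };
		f.ptr = f.data;
		f.end = f.data + 5;

		uint8_t  opcode = insn_fetch(uint8_t, &f);  /* 0xb8: mov eax, imm32 */
		uint32_t imm    = insn_fetch(uint32_t, &f); /* 0x12345678 */

		printf("opcode %02x imm %08x ip +%lu\n", opcode, imm, f.ip);
		return 0;
	}

The memcpy() stands in for the *(_type __aligned(1) *) load above: immediates are not naturally aligned inside the byte stream, so the access must not assume alignment.
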
869 static void *decode_register(struct x86_emulate_ctxt *ctxt, u8 modrm_reg, in decode_register() argument
873 int highbyte_regs = (ctxt->rex_prefix == 0) && byteop; in decode_register()
876 p = (unsigned char *)reg_rmw(ctxt, modrm_reg & 3) + 1; in decode_register()
878 p = reg_rmw(ctxt, modrm_reg); in decode_register()
882 static int read_descriptor(struct x86_emulate_ctxt *ctxt, in read_descriptor() argument
891 rc = segmented_read_std(ctxt, addr, size, 2); in read_descriptor()
895 rc = segmented_read_std(ctxt, addr, address, op_bytes); in read_descriptor()
943 static int em_bsf_c(struct x86_emulate_ctxt *ctxt) in em_bsf_c() argument
946 if (ctxt->src.val == 0) in em_bsf_c()
947 ctxt->dst.type = OP_NONE; in em_bsf_c()
948 return fastop(ctxt, em_bsf); in em_bsf_c()
951 static int em_bsr_c(struct x86_emulate_ctxt *ctxt) in em_bsr_c() argument
954 if (ctxt->src.val == 0) in em_bsr_c()
955 ctxt->dst.type = OP_NONE; in em_bsr_c()
956 return fastop(ctxt, em_bsr); in em_bsr_c()
988 static void read_sse_reg(struct x86_emulate_ctxt *ctxt, sse128_t *data, int reg) in read_sse_reg() argument
990 ctxt->ops->get_fpu(ctxt); in read_sse_reg()
1012 ctxt->ops->put_fpu(ctxt); in read_sse_reg()
1015 static void write_sse_reg(struct x86_emulate_ctxt *ctxt, sse128_t *data, in write_sse_reg() argument
1018 ctxt->ops->get_fpu(ctxt); in write_sse_reg()
1040 ctxt->ops->put_fpu(ctxt); in write_sse_reg()
1043 static void read_mmx_reg(struct x86_emulate_ctxt *ctxt, u64 *data, int reg) in read_mmx_reg() argument
1045 ctxt->ops->get_fpu(ctxt); in read_mmx_reg()
1057 ctxt->ops->put_fpu(ctxt); in read_mmx_reg()
1060 static void write_mmx_reg(struct x86_emulate_ctxt *ctxt, u64 *data, int reg) in write_mmx_reg() argument
1062 ctxt->ops->get_fpu(ctxt); in write_mmx_reg()
1074 ctxt->ops->put_fpu(ctxt); in write_mmx_reg()
1077 static int em_fninit(struct x86_emulate_ctxt *ctxt) in em_fninit() argument
1079 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fninit()
1080 return emulate_nm(ctxt); in em_fninit()
1082 ctxt->ops->get_fpu(ctxt); in em_fninit()
1084 ctxt->ops->put_fpu(ctxt); in em_fninit()
1088 static int em_fnstcw(struct x86_emulate_ctxt *ctxt) in em_fnstcw() argument
1092 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fnstcw()
1093 return emulate_nm(ctxt); in em_fnstcw()
1095 ctxt->ops->get_fpu(ctxt); in em_fnstcw()
1097 ctxt->ops->put_fpu(ctxt); in em_fnstcw()
1099 ctxt->dst.val = fcw; in em_fnstcw()
1104 static int em_fnstsw(struct x86_emulate_ctxt *ctxt) in em_fnstsw() argument
1108 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fnstsw()
1109 return emulate_nm(ctxt); in em_fnstsw()
1111 ctxt->ops->get_fpu(ctxt); in em_fnstsw()
1113 ctxt->ops->put_fpu(ctxt); in em_fnstsw()
1115 ctxt->dst.val = fsw; in em_fnstsw()
1120 static void decode_register_operand(struct x86_emulate_ctxt *ctxt, in decode_register_operand() argument
1123 unsigned reg = ctxt->modrm_reg; in decode_register_operand()
1125 if (!(ctxt->d & ModRM)) in decode_register_operand()
1126 reg = (ctxt->b & 7) | ((ctxt->rex_prefix & 1) << 3); in decode_register_operand()
1128 if (ctxt->d & Sse) { in decode_register_operand()
1132 read_sse_reg(ctxt, &op->vec_val, reg); in decode_register_operand()
1135 if (ctxt->d & Mmx) { in decode_register_operand()
1144 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_register_operand()
1145 op->addr.reg = decode_register(ctxt, reg, ctxt->d & ByteOp); in decode_register_operand()
1151 static void adjust_modrm_seg(struct x86_emulate_ctxt *ctxt, int base_reg) in adjust_modrm_seg() argument
1154 ctxt->modrm_seg = VCPU_SREG_SS; in adjust_modrm_seg()
1157 static int decode_modrm(struct x86_emulate_ctxt *ctxt, in decode_modrm() argument
1165 ctxt->modrm_reg = ((ctxt->rex_prefix << 1) & 8); /* REX.R */ in decode_modrm()
1166 index_reg = (ctxt->rex_prefix << 2) & 8; /* REX.X */ in decode_modrm()
1167 base_reg = (ctxt->rex_prefix << 3) & 8; /* REX.B */ in decode_modrm()
1169 ctxt->modrm_mod = (ctxt->modrm & 0xc0) >> 6; in decode_modrm()
1170 ctxt->modrm_reg |= (ctxt->modrm & 0x38) >> 3; in decode_modrm()
1171 ctxt->modrm_rm = base_reg | (ctxt->modrm & 0x07); in decode_modrm()
1172 ctxt->modrm_seg = VCPU_SREG_DS; in decode_modrm()
1174 if (ctxt->modrm_mod == 3 || (ctxt->d & NoMod)) { in decode_modrm()
1176 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_modrm()
1177 op->addr.reg = decode_register(ctxt, ctxt->modrm_rm, in decode_modrm()
1178 ctxt->d & ByteOp); in decode_modrm()
1179 if (ctxt->d & Sse) { in decode_modrm()
1182 op->addr.xmm = ctxt->modrm_rm; in decode_modrm()
1183 read_sse_reg(ctxt, &op->vec_val, ctxt->modrm_rm); in decode_modrm()
1186 if (ctxt->d & Mmx) { in decode_modrm()
1189 op->addr.mm = ctxt->modrm_rm & 7; in decode_modrm()
1198 if (ctxt->ad_bytes == 2) { in decode_modrm()
1199 unsigned bx = reg_read(ctxt, VCPU_REGS_RBX); in decode_modrm()
1200 unsigned bp = reg_read(ctxt, VCPU_REGS_RBP); in decode_modrm()
1201 unsigned si = reg_read(ctxt, VCPU_REGS_RSI); in decode_modrm()
1202 unsigned di = reg_read(ctxt, VCPU_REGS_RDI); in decode_modrm()
1205 switch (ctxt->modrm_mod) { in decode_modrm()
1207 if (ctxt->modrm_rm == 6) in decode_modrm()
1208 modrm_ea += insn_fetch(u16, ctxt); in decode_modrm()
1211 modrm_ea += insn_fetch(s8, ctxt); in decode_modrm()
1214 modrm_ea += insn_fetch(u16, ctxt); in decode_modrm()
1217 switch (ctxt->modrm_rm) { in decode_modrm()
1237 if (ctxt->modrm_mod != 0) in decode_modrm()
1244 if (ctxt->modrm_rm == 2 || ctxt->modrm_rm == 3 || in decode_modrm()
1245 (ctxt->modrm_rm == 6 && ctxt->modrm_mod != 0)) in decode_modrm()
1246 ctxt->modrm_seg = VCPU_SREG_SS; in decode_modrm()
1250 if ((ctxt->modrm_rm & 7) == 4) { in decode_modrm()
1251 sib = insn_fetch(u8, ctxt); in decode_modrm()
1256 if ((base_reg & 7) == 5 && ctxt->modrm_mod == 0) in decode_modrm()
1257 modrm_ea += insn_fetch(s32, ctxt); in decode_modrm()
1259 modrm_ea += reg_read(ctxt, base_reg); in decode_modrm()
1260 adjust_modrm_seg(ctxt, base_reg); in decode_modrm()
1262 if ((ctxt->d & IncSP) && in decode_modrm()
1264 modrm_ea += ctxt->op_bytes; in decode_modrm()
1267 modrm_ea += reg_read(ctxt, index_reg) << scale; in decode_modrm()
1268 } else if ((ctxt->modrm_rm & 7) == 5 && ctxt->modrm_mod == 0) { in decode_modrm()
1269 modrm_ea += insn_fetch(s32, ctxt); in decode_modrm()
1270 if (ctxt->mode == X86EMUL_MODE_PROT64) in decode_modrm()
1271 ctxt->rip_relative = 1; in decode_modrm()
1273 base_reg = ctxt->modrm_rm; in decode_modrm()
1274 modrm_ea += reg_read(ctxt, base_reg); in decode_modrm()
1275 adjust_modrm_seg(ctxt, base_reg); in decode_modrm()
1277 switch (ctxt->modrm_mod) { in decode_modrm()
1279 modrm_ea += insn_fetch(s8, ctxt); in decode_modrm()
1282 modrm_ea += insn_fetch(s32, ctxt); in decode_modrm()
1287 if (ctxt->ad_bytes != 8) in decode_modrm()
1288 ctxt->memop.addr.mem.ea = (u32)ctxt->memop.addr.mem.ea; in decode_modrm()
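
The 16-bit arm of decode_modrm() above is the classic eight-entry base table: rm selects one of BX+SI, BX+DI, BP+SI, BP+DI, SI, DI, BP, BX, with mod choosing no displacement, disp8 or disp16 (and mod=0, rm=6 meaning a bare disp16 instead of BP). The same table, pulled out as a self-contained function with the registers passed in explicitly (a sketch, not the kernel's representation):

	#include <assert.h>
	#include <stdint.h>

	static uint16_t modrm16_ea(unsigned rm, uint16_t bx, uint16_t bp,
				   uint16_t si, uint16_t di, uint16_t disp)
	{
		uint16_t ea;

		switch (rm & 7) {
		case 0: ea = bx + si; break;
		case 1: ea = bx + di; break;
		case 2: ea = bp + si; break;   /* SS-relative by default */
		case 3: ea = bp + di; break;   /* SS-relative by default */
		case 4: ea = si;      break;
		case 5: ea = di;      break;
		case 6: ea = bp;      break;   /* unless mod=0: bare disp16 */
		default: ea = bx;     break;
		}
		return ea + disp;              /* wraps at 64K, like ad_mask(2) */
	}

	int main(void)
	{
		/* [bx+si+0x10] with bx=0x1000, si=0x0234 -> 0x1244 */
		assert(modrm16_ea(0, 0x1000, 0, 0x0234, 0, 0x10) == 0x1244);
		return 0;
	}

The rm == 2, 3 and 6 cases are why the code above switches modrm_seg to VCPU_SREG_SS: BP-based addressing defaults to the stack segment.
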
1294 static int decode_abs(struct x86_emulate_ctxt *ctxt, in decode_abs() argument
1300 switch (ctxt->ad_bytes) { in decode_abs()
1302 op->addr.mem.ea = insn_fetch(u16, ctxt); in decode_abs()
1305 op->addr.mem.ea = insn_fetch(u32, ctxt); in decode_abs()
1308 op->addr.mem.ea = insn_fetch(u64, ctxt); in decode_abs()
1315 static void fetch_bit_operand(struct x86_emulate_ctxt *ctxt) in fetch_bit_operand() argument
1319 if (ctxt->dst.type == OP_MEM && ctxt->src.type == OP_REG) { in fetch_bit_operand()
1320 mask = ~((long)ctxt->dst.bytes * 8 - 1); in fetch_bit_operand()
1322 if (ctxt->src.bytes == 2) in fetch_bit_operand()
1323 sv = (s16)ctxt->src.val & (s16)mask; in fetch_bit_operand()
1324 else if (ctxt->src.bytes == 4) in fetch_bit_operand()
1325 sv = (s32)ctxt->src.val & (s32)mask; in fetch_bit_operand()
1327 sv = (s64)ctxt->src.val & (s64)mask; in fetch_bit_operand()
1329 ctxt->dst.addr.mem.ea = address_mask(ctxt, in fetch_bit_operand()
1330 ctxt->dst.addr.mem.ea + (sv >> 3)); in fetch_bit_operand()
1334 ctxt->src.val &= (ctxt->dst.bytes << 3) - 1; in fetch_bit_operand()
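
fetch_bit_operand() handles BT/BTS/BTR/BTC with a register bit offset, which may address bits far outside (even before) the nominal operand: the offset is split into a signed, operand-aligned byte displacement added to the memory address, and a residual bit index within the operand. The split as a standalone sketch (it assumes arithmetic right shift of negative values, as the kernel does):

	#include <assert.h>

	static void split_bit_offset(long bitoff, int op_bytes,
				     long *byte_disp, unsigned *bit_index)
	{
		long mask = ~((long)op_bytes * 8 - 1);
		long sv = bitoff & mask;      /* whole-operand part, signed */

		*byte_disp = sv >> 3;         /* bits -> bytes, keeps the sign */
		*bit_index = bitoff & (op_bytes * 8 - 1);
	}

	int main(void)
	{
		long disp; unsigned idx;

		split_bit_offset(100, 4, &disp, &idx); /* bt dword [mem], 100 */
		assert(disp == 12 && idx == 4);        /* dword at +12, bit 4 */

		split_bit_offset(-1, 4, &disp, &idx);  /* negative offsets too */
		assert(disp == -4 && idx == 31);       /* dword at -4, bit 31 */
		return 0;
	}
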
1337 static int read_emulated(struct x86_emulate_ctxt *ctxt, in read_emulated() argument
1341 struct read_cache *mc = &ctxt->mem_read; in read_emulated()
1348 rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, size, in read_emulated()
1349 &ctxt->exception); in read_emulated()
1361 static int segmented_read(struct x86_emulate_ctxt *ctxt, in segmented_read() argument
1369 rc = linearize(ctxt, addr, size, false, &linear); in segmented_read()
1372 return read_emulated(ctxt, linear, data, size); in segmented_read()
1375 static int segmented_write(struct x86_emulate_ctxt *ctxt, in segmented_write() argument
1383 rc = linearize(ctxt, addr, size, true, &linear); in segmented_write()
1386 return ctxt->ops->write_emulated(ctxt, linear, data, size, in segmented_write()
1387 &ctxt->exception); in segmented_write()
1390 static int segmented_cmpxchg(struct x86_emulate_ctxt *ctxt, in segmented_cmpxchg() argument
1398 rc = linearize(ctxt, addr, size, true, &linear); in segmented_cmpxchg()
1401 return ctxt->ops->cmpxchg_emulated(ctxt, linear, orig_data, data, in segmented_cmpxchg()
1402 size, &ctxt->exception); in segmented_cmpxchg()
1405 static int pio_in_emulated(struct x86_emulate_ctxt *ctxt, in pio_in_emulated() argument
1409 struct read_cache *rc = &ctxt->io_read; in pio_in_emulated()
1413 unsigned int count = ctxt->rep_prefix ? in pio_in_emulated()
1414 address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) : 1; in pio_in_emulated()
1415 in_page = (ctxt->eflags & X86_EFLAGS_DF) ? in pio_in_emulated()
1416 offset_in_page(reg_read(ctxt, VCPU_REGS_RDI)) : in pio_in_emulated()
1417 PAGE_SIZE - offset_in_page(reg_read(ctxt, VCPU_REGS_RDI)); in pio_in_emulated()
1422 if (!ctxt->ops->pio_in_emulated(ctxt, size, port, rc->data, n)) in pio_in_emulated()
1427 if (ctxt->rep_prefix && (ctxt->d & String) && in pio_in_emulated()
1428 !(ctxt->eflags & X86_EFLAGS_DF)) { in pio_in_emulated()
1429 ctxt->dst.data = rc->data + rc->pos; in pio_in_emulated()
1430 ctxt->dst.type = OP_MEM_STR; in pio_in_emulated()
1431 ctxt->dst.count = (rc->end - rc->pos) / size; in pio_in_emulated()
1440 static int read_interrupt_descriptor(struct x86_emulate_ctxt *ctxt, in read_interrupt_descriptor() argument
1446 ctxt->ops->get_idt(ctxt, &dt); in read_interrupt_descriptor()
1449 return emulate_gp(ctxt, index << 3 | 0x2); in read_interrupt_descriptor()
1452 return ctxt->ops->read_std(ctxt, addr, desc, sizeof *desc, in read_interrupt_descriptor()
1453 &ctxt->exception); in read_interrupt_descriptor()
1456 static void get_descriptor_table_ptr(struct x86_emulate_ctxt *ctxt, in get_descriptor_table_ptr() argument
1459 const struct x86_emulate_ops *ops = ctxt->ops; in get_descriptor_table_ptr()
1467 if (!ops->get_segment(ctxt, &sel, &desc, &base3, in get_descriptor_table_ptr()
1474 ops->get_gdt(ctxt, dt); in get_descriptor_table_ptr()
1477 static int get_descriptor_ptr(struct x86_emulate_ctxt *ctxt, in get_descriptor_ptr() argument
1484 get_descriptor_table_ptr(ctxt, selector, &dt); in get_descriptor_ptr()
1487 return emulate_gp(ctxt, selector & 0xfffc); in get_descriptor_ptr()
1495 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in get_descriptor_ptr()
1506 static int read_segment_descriptor(struct x86_emulate_ctxt *ctxt, in read_segment_descriptor() argument
1512 rc = get_descriptor_ptr(ctxt, selector, desc_addr_p); in read_segment_descriptor()
1516 return ctxt->ops->read_std(ctxt, *desc_addr_p, desc, sizeof(*desc), in read_segment_descriptor()
1517 &ctxt->exception); in read_segment_descriptor()
1521 static int write_segment_descriptor(struct x86_emulate_ctxt *ctxt, in write_segment_descriptor() argument
1527 rc = get_descriptor_ptr(ctxt, selector, &addr); in write_segment_descriptor()
1531 return ctxt->ops->write_std(ctxt, addr, desc, sizeof *desc, in write_segment_descriptor()
1532 &ctxt->exception); in write_segment_descriptor()
1536 static int __load_segment_descriptor(struct x86_emulate_ctxt *ctxt, in __load_segment_descriptor() argument
1553 if (ctxt->mode == X86EMUL_MODE_REAL) { in __load_segment_descriptor()
1556 ctxt->ops->get_segment(ctxt, &dummy, &seg_desc, NULL, seg); in __load_segment_descriptor()
1559 } else if (seg <= VCPU_SREG_GS && ctxt->mode == X86EMUL_MODE_VM86) { in __load_segment_descriptor()
1575 && (ctxt->mode != X86EMUL_MODE_PROT64 || rpl != cpl)) in __load_segment_descriptor()
1587 ret = read_segment_descriptor(ctxt, selector, &seg_desc, &desc_addr); in __load_segment_descriptor()
1635 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in __load_segment_descriptor()
1648 ret = ctxt->ops->cmpxchg_emulated(ctxt, desc_addr, &old_desc, &seg_desc, in __load_segment_descriptor()
1649 sizeof(seg_desc), &ctxt->exception); in __load_segment_descriptor()
1674 ret = write_segment_descriptor(ctxt, selector, in __load_segment_descriptor()
1679 } else if (ctxt->mode == X86EMUL_MODE_PROT64) { in __load_segment_descriptor()
1680 ret = ctxt->ops->read_std(ctxt, desc_addr+8, &base3, in __load_segment_descriptor()
1681 sizeof(base3), &ctxt->exception); in __load_segment_descriptor()
1686 return emulate_gp(ctxt, 0); in __load_segment_descriptor()
1689 ctxt->ops->set_segment(ctxt, selector, &seg_desc, base3, seg); in __load_segment_descriptor()
1694 return emulate_exception(ctxt, err_vec, err_code, true); in __load_segment_descriptor()
1697 static int load_segment_descriptor(struct x86_emulate_ctxt *ctxt, in load_segment_descriptor() argument
1700 u8 cpl = ctxt->ops->cpl(ctxt); in load_segment_descriptor()
1701 return __load_segment_descriptor(ctxt, selector, seg, cpl, in load_segment_descriptor()
1710 static int writeback(struct x86_emulate_ctxt *ctxt, struct operand *op) in writeback() argument
1717 if (ctxt->lock_prefix) in writeback()
1718 return segmented_cmpxchg(ctxt, in writeback()
1724 return segmented_write(ctxt, in writeback()
1730 return segmented_write(ctxt, in writeback()
1736 write_sse_reg(ctxt, &op->vec_val, op->addr.xmm); in writeback()
1739 write_mmx_reg(ctxt, &op->mm_val, op->addr.mm); in writeback()
1750 static int push(struct x86_emulate_ctxt *ctxt, void *data, int bytes) in push() argument
1754 rsp_increment(ctxt, -bytes); in push()
1755 addr.ea = reg_read(ctxt, VCPU_REGS_RSP) & stack_mask(ctxt); in push()
1758 return segmented_write(ctxt, addr, data, bytes); in push()
1761 static int em_push(struct x86_emulate_ctxt *ctxt) in em_push() argument
1764 ctxt->dst.type = OP_NONE; in em_push()
1765 return push(ctxt, &ctxt->src.val, ctxt->op_bytes); in em_push()
1768 static int emulate_pop(struct x86_emulate_ctxt *ctxt, in emulate_pop() argument
1774 addr.ea = reg_read(ctxt, VCPU_REGS_RSP) & stack_mask(ctxt); in emulate_pop()
1776 rc = segmented_read(ctxt, addr, dest, len); in emulate_pop()
1780 rsp_increment(ctxt, len); in emulate_pop()
1784 static int em_pop(struct x86_emulate_ctxt *ctxt) in em_pop() argument
1786 return emulate_pop(ctxt, &ctxt->dst.val, ctxt->op_bytes); in em_pop()
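
push() and emulate_pop() implement the usual stack discipline on top of the masked stack pointer: a push decrements RSP (through rsp_increment(), masked to the SS width) before the store, a pop loads before incrementing. The same order of operations on a toy flat stack, assuming a 32-bit stack segment and no segmentation or fault checks:

	#include <assert.h>
	#include <stdint.h>
	#include <string.h>

	struct toy_stack {
		uint8_t  mem[64];
		uint32_t sp;               /* ESP as an offset into mem[] */
	};

	static void push(struct toy_stack *s, const void *data, int bytes)
	{
		s->sp -= bytes;            /* decrement first, then store */
		memcpy(&s->mem[s->sp], data, bytes);
	}

	static void pop(struct toy_stack *s, void *dest, int bytes)
	{
		memcpy(dest, &s->mem[s->sp], bytes);
		s->sp += bytes;            /* load first, then increment */
	}

	int main(void)
	{
		struct toy_stack s = { .sp = 64 };
		uint32_t v = 0xdeadbeef, out = 0;

		push(&s, &v, sizeof(v));
		pop(&s, &out, sizeof(out));
		assert(out == 0xdeadbeef && s.sp == 64);
		return 0;
	}
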
1789 static int emulate_popf(struct x86_emulate_ctxt *ctxt, in emulate_popf() argument
1794 int iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT; in emulate_popf()
1795 int cpl = ctxt->ops->cpl(ctxt); in emulate_popf()
1797 rc = emulate_pop(ctxt, &val, len); in emulate_popf()
1806 switch(ctxt->mode) { in emulate_popf()
1817 return emulate_gp(ctxt, 0); in emulate_popf()
1826 (ctxt->eflags & ~change_mask) | (val & change_mask); in emulate_popf()
1831 static int em_popf(struct x86_emulate_ctxt *ctxt) in em_popf() argument
1833 ctxt->dst.type = OP_REG; in em_popf()
1834 ctxt->dst.addr.reg = &ctxt->eflags; in em_popf()
1835 ctxt->dst.bytes = ctxt->op_bytes; in em_popf()
1836 return emulate_popf(ctxt, &ctxt->dst.val, ctxt->op_bytes); in em_popf()
1839 static int em_enter(struct x86_emulate_ctxt *ctxt) in em_enter() argument
1842 unsigned frame_size = ctxt->src.val; in em_enter()
1843 unsigned nesting_level = ctxt->src2.val & 31; in em_enter()
1849 rbp = reg_read(ctxt, VCPU_REGS_RBP); in em_enter()
1850 rc = push(ctxt, &rbp, stack_size(ctxt)); in em_enter()
1853 assign_masked(reg_rmw(ctxt, VCPU_REGS_RBP), reg_read(ctxt, VCPU_REGS_RSP), in em_enter()
1854 stack_mask(ctxt)); in em_enter()
1855 assign_masked(reg_rmw(ctxt, VCPU_REGS_RSP), in em_enter()
1856 reg_read(ctxt, VCPU_REGS_RSP) - frame_size, in em_enter()
1857 stack_mask(ctxt)); in em_enter()
1861 static int em_leave(struct x86_emulate_ctxt *ctxt) in em_leave() argument
1863 assign_masked(reg_rmw(ctxt, VCPU_REGS_RSP), reg_read(ctxt, VCPU_REGS_RBP), in em_leave()
1864 stack_mask(ctxt)); in em_leave()
1865 return emulate_pop(ctxt, reg_rmw(ctxt, VCPU_REGS_RBP), ctxt->op_bytes); in em_leave()
1868 static int em_push_sreg(struct x86_emulate_ctxt *ctxt) in em_push_sreg() argument
1870 int seg = ctxt->src2.val; in em_push_sreg()
1872 ctxt->src.val = get_segment_selector(ctxt, seg); in em_push_sreg()
1873 if (ctxt->op_bytes == 4) { in em_push_sreg()
1874 rsp_increment(ctxt, -2); in em_push_sreg()
1875 ctxt->op_bytes = 2; in em_push_sreg()
1878 return em_push(ctxt); in em_push_sreg()
1881 static int em_pop_sreg(struct x86_emulate_ctxt *ctxt) in em_pop_sreg() argument
1883 int seg = ctxt->src2.val; in em_pop_sreg()
1887 rc = emulate_pop(ctxt, &selector, 2); in em_pop_sreg()
1891 if (ctxt->modrm_reg == VCPU_SREG_SS) in em_pop_sreg()
1892 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS; in em_pop_sreg()
1893 if (ctxt->op_bytes > 2) in em_pop_sreg()
1894 rsp_increment(ctxt, ctxt->op_bytes - 2); in em_pop_sreg()
1896 rc = load_segment_descriptor(ctxt, (u16)selector, seg); in em_pop_sreg()
1900 static int em_pusha(struct x86_emulate_ctxt *ctxt) in em_pusha() argument
1902 unsigned long old_esp = reg_read(ctxt, VCPU_REGS_RSP); in em_pusha()
1908 (ctxt->src.val = old_esp) : (ctxt->src.val = reg_read(ctxt, reg)); in em_pusha()
1910 rc = em_push(ctxt); in em_pusha()
1920 static int em_pushf(struct x86_emulate_ctxt *ctxt) in em_pushf() argument
1922 ctxt->src.val = (unsigned long)ctxt->eflags & ~X86_EFLAGS_VM; in em_pushf()
1923 return em_push(ctxt); in em_pushf()
1926 static int em_popa(struct x86_emulate_ctxt *ctxt) in em_popa() argument
1934 rsp_increment(ctxt, ctxt->op_bytes); in em_popa()
1938 rc = emulate_pop(ctxt, &val, ctxt->op_bytes); in em_popa()
1941 assign_register(reg_rmw(ctxt, reg), val, ctxt->op_bytes); in em_popa()
1947 static int __emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq) in __emulate_int_real() argument
1949 const struct x86_emulate_ops *ops = ctxt->ops; in __emulate_int_real()
1957 ctxt->src.val = ctxt->eflags; in __emulate_int_real()
1958 rc = em_push(ctxt); in __emulate_int_real()
1962 ctxt->eflags &= ~(X86_EFLAGS_IF | X86_EFLAGS_TF | X86_EFLAGS_AC); in __emulate_int_real()
1964 ctxt->src.val = get_segment_selector(ctxt, VCPU_SREG_CS); in __emulate_int_real()
1965 rc = em_push(ctxt); in __emulate_int_real()
1969 ctxt->src.val = ctxt->_eip; in __emulate_int_real()
1970 rc = em_push(ctxt); in __emulate_int_real()
1974 ops->get_idt(ctxt, &dt); in __emulate_int_real()
1979 rc = ops->read_std(ctxt, cs_addr, &cs, 2, &ctxt->exception); in __emulate_int_real()
1983 rc = ops->read_std(ctxt, eip_addr, &eip, 2, &ctxt->exception); in __emulate_int_real()
1987 rc = load_segment_descriptor(ctxt, cs, VCPU_SREG_CS); in __emulate_int_real()
1991 ctxt->_eip = eip; in __emulate_int_real()
1996 int emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq) in emulate_int_real() argument
2000 invalidate_registers(ctxt); in emulate_int_real()
2001 rc = __emulate_int_real(ctxt, irq); in emulate_int_real()
2003 writeback_registers(ctxt); in emulate_int_real()
2007 static int emulate_int(struct x86_emulate_ctxt *ctxt, int irq) in emulate_int() argument
2009 switch(ctxt->mode) { in emulate_int()
2011 return __emulate_int_real(ctxt, irq); in emulate_int()
2022 static int emulate_iret_real(struct x86_emulate_ctxt *ctxt) in emulate_iret_real() argument
2039 rc = emulate_pop(ctxt, &temp_eip, ctxt->op_bytes); in emulate_iret_real()
2045 return emulate_gp(ctxt, 0); in emulate_iret_real()
2047 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes); in emulate_iret_real()
2052 rc = emulate_pop(ctxt, &temp_eflags, ctxt->op_bytes); in emulate_iret_real()
2057 rc = load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS); in emulate_iret_real()
2062 ctxt->_eip = temp_eip; in emulate_iret_real()
2064 if (ctxt->op_bytes == 4) in emulate_iret_real()
2065 ctxt->eflags = ((temp_eflags & mask) | (ctxt->eflags & vm86_mask)); in emulate_iret_real()
2066 else if (ctxt->op_bytes == 2) { in emulate_iret_real()
2067 ctxt->eflags &= ~0xffff; in emulate_iret_real()
2068 ctxt->eflags |= temp_eflags; in emulate_iret_real()
2071 ctxt->eflags &= ~EFLG_RESERVED_ZEROS_MASK; /* Clear reserved zeros */ in emulate_iret_real()
2072 ctxt->eflags |= X86_EFLAGS_FIXED; in emulate_iret_real()
2073 ctxt->ops->set_nmi_mask(ctxt, false); in emulate_iret_real()
2078 static int em_iret(struct x86_emulate_ctxt *ctxt) in em_iret() argument
2080 switch(ctxt->mode) { in em_iret()
2082 return emulate_iret_real(ctxt); in em_iret()
2093 static int em_jmp_far(struct x86_emulate_ctxt *ctxt) in em_jmp_far() argument
2098 const struct x86_emulate_ops *ops = ctxt->ops; in em_jmp_far()
2099 u8 cpl = ctxt->ops->cpl(ctxt); in em_jmp_far()
2102 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_jmp_far()
2103 ops->get_segment(ctxt, &old_sel, &old_desc, NULL, in em_jmp_far()
2106 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_jmp_far()
2108 rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl, in em_jmp_far()
2114 rc = assign_eip_far(ctxt, ctxt->src.val, &new_desc); in em_jmp_far()
2116 WARN_ON(ctxt->mode != X86EMUL_MODE_PROT64); in em_jmp_far()
2118 ops->set_segment(ctxt, old_sel, &old_desc, 0, VCPU_SREG_CS); in em_jmp_far()
2124 static int em_jmp_abs(struct x86_emulate_ctxt *ctxt) in em_jmp_abs() argument
2126 return assign_eip_near(ctxt, ctxt->src.val); in em_jmp_abs()
2129 static int em_call_near_abs(struct x86_emulate_ctxt *ctxt) in em_call_near_abs() argument
2134 old_eip = ctxt->_eip; in em_call_near_abs()
2135 rc = assign_eip_near(ctxt, ctxt->src.val); in em_call_near_abs()
2138 ctxt->src.val = old_eip; in em_call_near_abs()
2139 rc = em_push(ctxt); in em_call_near_abs()
2143 static int em_cmpxchg8b(struct x86_emulate_ctxt *ctxt) in em_cmpxchg8b() argument
2145 u64 old = ctxt->dst.orig_val64; in em_cmpxchg8b()
2147 if (ctxt->dst.bytes == 16) in em_cmpxchg8b()
2150 if (((u32) (old >> 0) != (u32) reg_read(ctxt, VCPU_REGS_RAX)) || in em_cmpxchg8b()
2151 ((u32) (old >> 32) != (u32) reg_read(ctxt, VCPU_REGS_RDX))) { in em_cmpxchg8b()
2152 *reg_write(ctxt, VCPU_REGS_RAX) = (u32) (old >> 0); in em_cmpxchg8b()
2153 *reg_write(ctxt, VCPU_REGS_RDX) = (u32) (old >> 32); in em_cmpxchg8b()
2154 ctxt->eflags &= ~X86_EFLAGS_ZF; in em_cmpxchg8b()
2156 ctxt->dst.val64 = ((u64)reg_read(ctxt, VCPU_REGS_RCX) << 32) | in em_cmpxchg8b()
2157 (u32) reg_read(ctxt, VCPU_REGS_RBX); in em_cmpxchg8b()
2159 ctxt->eflags |= X86_EFLAGS_ZF; in em_cmpxchg8b()
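
em_cmpxchg8b() performs the CMPXCHG8B comparison in the emulator itself: EDX:EAX is compared against the 64-bit memory operand; on a match ECX:EBX is stored and ZF set, otherwise the operand is loaded back into EDX:EAX and ZF cleared. The semantics minus the emulator plumbing (a plain in-memory sketch, not atomic):

	#include <assert.h>
	#include <stdbool.h>
	#include <stdint.h>

	static bool cmpxchg8b(uint64_t *mem, uint32_t *eax, uint32_t *edx,
			      uint32_t ebx, uint32_t ecx)
	{
		uint64_t expected = ((uint64_t)*edx << 32) | *eax;

		if (*mem == expected) {
			*mem = ((uint64_t)ecx << 32) | ebx;
			return true;            /* ZF set */
		}
		*eax = (uint32_t)*mem;          /* failure: reload EDX:EAX */
		*edx = (uint32_t)(*mem >> 32);
		return false;                   /* ZF clear */
	}

	int main(void)
	{
		uint64_t m = 0x1111111122222222ull;
		uint32_t eax = 0x22222222, edx = 0x11111111;

		assert(cmpxchg8b(&m, &eax, &edx, 0xbbbbbbbb, 0xaaaaaaaa));
		assert(m == 0xaaaaaaaabbbbbbbbull);
		return 0;
	}

The dst.bytes == 16 test above filters out the CMPXCHG16B form, which takes a separate path.
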
2164 static int em_ret(struct x86_emulate_ctxt *ctxt) in em_ret() argument
2169 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret()
2173 return assign_eip_near(ctxt, eip); in em_ret()
2176 static int em_ret_far(struct x86_emulate_ctxt *ctxt) in em_ret_far() argument
2181 int cpl = ctxt->ops->cpl(ctxt); in em_ret_far()
2183 const struct x86_emulate_ops *ops = ctxt->ops; in em_ret_far()
2185 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_ret_far()
2186 ops->get_segment(ctxt, &old_cs, &old_desc, NULL, in em_ret_far()
2189 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret_far()
2192 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes); in em_ret_far()
2196 if (ctxt->mode >= X86EMUL_MODE_PROT16 && (cs & 3) > cpl) in em_ret_far()
2198 rc = __load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS, cpl, in em_ret_far()
2203 rc = assign_eip_far(ctxt, eip, &new_desc); in em_ret_far()
2205 WARN_ON(ctxt->mode != X86EMUL_MODE_PROT64); in em_ret_far()
2206 ops->set_segment(ctxt, old_cs, &old_desc, 0, VCPU_SREG_CS); in em_ret_far()
2211 static int em_ret_far_imm(struct x86_emulate_ctxt *ctxt) in em_ret_far_imm() argument
2215 rc = em_ret_far(ctxt); in em_ret_far_imm()
2218 rsp_increment(ctxt, ctxt->src.val); in em_ret_far_imm()
2222 static int em_cmpxchg(struct x86_emulate_ctxt *ctxt) in em_cmpxchg() argument
2225 ctxt->dst.orig_val = ctxt->dst.val; in em_cmpxchg()
2226 ctxt->dst.val = reg_read(ctxt, VCPU_REGS_RAX); in em_cmpxchg()
2227 ctxt->src.orig_val = ctxt->src.val; in em_cmpxchg()
2228 ctxt->src.val = ctxt->dst.orig_val; in em_cmpxchg()
2229 fastop(ctxt, em_cmp); in em_cmpxchg()
2231 if (ctxt->eflags & X86_EFLAGS_ZF) { in em_cmpxchg()
2233 ctxt->src.type = OP_NONE; in em_cmpxchg()
2234 ctxt->dst.val = ctxt->src.orig_val; in em_cmpxchg()
2237 ctxt->src.type = OP_REG; in em_cmpxchg()
2238 ctxt->src.addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in em_cmpxchg()
2239 ctxt->src.val = ctxt->dst.orig_val; in em_cmpxchg()
2241 ctxt->dst.val = ctxt->dst.orig_val; in em_cmpxchg()
2246 static int em_lseg(struct x86_emulate_ctxt *ctxt) in em_lseg() argument
2248 int seg = ctxt->src2.val; in em_lseg()
2252 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_lseg()
2254 rc = load_segment_descriptor(ctxt, sel, seg); in em_lseg()
2258 ctxt->dst.val = ctxt->src.val; in em_lseg()
2262 static int emulator_has_longmode(struct x86_emulate_ctxt *ctxt) in emulator_has_longmode() argument
2268 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx); in emulator_has_longmode()
2275 int r = ctxt->ops->read_phys(ctxt, smbase + offset, &__val, \
2294 static int rsm_load_seg_32(struct x86_emulate_ctxt *ctxt, u64 smbase, int n) in rsm_load_seg_32() argument
2310 ctxt->ops->set_segment(ctxt, selector, &desc, 0, n); in rsm_load_seg_32()
2314 static int rsm_load_seg_64(struct x86_emulate_ctxt *ctxt, u64 smbase, int n) in rsm_load_seg_64() argument
2329 ctxt->ops->set_segment(ctxt, selector, &desc, base3, n); in rsm_load_seg_64()
2333 static int rsm_enter_protected_mode(struct x86_emulate_ctxt *ctxt, in rsm_enter_protected_mode() argument
2343 bad = ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PCIDE); in rsm_enter_protected_mode()
2347 bad = ctxt->ops->set_cr(ctxt, 0, cr0); in rsm_enter_protected_mode()
2352 bad = ctxt->ops->set_cr(ctxt, 4, cr4); in rsm_enter_protected_mode()
2360 static int rsm_load_state_32(struct x86_emulate_ctxt *ctxt, u64 smbase) in rsm_load_state_32() argument
2369 ctxt->ops->set_cr(ctxt, 3, GET_SMSTATE(u32, smbase, 0x7ff8)); in rsm_load_state_32()
2370 ctxt->eflags = GET_SMSTATE(u32, smbase, 0x7ff4) | X86_EFLAGS_FIXED; in rsm_load_state_32()
2371 ctxt->_eip = GET_SMSTATE(u32, smbase, 0x7ff0); in rsm_load_state_32()
2374 *reg_write(ctxt, i) = GET_SMSTATE(u32, smbase, 0x7fd0 + i * 4); in rsm_load_state_32()
2377 ctxt->ops->set_dr(ctxt, 6, (val & DR6_VOLATILE) | DR6_FIXED_1); in rsm_load_state_32()
2379 ctxt->ops->set_dr(ctxt, 7, (val & DR7_VOLATILE) | DR7_FIXED_1); in rsm_load_state_32()
2385 ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_TR); in rsm_load_state_32()
2391 ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_LDTR); in rsm_load_state_32()
2395 ctxt->ops->set_gdt(ctxt, &dt); in rsm_load_state_32()
2399 ctxt->ops->set_idt(ctxt, &dt); in rsm_load_state_32()
2402 int r = rsm_load_seg_32(ctxt, smbase, i); in rsm_load_state_32()
2409 ctxt->ops->set_smbase(ctxt, GET_SMSTATE(u32, smbase, 0x7ef8)); in rsm_load_state_32()
2411 return rsm_enter_protected_mode(ctxt, cr0, cr4); in rsm_load_state_32()
2414 static int rsm_load_state_64(struct x86_emulate_ctxt *ctxt, u64 smbase) in rsm_load_state_64() argument
2424 *reg_write(ctxt, i) = GET_SMSTATE(u64, smbase, 0x7ff8 - i * 8); in rsm_load_state_64()
2426 ctxt->_eip = GET_SMSTATE(u64, smbase, 0x7f78); in rsm_load_state_64()
2427 ctxt->eflags = GET_SMSTATE(u32, smbase, 0x7f70) | X86_EFLAGS_FIXED; in rsm_load_state_64()
2430 ctxt->ops->set_dr(ctxt, 6, (val & DR6_VOLATILE) | DR6_FIXED_1); in rsm_load_state_64()
2432 ctxt->ops->set_dr(ctxt, 7, (val & DR7_VOLATILE) | DR7_FIXED_1); in rsm_load_state_64()
2435 ctxt->ops->set_cr(ctxt, 3, GET_SMSTATE(u64, smbase, 0x7f50)); in rsm_load_state_64()
2437 ctxt->ops->set_smbase(ctxt, GET_SMSTATE(u32, smbase, 0x7f00)); in rsm_load_state_64()
2439 ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA); in rsm_load_state_64()
2446 ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_TR); in rsm_load_state_64()
2450 ctxt->ops->set_idt(ctxt, &dt); in rsm_load_state_64()
2457 ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_LDTR); in rsm_load_state_64()
2461 ctxt->ops->set_gdt(ctxt, &dt); in rsm_load_state_64()
2463 r = rsm_enter_protected_mode(ctxt, cr0, cr4); in rsm_load_state_64()
2468 r = rsm_load_seg_64(ctxt, smbase, i); in rsm_load_state_64()
2476 static int em_rsm(struct x86_emulate_ctxt *ctxt) in em_rsm() argument
2482 if ((ctxt->emul_flags & X86EMUL_SMM_MASK) == 0) in em_rsm()
2483 return emulate_ud(ctxt); in em_rsm()
2490 cr4 = ctxt->ops->get_cr(ctxt, 4); in em_rsm()
2491 if (emulator_has_longmode(ctxt)) { in em_rsm()
2496 ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PCIDE); in em_rsm()
2504 ctxt->ops->set_segment(ctxt, 0, &cs_desc, 0, VCPU_SREG_CS); in em_rsm()
2508 cr0 = ctxt->ops->get_cr(ctxt, 0); in em_rsm()
2510 ctxt->ops->set_cr(ctxt, 0, cr0 & ~(X86_CR0_PG | X86_CR0_PE)); in em_rsm()
2514 ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PAE); in em_rsm()
2518 ctxt->ops->set_msr(ctxt, MSR_EFER, efer); in em_rsm()
2520 smbase = ctxt->ops->get_smbase(ctxt); in em_rsm()
2521 if (emulator_has_longmode(ctxt)) in em_rsm()
2522 ret = rsm_load_state_64(ctxt, smbase + 0x8000); in em_rsm()
2524 ret = rsm_load_state_32(ctxt, smbase + 0x8000); in em_rsm()
2531 if ((ctxt->emul_flags & X86EMUL_SMM_INSIDE_NMI_MASK) == 0) in em_rsm()
2532 ctxt->ops->set_nmi_mask(ctxt, false); in em_rsm()
2534 ctxt->emul_flags &= ~X86EMUL_SMM_INSIDE_NMI_MASK; in em_rsm()
2535 ctxt->emul_flags &= ~X86EMUL_SMM_MASK; in em_rsm()
2540 setup_syscalls_segments(struct x86_emulate_ctxt *ctxt, in setup_syscalls_segments() argument
2566 static bool vendor_intel(struct x86_emulate_ctxt *ctxt) in vendor_intel() argument
2571 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx); in vendor_intel()
2577 static bool em_syscall_is_enabled(struct x86_emulate_ctxt *ctxt) in em_syscall_is_enabled() argument
2579 const struct x86_emulate_ops *ops = ctxt->ops; in em_syscall_is_enabled()
2586 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_syscall_is_enabled()
2591 ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx); in em_syscall_is_enabled()
2621 static int em_syscall(struct x86_emulate_ctxt *ctxt) in em_syscall() argument
2623 const struct x86_emulate_ops *ops = ctxt->ops; in em_syscall()
2630 if (ctxt->mode == X86EMUL_MODE_REAL || in em_syscall()
2631 ctxt->mode == X86EMUL_MODE_VM86) in em_syscall()
2632 return emulate_ud(ctxt); in em_syscall()
2634 if (!(em_syscall_is_enabled(ctxt))) in em_syscall()
2635 return emulate_ud(ctxt); in em_syscall()
2637 ops->get_msr(ctxt, MSR_EFER, &efer); in em_syscall()
2638 setup_syscalls_segments(ctxt, &cs, &ss); in em_syscall()
2641 return emulate_ud(ctxt); in em_syscall()
2643 ops->get_msr(ctxt, MSR_STAR, &msr_data); in em_syscall()
2652 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_syscall()
2653 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_syscall()
2655 *reg_write(ctxt, VCPU_REGS_RCX) = ctxt->_eip; in em_syscall()
2658 *reg_write(ctxt, VCPU_REGS_R11) = ctxt->eflags; in em_syscall()
2660 ops->get_msr(ctxt, in em_syscall()
2661 ctxt->mode == X86EMUL_MODE_PROT64 ? in em_syscall()
2663 ctxt->_eip = msr_data; in em_syscall()
2665 ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data); in em_syscall()
2666 ctxt->eflags &= ~msr_data; in em_syscall()
2667 ctxt->eflags |= X86_EFLAGS_FIXED; in em_syscall()
2671 ops->get_msr(ctxt, MSR_STAR, &msr_data); in em_syscall()
2672 ctxt->_eip = (u32)msr_data; in em_syscall()
2674 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF); in em_syscall()
2680 static int em_sysenter(struct x86_emulate_ctxt *ctxt) in em_sysenter() argument
2682 const struct x86_emulate_ops *ops = ctxt->ops; in em_sysenter()
2688 ops->get_msr(ctxt, MSR_EFER, &efer); in em_sysenter()
2690 if (ctxt->mode == X86EMUL_MODE_REAL) in em_sysenter()
2691 return emulate_gp(ctxt, 0); in em_sysenter()
2697 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA) in em_sysenter()
2698 && !vendor_intel(ctxt)) in em_sysenter()
2699 return emulate_ud(ctxt); in em_sysenter()
2702 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_sysenter()
2705 setup_syscalls_segments(ctxt, &cs, &ss); in em_sysenter()
2707 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data); in em_sysenter()
2709 return emulate_gp(ctxt, 0); in em_sysenter()
2711 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF); in em_sysenter()
2719 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_sysenter()
2720 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_sysenter()
2722 ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data); in em_sysenter()
2723 ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data; in em_sysenter()
2725 ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data); in em_sysenter()
2726 *reg_write(ctxt, VCPU_REGS_RSP) = (efer & EFER_LMA) ? msr_data : in em_sysenter()
2732 static int em_sysexit(struct x86_emulate_ctxt *ctxt) in em_sysexit() argument
2734 const struct x86_emulate_ops *ops = ctxt->ops; in em_sysexit()
2741 if (ctxt->mode == X86EMUL_MODE_REAL || in em_sysexit()
2742 ctxt->mode == X86EMUL_MODE_VM86) in em_sysexit()
2743 return emulate_gp(ctxt, 0); in em_sysexit()
2745 setup_syscalls_segments(ctxt, &cs, &ss); in em_sysexit()
2747 if ((ctxt->rex_prefix & 0x8) != 0x0) in em_sysexit()
2752 rcx = reg_read(ctxt, VCPU_REGS_RCX); in em_sysexit()
2753 rdx = reg_read(ctxt, VCPU_REGS_RDX); in em_sysexit()
2757 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data); in em_sysexit()
2762 return emulate_gp(ctxt, 0); in em_sysexit()
2770 return emulate_gp(ctxt, 0); in em_sysexit()
2776 return emulate_gp(ctxt, 0); in em_sysexit()
2782 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_sysexit()
2783 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_sysexit()
2785 ctxt->_eip = rdx; in em_sysexit()
2786 *reg_write(ctxt, VCPU_REGS_RSP) = rcx; in em_sysexit()
2791 static bool emulator_bad_iopl(struct x86_emulate_ctxt *ctxt) in emulator_bad_iopl() argument
2794 if (ctxt->mode == X86EMUL_MODE_REAL) in emulator_bad_iopl()
2796 if (ctxt->mode == X86EMUL_MODE_VM86) in emulator_bad_iopl()
2798 iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT; in emulator_bad_iopl()
2799 return ctxt->ops->cpl(ctxt) > iopl; in emulator_bad_iopl()
2802 static bool emulator_io_port_access_allowed(struct x86_emulate_ctxt *ctxt, in emulator_io_port_access_allowed() argument
2805 const struct x86_emulate_ops *ops = ctxt->ops; in emulator_io_port_access_allowed()
2813 ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR); in emulator_io_port_access_allowed()
2822 r = ops->read_std(ctxt, base + 102, &io_bitmap_ptr, 2, NULL); in emulator_io_port_access_allowed()
2827 r = ops->read_std(ctxt, base + io_bitmap_ptr + port/8, &perm, 2, NULL); in emulator_io_port_access_allowed()
2835 static bool emulator_io_permited(struct x86_emulate_ctxt *ctxt, in emulator_io_permited() argument
2838 if (ctxt->perm_ok) in emulator_io_permited()
2841 if (emulator_bad_iopl(ctxt)) in emulator_io_permited()
2842 if (!emulator_io_port_access_allowed(ctxt, port, len)) in emulator_io_permited()
2845 ctxt->perm_ok = true; in emulator_io_permited()
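
Taken together, the two predicates above implement the architectural port-permission rule: if CPL <= IOPL the access is allowed outright; otherwise every bit covering the port range in the TSS I/O permission bitmap must be clear. A condensed sketch with the bitmap passed in directly (the real code first walks ops->get_segment()/read_std() to locate the bitmap inside the TSS, and caches a positive answer in ctxt->perm_ok):

	#include <assert.h>
	#include <stdbool.h>
	#include <stdint.h>

	static bool io_permitted(int cpl, unsigned long eflags,
				 const uint8_t *io_bitmap, uint16_t port, int len)
	{
		int iopl = (eflags >> 12) & 3;   /* the X86_EFLAGS_IOPL field */

		if (cpl <= iopl)
			return true;
		for (int i = 0; i < len; i++, port++)  /* one bit per port */
			if (io_bitmap[port / 8] & (1 << (port % 8)))
				return false;
		return true;
	}

	int main(void)
	{
		static uint8_t bitmap[8192];

		bitmap[0x70 / 8] |= 1 << (0x70 % 8);          /* deny port 0x70 */
		assert(io_permitted(0, 0, bitmap, 0x70, 1));  /* CPL 0: ok */
		assert(!io_permitted(3, 0, bitmap, 0x70, 1)); /* bit set: denied */
		assert(io_permitted(3, 0, bitmap, 0x71, 1));  /* bit clear: ok */
		return 0;
	}
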
2850 static void string_registers_quirk(struct x86_emulate_ctxt *ctxt) in string_registers_quirk() argument
2857 if (ctxt->ad_bytes != 4 || !vendor_intel(ctxt)) in string_registers_quirk()
2860 *reg_write(ctxt, VCPU_REGS_RCX) = 0; in string_registers_quirk()
2862 switch (ctxt->b) { in string_registers_quirk()
2865 *reg_rmw(ctxt, VCPU_REGS_RSI) &= (u32)-1; in string_registers_quirk()
2869 *reg_rmw(ctxt, VCPU_REGS_RDI) &= (u32)-1; in string_registers_quirk()
2874 static void save_state_to_tss16(struct x86_emulate_ctxt *ctxt, in save_state_to_tss16() argument
2877 tss->ip = ctxt->_eip; in save_state_to_tss16()
2878 tss->flag = ctxt->eflags; in save_state_to_tss16()
2879 tss->ax = reg_read(ctxt, VCPU_REGS_RAX); in save_state_to_tss16()
2880 tss->cx = reg_read(ctxt, VCPU_REGS_RCX); in save_state_to_tss16()
2881 tss->dx = reg_read(ctxt, VCPU_REGS_RDX); in save_state_to_tss16()
2882 tss->bx = reg_read(ctxt, VCPU_REGS_RBX); in save_state_to_tss16()
2883 tss->sp = reg_read(ctxt, VCPU_REGS_RSP); in save_state_to_tss16()
2884 tss->bp = reg_read(ctxt, VCPU_REGS_RBP); in save_state_to_tss16()
2885 tss->si = reg_read(ctxt, VCPU_REGS_RSI); in save_state_to_tss16()
2886 tss->di = reg_read(ctxt, VCPU_REGS_RDI); in save_state_to_tss16()
2888 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES); in save_state_to_tss16()
2889 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS); in save_state_to_tss16()
2890 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS); in save_state_to_tss16()
2891 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS); in save_state_to_tss16()
2892 tss->ldt = get_segment_selector(ctxt, VCPU_SREG_LDTR); in save_state_to_tss16()
2895 static int load_state_from_tss16(struct x86_emulate_ctxt *ctxt, in load_state_from_tss16() argument
2901 ctxt->_eip = tss->ip; in load_state_from_tss16()
2902 ctxt->eflags = tss->flag | 2; in load_state_from_tss16()
2903 *reg_write(ctxt, VCPU_REGS_RAX) = tss->ax; in load_state_from_tss16()
2904 *reg_write(ctxt, VCPU_REGS_RCX) = tss->cx; in load_state_from_tss16()
2905 *reg_write(ctxt, VCPU_REGS_RDX) = tss->dx; in load_state_from_tss16()
2906 *reg_write(ctxt, VCPU_REGS_RBX) = tss->bx; in load_state_from_tss16()
2907 *reg_write(ctxt, VCPU_REGS_RSP) = tss->sp; in load_state_from_tss16()
2908 *reg_write(ctxt, VCPU_REGS_RBP) = tss->bp; in load_state_from_tss16()
2909 *reg_write(ctxt, VCPU_REGS_RSI) = tss->si; in load_state_from_tss16()
2910 *reg_write(ctxt, VCPU_REGS_RDI) = tss->di; in load_state_from_tss16()
2916 set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR); in load_state_from_tss16()
2917 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES); in load_state_from_tss16()
2918 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS); in load_state_from_tss16()
2919 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS); in load_state_from_tss16()
2920 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS); in load_state_from_tss16()
2928 ret = __load_segment_descriptor(ctxt, tss->ldt, VCPU_SREG_LDTR, cpl, in load_state_from_tss16()
2932 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl, in load_state_from_tss16()
2936 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl, in load_state_from_tss16()
2940 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl, in load_state_from_tss16()
2944 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl, in load_state_from_tss16()
2952 static int task_switch_16(struct x86_emulate_ctxt *ctxt, in task_switch_16() argument
2956 const struct x86_emulate_ops *ops = ctxt->ops; in task_switch_16()
2961 ret = ops->read_std(ctxt, old_tss_base, &tss_seg, sizeof tss_seg, in task_switch_16()
2962 &ctxt->exception); in task_switch_16()
2966 save_state_to_tss16(ctxt, &tss_seg); in task_switch_16()
2968 ret = ops->write_std(ctxt, old_tss_base, &tss_seg, sizeof tss_seg, in task_switch_16()
2969 &ctxt->exception); in task_switch_16()
2973 ret = ops->read_std(ctxt, new_tss_base, &tss_seg, sizeof tss_seg, in task_switch_16()
2974 &ctxt->exception); in task_switch_16()
2981 ret = ops->write_std(ctxt, new_tss_base, in task_switch_16()
2984 &ctxt->exception); in task_switch_16()
2989 return load_state_from_tss16(ctxt, &tss_seg); in task_switch_16()
2992 static void save_state_to_tss32(struct x86_emulate_ctxt *ctxt, in save_state_to_tss32() argument
2996 tss->eip = ctxt->_eip; in save_state_to_tss32()
2997 tss->eflags = ctxt->eflags; in save_state_to_tss32()
2998 tss->eax = reg_read(ctxt, VCPU_REGS_RAX); in save_state_to_tss32()
2999 tss->ecx = reg_read(ctxt, VCPU_REGS_RCX); in save_state_to_tss32()
3000 tss->edx = reg_read(ctxt, VCPU_REGS_RDX); in save_state_to_tss32()
3001 tss->ebx = reg_read(ctxt, VCPU_REGS_RBX); in save_state_to_tss32()
3002 tss->esp = reg_read(ctxt, VCPU_REGS_RSP); in save_state_to_tss32()
3003 tss->ebp = reg_read(ctxt, VCPU_REGS_RBP); in save_state_to_tss32()
3004 tss->esi = reg_read(ctxt, VCPU_REGS_RSI); in save_state_to_tss32()
3005 tss->edi = reg_read(ctxt, VCPU_REGS_RDI); in save_state_to_tss32()
3007 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES); in save_state_to_tss32()
3008 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS); in save_state_to_tss32()
3009 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS); in save_state_to_tss32()
3010 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS); in save_state_to_tss32()
3011 tss->fs = get_segment_selector(ctxt, VCPU_SREG_FS); in save_state_to_tss32()
3012 tss->gs = get_segment_selector(ctxt, VCPU_SREG_GS); in save_state_to_tss32()
3015 static int load_state_from_tss32(struct x86_emulate_ctxt *ctxt, in load_state_from_tss32() argument
3021 if (ctxt->ops->set_cr(ctxt, 3, tss->cr3)) in load_state_from_tss32()
3022 return emulate_gp(ctxt, 0); in load_state_from_tss32()
3023 ctxt->_eip = tss->eip; in load_state_from_tss32()
3024 ctxt->eflags = tss->eflags | 2; in load_state_from_tss32()
3027 *reg_write(ctxt, VCPU_REGS_RAX) = tss->eax; in load_state_from_tss32()
3028 *reg_write(ctxt, VCPU_REGS_RCX) = tss->ecx; in load_state_from_tss32()
3029 *reg_write(ctxt, VCPU_REGS_RDX) = tss->edx; in load_state_from_tss32()
3030 *reg_write(ctxt, VCPU_REGS_RBX) = tss->ebx; in load_state_from_tss32()
3031 *reg_write(ctxt, VCPU_REGS_RSP) = tss->esp; in load_state_from_tss32()
3032 *reg_write(ctxt, VCPU_REGS_RBP) = tss->ebp; in load_state_from_tss32()
3033 *reg_write(ctxt, VCPU_REGS_RSI) = tss->esi; in load_state_from_tss32()
3034 *reg_write(ctxt, VCPU_REGS_RDI) = tss->edi; in load_state_from_tss32()
3041 set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR); in load_state_from_tss32()
3042 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES); in load_state_from_tss32()
3043 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS); in load_state_from_tss32()
3044 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS); in load_state_from_tss32()
3045 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS); in load_state_from_tss32()
3046 set_segment_selector(ctxt, tss->fs, VCPU_SREG_FS); in load_state_from_tss32()
3047 set_segment_selector(ctxt, tss->gs, VCPU_SREG_GS); in load_state_from_tss32()
3054 if (ctxt->eflags & X86_EFLAGS_VM) { in load_state_from_tss32()
3055 ctxt->mode = X86EMUL_MODE_VM86; in load_state_from_tss32()
3058 ctxt->mode = X86EMUL_MODE_PROT32; in load_state_from_tss32()
3066 ret = __load_segment_descriptor(ctxt, tss->ldt_selector, VCPU_SREG_LDTR, in load_state_from_tss32()
3070 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl, in load_state_from_tss32()
3074 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl, in load_state_from_tss32()
3078 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl, in load_state_from_tss32()
3082 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl, in load_state_from_tss32()
3086 ret = __load_segment_descriptor(ctxt, tss->fs, VCPU_SREG_FS, cpl, in load_state_from_tss32()
3090 ret = __load_segment_descriptor(ctxt, tss->gs, VCPU_SREG_GS, cpl, in load_state_from_tss32()
3096 static int task_switch_32(struct x86_emulate_ctxt *ctxt, in task_switch_32() argument
3100 const struct x86_emulate_ops *ops = ctxt->ops; in task_switch_32()
3107 ret = ops->read_std(ctxt, old_tss_base, &tss_seg, sizeof tss_seg, in task_switch_32()
3108 &ctxt->exception); in task_switch_32()
3112 save_state_to_tss32(ctxt, &tss_seg); in task_switch_32()
3115 ret = ops->write_std(ctxt, old_tss_base + eip_offset, &tss_seg.eip, in task_switch_32()
3116 ldt_sel_offset - eip_offset, &ctxt->exception); in task_switch_32()
3120 ret = ops->read_std(ctxt, new_tss_base, &tss_seg, sizeof tss_seg, in task_switch_32()
3121 &ctxt->exception); in task_switch_32()
3128 ret = ops->write_std(ctxt, new_tss_base, in task_switch_32()
3131 &ctxt->exception); in task_switch_32()
3136 return load_state_from_tss32(ctxt, &tss_seg); in task_switch_32()
3139 static int emulator_do_task_switch(struct x86_emulate_ctxt *ctxt, in emulator_do_task_switch() argument
3143 const struct x86_emulate_ops *ops = ctxt->ops; in emulator_do_task_switch()
3146 u16 old_tss_sel = get_segment_selector(ctxt, VCPU_SREG_TR); in emulator_do_task_switch()
3148 ops->get_cached_segment_base(ctxt, VCPU_SREG_TR); in emulator_do_task_switch()
3154 ret = read_segment_descriptor(ctxt, tss_selector, &next_tss_desc, &desc_addr); in emulator_do_task_switch()
3157 ret = read_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc, &desc_addr); in emulator_do_task_switch()
3177 ret = read_interrupt_descriptor(ctxt, idt_index, in emulator_do_task_switch()
3183 if ((tss_selector & 3) > dpl || ops->cpl(ctxt) > dpl) in emulator_do_task_switch()
3184 return emulate_gp(ctxt, (idt_index << 3) | 0x2); in emulator_do_task_switch()
3192 return emulate_ts(ctxt, tss_selector & 0xfffc); in emulator_do_task_switch()
3197 write_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc); in emulator_do_task_switch()
3201 ctxt->eflags = ctxt->eflags & ~X86_EFLAGS_NT; in emulator_do_task_switch()
3209 ret = task_switch_32(ctxt, tss_selector, old_tss_sel, in emulator_do_task_switch()
3212 ret = task_switch_16(ctxt, tss_selector, old_tss_sel, in emulator_do_task_switch()
3218 ctxt->eflags = ctxt->eflags | X86_EFLAGS_NT; in emulator_do_task_switch()
3222 write_segment_descriptor(ctxt, tss_selector, &next_tss_desc); in emulator_do_task_switch()
3225 ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS); in emulator_do_task_switch()
3226 ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR); in emulator_do_task_switch()
3229 ctxt->op_bytes = ctxt->ad_bytes = (next_tss_desc.type & 8) ? 4 : 2; in emulator_do_task_switch()
3230 ctxt->lock_prefix = 0; in emulator_do_task_switch()
3231 ctxt->src.val = (unsigned long) error_code; in emulator_do_task_switch()
3232 ret = em_push(ctxt); in emulator_do_task_switch()
3235 ops->get_dr(ctxt, 7, &dr7); in emulator_do_task_switch()
3236 ops->set_dr(ctxt, 7, dr7 & ~(DR_LOCAL_ENABLE_MASK | DR_LOCAL_SLOWDOWN)); in emulator_do_task_switch()
3241 int emulator_task_switch(struct x86_emulate_ctxt *ctxt, in emulator_task_switch() argument
3247 invalidate_registers(ctxt); in emulator_task_switch()
3248 ctxt->_eip = ctxt->eip; in emulator_task_switch()
3249 ctxt->dst.type = OP_NONE; in emulator_task_switch()
3251 rc = emulator_do_task_switch(ctxt, tss_selector, idt_index, reason, in emulator_task_switch()
3255 ctxt->eip = ctxt->_eip; in emulator_task_switch()
3256 writeback_registers(ctxt); in emulator_task_switch()
3262 static void string_addr_inc(struct x86_emulate_ctxt *ctxt, int reg, in string_addr_inc() argument
3265 int df = (ctxt->eflags & X86_EFLAGS_DF) ? -op->count : op->count; in string_addr_inc()
3267 register_address_increment(ctxt, reg, df * op->bytes); in string_addr_inc()
3268 op->addr.mem.ea = register_address(ctxt, reg); in string_addr_inc()
3271 static int em_das(struct x86_emulate_ctxt *ctxt) in em_das() argument
3276 cf = ctxt->eflags & X86_EFLAGS_CF; in em_das()
3277 al = ctxt->dst.val; in em_das()
3282 af = ctxt->eflags & X86_EFLAGS_AF; in em_das()
3295 ctxt->dst.val = al; in em_das()
3297 ctxt->src.type = OP_IMM; in em_das()
3298 ctxt->src.val = 0; in em_das()
3299 ctxt->src.bytes = 1; in em_das()
3300 fastop(ctxt, em_or); in em_das()
3301 ctxt->eflags &= ~(X86_EFLAGS_AF | X86_EFLAGS_CF); in em_das()
3303 ctxt->eflags |= X86_EFLAGS_CF; in em_das()
3305 ctxt->eflags |= X86_EFLAGS_AF; in em_das()
3309 static int em_aam(struct x86_emulate_ctxt *ctxt) in em_aam() argument
3313 if (ctxt->src.val == 0) in em_aam()
3314 return emulate_de(ctxt); in em_aam()
3316 al = ctxt->dst.val & 0xff; in em_aam()
3317 ah = al / ctxt->src.val; in em_aam()
3318 al %= ctxt->src.val; in em_aam()
3320 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al | (ah << 8); in em_aam()
3323 ctxt->src.type = OP_IMM; in em_aam()
3324 ctxt->src.val = 0; in em_aam()
3325 ctxt->src.bytes = 1; in em_aam()
3326 fastop(ctxt, em_or); in em_aam()
3331 static int em_aad(struct x86_emulate_ctxt *ctxt) in em_aad() argument
3333 u8 al = ctxt->dst.val & 0xff; in em_aad()
3334 u8 ah = (ctxt->dst.val >> 8) & 0xff; in em_aad()
3336 al = (al + (ah * ctxt->src.val)) & 0xff; in em_aad()
3338 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al; in em_aad()
3341 ctxt->src.type = OP_IMM; in em_aad()
3342 ctxt->src.val = 0; in em_aad()
3343 ctxt->src.bytes = 1; in em_aad()
3344 fastop(ctxt, em_or); in em_aad()
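
em_aam() and em_aad() implement the two ASCII adjusts over AH:AL with an arbitrary immediate base (10 in the classic encodings); AAM with a zero immediate raises #DE, checked at line 3313. Both then reuse the same OR-with-zero fastop trick as em_das() to derive ZF, SF and PF from the result. The core arithmetic, standalone:

/* AAM splits AL into base-'base' digits; AAD folds AH:AL back into AL.
 * The returned value is the new AH:AL pair.  For aam(), the caller must
 * have rejected base == 0 with #DE, as em_aam() does. */
static unsigned short aam(unsigned char al, unsigned char base)
{
	return (unsigned short)(((al / base) << 8) | (al % base));
}

static unsigned short aad(unsigned char al, unsigned char ah, unsigned char base)
{
	return (unsigned char)(al + ah * base);	/* AH becomes zero */
}
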
3349 static int em_call(struct x86_emulate_ctxt *ctxt) in em_call() argument
3352 long rel = ctxt->src.val; in em_call()
3354 ctxt->src.val = (unsigned long)ctxt->_eip; in em_call()
3355 rc = jmp_rel(ctxt, rel); in em_call()
3358 return em_push(ctxt); in em_call()
3361 static int em_call_far(struct x86_emulate_ctxt *ctxt) in em_call_far() argument
3367 const struct x86_emulate_ops *ops = ctxt->ops; in em_call_far()
3368 int cpl = ctxt->ops->cpl(ctxt); in em_call_far()
3369 enum x86emul_mode prev_mode = ctxt->mode; in em_call_far()
3371 old_eip = ctxt->_eip; in em_call_far()
3372 ops->get_segment(ctxt, &old_cs, &old_desc, NULL, VCPU_SREG_CS); in em_call_far()
3374 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_call_far()
3375 rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl, in em_call_far()
3380 rc = assign_eip_far(ctxt, ctxt->src.val, &new_desc); in em_call_far()
3384 ctxt->src.val = old_cs; in em_call_far()
3385 rc = em_push(ctxt); in em_call_far()
3389 ctxt->src.val = old_eip; in em_call_far()
3390 rc = em_push(ctxt); in em_call_far()
3399 ops->set_segment(ctxt, old_cs, &old_desc, 0, VCPU_SREG_CS); in em_call_far()
3400 ctxt->mode = prev_mode; in em_call_far()
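
em_call_far() is careful about ordering: it loads the new CS and EIP first, and only then pushes the old CS and EIP, so a fault on either push (say, a not-present stack page) reaches the rollback at lines 3399-3400 and the guest can retry the instruction from a clean state. A toy, self-contained illustration of that commit-then-undo shape (the types and helpers here are invented for the sketch, not KVM's):

#include <stdint.h>

/* Toy VCPU state, purely illustrative.  sp starts at 8 (empty stack). */
struct toy_cpu {
	uint16_t cs;
	uint64_t eip;
	int      sp;
	uint64_t stack[8];
};

static int toy_push(struct toy_cpu *c, uint64_t v)
{
	if (c->sp == 0)
		return -1;		/* stands in for #SS/#PF on the push */
	c->stack[--c->sp] = v;
	return 0;
}

/* Mirrors em_call_far(): commit the new CS:EIP first, undo on failure. */
static int toy_call_far(struct toy_cpu *c, uint16_t new_cs, uint64_t new_eip)
{
	uint16_t old_cs = c->cs;
	uint64_t old_eip = c->eip;

	c->cs = new_cs;		/* in the emulator, a descriptor load */
	c->eip = new_eip;

	if (toy_push(c, old_cs) || toy_push(c, old_eip)) {
		c->cs = old_cs;	/* roll back so the guest can retry */
		c->eip = old_eip;
		return -1;
	}
	return 0;
}
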
3405 static int em_ret_near_imm(struct x86_emulate_ctxt *ctxt) in em_ret_near_imm() argument
3410 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret_near_imm()
3413 rc = assign_eip_near(ctxt, eip); in em_ret_near_imm()
3416 rsp_increment(ctxt, ctxt->src.val); in em_ret_near_imm()
3420 static int em_xchg(struct x86_emulate_ctxt *ctxt) in em_xchg() argument
3423 ctxt->src.val = ctxt->dst.val; in em_xchg()
3424 write_register_operand(&ctxt->src); in em_xchg()
3427 ctxt->dst.val = ctxt->src.orig_val; in em_xchg()
3428 ctxt->lock_prefix = 1; in em_xchg()
3432 static int em_imul_3op(struct x86_emulate_ctxt *ctxt) in em_imul_3op() argument
3434 ctxt->dst.val = ctxt->src2.val; in em_imul_3op()
3435 return fastop(ctxt, em_imul); in em_imul_3op()
3438 static int em_cwd(struct x86_emulate_ctxt *ctxt) in em_cwd() argument
3440 ctxt->dst.type = OP_REG; in em_cwd()
3441 ctxt->dst.bytes = ctxt->src.bytes; in em_cwd()
3442 ctxt->dst.addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in em_cwd()
3443 ctxt->dst.val = ~((ctxt->src.val >> (ctxt->src.bytes * 8 - 1)) - 1); in em_cwd()
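
em_cwd() fills DX/EDX/RDX with the sign of the source using a branch-free identity: shifting the value right by width-1 leaves 0 or 1, subtracting 1 gives all-ones or zero, and the final complement flips that into 0 or all-ones. The same trick in isolation, assuming 'val' is zero-extended to its 'bytes' width as ctxt->src.val is:

/* 0 if the sign bit of a 'bytes'-wide value is clear, ~0UL if it is set. */
static unsigned long sign_fill(unsigned long val, unsigned bytes)
{
	return ~((val >> (bytes * 8 - 1)) - 1);
}
/* sign_fill(0x8000, 2) == ~0UL (AX negative), sign_fill(0x7fff, 2) == 0 */
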
3448 static int em_rdtsc(struct x86_emulate_ctxt *ctxt) in em_rdtsc() argument
3452 ctxt->ops->get_msr(ctxt, MSR_IA32_TSC, &tsc); in em_rdtsc()
3453 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)tsc; in em_rdtsc()
3454 *reg_write(ctxt, VCPU_REGS_RDX) = tsc >> 32; in em_rdtsc()
3458 static int em_rdpmc(struct x86_emulate_ctxt *ctxt) in em_rdpmc() argument
3462 if (ctxt->ops->read_pmc(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &pmc)) in em_rdpmc()
3463 return emulate_gp(ctxt, 0); in em_rdpmc()
3464 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)pmc; in em_rdpmc()
3465 *reg_write(ctxt, VCPU_REGS_RDX) = pmc >> 32; in em_rdpmc()
3469 static int em_mov(struct x86_emulate_ctxt *ctxt) in em_mov() argument
3471 memcpy(ctxt->dst.valptr, ctxt->src.valptr, sizeof(ctxt->src.valptr)); in em_mov()
3477 static int em_movbe(struct x86_emulate_ctxt *ctxt) in em_movbe() argument
3485 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx); in em_movbe()
3487 return emulate_ud(ctxt); in em_movbe()
3489 switch (ctxt->op_bytes) { in em_movbe()
3499 tmp = (u16)ctxt->src.val; in em_movbe()
3500 ctxt->dst.val &= ~0xffffUL; in em_movbe()
3501 ctxt->dst.val |= (unsigned long)swab16(tmp); in em_movbe()
3504 ctxt->dst.val = swab32((u32)ctxt->src.val); in em_movbe()
3507 ctxt->dst.val = swab64(ctxt->src.val); in em_movbe()
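
em_movbe() refuses to run unless the guest's CPUID advertises MOVBE (line 3485 onward), then byte-swaps at the current operand size. The 16-bit case cannot simply overwrite the destination: like any 16-bit register write it must preserve the upper bits, hence the mask-and-merge at lines 3499-3501. In miniature:

#include <stdint.h>

/* The 16-bit MOVBE merge: swap the low word's bytes, keep the rest. */
static uint64_t movbe16_merge(uint64_t dst, uint16_t src)
{
	uint16_t swapped = (uint16_t)((src >> 8) | (src << 8));

	return (dst & ~0xffffULL) | swapped;
}
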
3515 static int em_cr_write(struct x86_emulate_ctxt *ctxt) in em_cr_write() argument
3517 if (ctxt->ops->set_cr(ctxt, ctxt->modrm_reg, ctxt->src.val)) in em_cr_write()
3518 return emulate_gp(ctxt, 0); in em_cr_write()
3521 ctxt->dst.type = OP_NONE; in em_cr_write()
3525 static int em_dr_write(struct x86_emulate_ctxt *ctxt) in em_dr_write() argument
3529 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_dr_write()
3530 val = ctxt->src.val & ~0ULL; in em_dr_write()
3532 val = ctxt->src.val & ~0U; in em_dr_write()
3535 if (ctxt->ops->set_dr(ctxt, ctxt->modrm_reg, val) < 0) in em_dr_write()
3536 return emulate_gp(ctxt, 0); in em_dr_write()
3539 ctxt->dst.type = OP_NONE; in em_dr_write()
3543 static int em_wrmsr(struct x86_emulate_ctxt *ctxt) in em_wrmsr() argument
3547 msr_data = (u32)reg_read(ctxt, VCPU_REGS_RAX) in em_wrmsr()
3548 | ((u64)reg_read(ctxt, VCPU_REGS_RDX) << 32); in em_wrmsr()
3549 if (ctxt->ops->set_msr(ctxt, reg_read(ctxt, VCPU_REGS_RCX), msr_data)) in em_wrmsr()
3550 return emulate_gp(ctxt, 0); in em_wrmsr()
3555 static int em_rdmsr(struct x86_emulate_ctxt *ctxt) in em_rdmsr() argument
3559 if (ctxt->ops->get_msr(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &msr_data)) in em_rdmsr()
3560 return emulate_gp(ctxt, 0); in em_rdmsr()
3562 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)msr_data; in em_rdmsr()
3563 *reg_write(ctxt, VCPU_REGS_RDX) = msr_data >> 32; in em_rdmsr()
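
em_rdtsc(), em_rdpmc() and em_rdmsr() all share one convention: a 64-bit result is returned through the EDX:EAX pair, and em_wrmsr() reassembles its operand the same way. The (u32) casts matter, since a 32-bit register write zero-extends into the full 64-bit GPR. The split and join, standalone:

#include <stdint.h>

static void split_u64(uint64_t v, uint64_t *rax, uint64_t *rdx)
{
	*rax = (uint32_t)v;	/* low half, zero-extended like a 32-bit write */
	*rdx = v >> 32;		/* high half */
}

static uint64_t join_u64(uint64_t rax, uint64_t rdx)
{
	return (uint32_t)rax | (rdx << 32);
}
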
3567 static int em_mov_rm_sreg(struct x86_emulate_ctxt *ctxt) in em_mov_rm_sreg() argument
3569 if (ctxt->modrm_reg > VCPU_SREG_GS) in em_mov_rm_sreg()
3570 return emulate_ud(ctxt); in em_mov_rm_sreg()
3572 ctxt->dst.val = get_segment_selector(ctxt, ctxt->modrm_reg); in em_mov_rm_sreg()
3573 if (ctxt->dst.bytes == 4 && ctxt->dst.type == OP_MEM) in em_mov_rm_sreg()
3574 ctxt->dst.bytes = 2; in em_mov_rm_sreg()
3578 static int em_mov_sreg_rm(struct x86_emulate_ctxt *ctxt) in em_mov_sreg_rm() argument
3580 u16 sel = ctxt->src.val; in em_mov_sreg_rm()
3582 if (ctxt->modrm_reg == VCPU_SREG_CS || ctxt->modrm_reg > VCPU_SREG_GS) in em_mov_sreg_rm()
3583 return emulate_ud(ctxt); in em_mov_sreg_rm()
3585 if (ctxt->modrm_reg == VCPU_SREG_SS) in em_mov_sreg_rm()
3586 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS; in em_mov_sreg_rm()
3589 ctxt->dst.type = OP_NONE; in em_mov_sreg_rm()
3590 return load_segment_descriptor(ctxt, sel, ctxt->modrm_reg); in em_mov_sreg_rm()
3593 static int em_lldt(struct x86_emulate_ctxt *ctxt) in em_lldt() argument
3595 u16 sel = ctxt->src.val; in em_lldt()
3598 ctxt->dst.type = OP_NONE; in em_lldt()
3599 return load_segment_descriptor(ctxt, sel, VCPU_SREG_LDTR); in em_lldt()
3602 static int em_ltr(struct x86_emulate_ctxt *ctxt) in em_ltr() argument
3604 u16 sel = ctxt->src.val; in em_ltr()
3607 ctxt->dst.type = OP_NONE; in em_ltr()
3608 return load_segment_descriptor(ctxt, sel, VCPU_SREG_TR); in em_ltr()
3611 static int em_invlpg(struct x86_emulate_ctxt *ctxt) in em_invlpg() argument
3616 rc = linearize(ctxt, ctxt->src.addr.mem, 1, false, &linear); in em_invlpg()
3618 ctxt->ops->invlpg(ctxt, linear); in em_invlpg()
3620 ctxt->dst.type = OP_NONE; in em_invlpg()
3624 static int em_clts(struct x86_emulate_ctxt *ctxt) in em_clts() argument
3628 cr0 = ctxt->ops->get_cr(ctxt, 0); in em_clts()
3630 ctxt->ops->set_cr(ctxt, 0, cr0); in em_clts()
3634 static int em_hypercall(struct x86_emulate_ctxt *ctxt) in em_hypercall() argument
3636 int rc = ctxt->ops->fix_hypercall(ctxt); in em_hypercall()
3642 ctxt->_eip = ctxt->eip; in em_hypercall()
3644 ctxt->dst.type = OP_NONE; in em_hypercall()
3648 static int emulate_store_desc_ptr(struct x86_emulate_ctxt *ctxt, in emulate_store_desc_ptr() argument
3649 void (*get)(struct x86_emulate_ctxt *ctxt, in emulate_store_desc_ptr() argument
3654 if (ctxt->mode == X86EMUL_MODE_PROT64) in emulate_store_desc_ptr()
3655 ctxt->op_bytes = 8; in emulate_store_desc_ptr()
3656 get(ctxt, &desc_ptr); in emulate_store_desc_ptr()
3657 if (ctxt->op_bytes == 2) { in emulate_store_desc_ptr()
3658 ctxt->op_bytes = 4; in emulate_store_desc_ptr()
3662 ctxt->dst.type = OP_NONE; in emulate_store_desc_ptr()
3663 return segmented_write(ctxt, ctxt->dst.addr.mem, in emulate_store_desc_ptr()
3664 &desc_ptr, 2 + ctxt->op_bytes); in emulate_store_desc_ptr()
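
emulate_store_desc_ptr() writes the architectural SGDT/SIDT image: a 16-bit limit immediately followed by the base, which is why the write at lines 3663-3664 covers '2 + ctxt->op_bytes' bytes. Long mode forces an 8-byte base, and the 16-bit form is widened at lines 3657-3658 to still write 4 base bytes (the emulator masks the base to 24 bits in that case, in a line this listing elides). The layout, assuming a little-endian host as on x86:

#include <stdint.h>
#include <string.h>

/* Build the SGDT/SIDT memory image: 2-byte limit, then base_bytes of base. */
static size_t store_desc_ptr(uint8_t *dst, uint16_t limit, uint64_t base,
			     unsigned base_bytes)
{
	memcpy(dst, &limit, 2);			/* little-endian limit */
	memcpy(dst + 2, &base, base_bytes);	/* low base_bytes of the base */
	return 2 + base_bytes;
}
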
3667 static int em_sgdt(struct x86_emulate_ctxt *ctxt) in em_sgdt() argument
3669 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_gdt); in em_sgdt()
3672 static int em_sidt(struct x86_emulate_ctxt *ctxt) in em_sidt() argument
3674 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_idt); in em_sidt()
3677 static int em_lgdt_lidt(struct x86_emulate_ctxt *ctxt, bool lgdt) in em_lgdt_lidt() argument
3682 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_lgdt_lidt()
3683 ctxt->op_bytes = 8; in em_lgdt_lidt()
3684 rc = read_descriptor(ctxt, ctxt->src.addr.mem, in em_lgdt_lidt()
3686 ctxt->op_bytes); in em_lgdt_lidt()
3689 if (ctxt->mode == X86EMUL_MODE_PROT64 && in em_lgdt_lidt()
3691 return emulate_gp(ctxt, 0); in em_lgdt_lidt()
3693 ctxt->ops->set_gdt(ctxt, &desc_ptr); in em_lgdt_lidt()
3695 ctxt->ops->set_idt(ctxt, &desc_ptr); in em_lgdt_lidt()
3697 ctxt->dst.type = OP_NONE; in em_lgdt_lidt()
3701 static int em_lgdt(struct x86_emulate_ctxt *ctxt) in em_lgdt() argument
3703 return em_lgdt_lidt(ctxt, true); in em_lgdt()
3706 static int em_lidt(struct x86_emulate_ctxt *ctxt) in em_lidt() argument
3708 return em_lgdt_lidt(ctxt, false); in em_lidt()
3711 static int em_smsw(struct x86_emulate_ctxt *ctxt) in em_smsw() argument
3713 if (ctxt->dst.type == OP_MEM) in em_smsw()
3714 ctxt->dst.bytes = 2; in em_smsw()
3715 ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0); in em_smsw()
3719 static int em_lmsw(struct x86_emulate_ctxt *ctxt) in em_lmsw() argument
3721 ctxt->ops->set_cr(ctxt, 0, (ctxt->ops->get_cr(ctxt, 0) & ~0x0eul) in em_lmsw()
3722 | (ctxt->src.val & 0x0f)); in em_lmsw()
3723 ctxt->dst.type = OP_NONE; in em_lmsw()
3727 static int em_loop(struct x86_emulate_ctxt *ctxt) in em_loop() argument
3731 register_address_increment(ctxt, VCPU_REGS_RCX, -1); in em_loop()
3732 if ((address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) != 0) && in em_loop()
3733 (ctxt->b == 0xe2 || test_cc(ctxt->b ^ 0x5, ctxt->eflags))) in em_loop()
3734 rc = jmp_rel(ctxt, ctxt->src.val); in em_loop()
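
em_loop() handles all three LOOP opcodes with one condition test. LOOP (0xe2) branches whenever the masked count is non-zero; LOOPNE (0xe0) and LOOPE (0xe1) additionally consult ZF, and XOR-ing the opcode with 0x5 maps them onto the Jcc condition-code table, where low nibble 4 means "ZF set" and 5 means "ZF clear". A check of that mapping (this assumes test_cc() indexes conditions by the low nibble, as the Jcc encoding does):

/* 0xe0 (LOOPNE) ^ 0x5 = 0xe5 -> condition 5, "ZF clear";
 * 0xe1 (LOOPE)  ^ 0x5 = 0xe4 -> condition 4, "ZF set". */
static unsigned loop_cc(unsigned char opcode)
{
	return (opcode ^ 0x5) & 0xf;
}
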
3739 static int em_jcxz(struct x86_emulate_ctxt *ctxt) in em_jcxz() argument
3743 if (address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) == 0) in em_jcxz()
3744 rc = jmp_rel(ctxt, ctxt->src.val); in em_jcxz()
3749 static int em_in(struct x86_emulate_ctxt *ctxt) in em_in() argument
3751 if (!pio_in_emulated(ctxt, ctxt->dst.bytes, ctxt->src.val, in em_in()
3752 &ctxt->dst.val)) in em_in()
3758 static int em_out(struct x86_emulate_ctxt *ctxt) in em_out() argument
3760 ctxt->ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val, in em_out()
3761 &ctxt->src.val, 1); in em_out()
3763 ctxt->dst.type = OP_NONE; in em_out()
3767 static int em_cli(struct x86_emulate_ctxt *ctxt) in em_cli() argument
3769 if (emulator_bad_iopl(ctxt)) in em_cli()
3770 return emulate_gp(ctxt, 0); in em_cli()
3772 ctxt->eflags &= ~X86_EFLAGS_IF; in em_cli()
3776 static int em_sti(struct x86_emulate_ctxt *ctxt) in em_sti() argument
3778 if (emulator_bad_iopl(ctxt)) in em_sti()
3779 return emulate_gp(ctxt, 0); in em_sti()
3781 ctxt->interruptibility = KVM_X86_SHADOW_INT_STI; in em_sti()
3782 ctxt->eflags |= X86_EFLAGS_IF; in em_sti()
3786 static int em_cpuid(struct x86_emulate_ctxt *ctxt) in em_cpuid() argument
3790 eax = reg_read(ctxt, VCPU_REGS_RAX); in em_cpuid()
3791 ecx = reg_read(ctxt, VCPU_REGS_RCX); in em_cpuid()
3792 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx); in em_cpuid()
3793 *reg_write(ctxt, VCPU_REGS_RAX) = eax; in em_cpuid()
3794 *reg_write(ctxt, VCPU_REGS_RBX) = ebx; in em_cpuid()
3795 *reg_write(ctxt, VCPU_REGS_RCX) = ecx; in em_cpuid()
3796 *reg_write(ctxt, VCPU_REGS_RDX) = edx; in em_cpuid()
3800 static int em_sahf(struct x86_emulate_ctxt *ctxt) in em_sahf() argument
3806 flags &= *reg_rmw(ctxt, VCPU_REGS_RAX) >> 8; in em_sahf()
3808 ctxt->eflags &= ~0xffUL; in em_sahf()
3809 ctxt->eflags |= flags | X86_EFLAGS_FIXED; in em_sahf()
3813 static int em_lahf(struct x86_emulate_ctxt *ctxt) in em_lahf() argument
3815 *reg_rmw(ctxt, VCPU_REGS_RAX) &= ~0xff00UL; in em_lahf()
3816 *reg_rmw(ctxt, VCPU_REGS_RAX) |= (ctxt->eflags & 0xff) << 8; in em_lahf()
3820 static int em_bswap(struct x86_emulate_ctxt *ctxt) in em_bswap() argument
3822 switch (ctxt->op_bytes) { in em_bswap()
3825 asm("bswap %0" : "+r"(ctxt->dst.val)); in em_bswap()
3829 asm("bswap %0" : "+r"(*(u32 *)&ctxt->dst.val)); in em_bswap()
3835 static int em_clflush(struct x86_emulate_ctxt *ctxt) in em_clflush() argument
3841 static int em_movsxd(struct x86_emulate_ctxt *ctxt) in em_movsxd() argument
3843 ctxt->dst.val = (s32) ctxt->src.val; in em_movsxd()
3859 static int check_cr_read(struct x86_emulate_ctxt *ctxt) in check_cr_read() argument
3861 if (!valid_cr(ctxt->modrm_reg)) in check_cr_read()
3862 return emulate_ud(ctxt); in check_cr_read()
3867 static int check_cr_write(struct x86_emulate_ctxt *ctxt) in check_cr_write() argument
3869 u64 new_val = ctxt->src.val64; in check_cr_write()
3870 int cr = ctxt->modrm_reg; in check_cr_write()
3882 return emulate_ud(ctxt); in check_cr_write()
3885 return emulate_gp(ctxt, 0); in check_cr_write()
3892 return emulate_gp(ctxt, 0); in check_cr_write()
3894 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_cr_write()
3895 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_cr_write()
3899 return emulate_gp(ctxt, 0); in check_cr_write()
3906 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_cr_write()
3911 return emulate_gp(ctxt, 0); in check_cr_write()
3916 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_cr_write()
3919 return emulate_gp(ctxt, 0); in check_cr_write()
3928 static int check_dr7_gd(struct x86_emulate_ctxt *ctxt) in check_dr7_gd() argument
3932 ctxt->ops->get_dr(ctxt, 7, &dr7); in check_dr7_gd()
3938 static int check_dr_read(struct x86_emulate_ctxt *ctxt) in check_dr_read() argument
3940 int dr = ctxt->modrm_reg; in check_dr_read()
3944 return emulate_ud(ctxt); in check_dr_read()
3946 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_dr_read()
3948 return emulate_ud(ctxt); in check_dr_read()
3950 if (check_dr7_gd(ctxt)) { in check_dr_read()
3953 ctxt->ops->get_dr(ctxt, 6, &dr6); in check_dr_read()
3956 ctxt->ops->set_dr(ctxt, 6, dr6); in check_dr_read()
3957 return emulate_db(ctxt); in check_dr_read()
3963 static int check_dr_write(struct x86_emulate_ctxt *ctxt) in check_dr_write() argument
3965 u64 new_val = ctxt->src.val64; in check_dr_write()
3966 int dr = ctxt->modrm_reg; in check_dr_write()
3969 return emulate_gp(ctxt, 0); in check_dr_write()
3971 return check_dr_read(ctxt); in check_dr_write()
3974 static int check_svme(struct x86_emulate_ctxt *ctxt) in check_svme() argument
3978 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_svme()
3981 return emulate_ud(ctxt); in check_svme()
3986 static int check_svme_pa(struct x86_emulate_ctxt *ctxt) in check_svme_pa() argument
3988 u64 rax = reg_read(ctxt, VCPU_REGS_RAX); in check_svme_pa()
3992 return emulate_gp(ctxt, 0); in check_svme_pa()
3994 return check_svme(ctxt); in check_svme_pa()
3997 static int check_rdtsc(struct x86_emulate_ctxt *ctxt) in check_rdtsc() argument
3999 u64 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_rdtsc()
4001 if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt)) in check_rdtsc()
4002 return emulate_ud(ctxt); in check_rdtsc()
4007 static int check_rdpmc(struct x86_emulate_ctxt *ctxt) in check_rdpmc() argument
4009 u64 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_rdpmc()
4010 u64 rcx = reg_read(ctxt, VCPU_REGS_RCX); in check_rdpmc()
4012 if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) || in check_rdpmc()
4013 ctxt->ops->check_pmc(ctxt, rcx)) in check_rdpmc()
4014 return emulate_gp(ctxt, 0); in check_rdpmc()
4019 static int check_perm_in(struct x86_emulate_ctxt *ctxt) in check_perm_in() argument
4021 ctxt->dst.bytes = min(ctxt->dst.bytes, 4u); in check_perm_in()
4022 if (!emulator_io_permited(ctxt, ctxt->src.val, ctxt->dst.bytes)) in check_perm_in()
4023 return emulate_gp(ctxt, 0); in check_perm_in()
4028 static int check_perm_out(struct x86_emulate_ctxt *ctxt) in check_perm_out() argument
4030 ctxt->src.bytes = min(ctxt->src.bytes, 4u); in check_perm_out()
4031 if (!emulator_io_permited(ctxt, ctxt->dst.val, ctxt->src.bytes)) in check_perm_out()
4032 return emulate_gp(ctxt, 0); in check_perm_out()
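
check_perm_in() and check_perm_out() clamp the access to 4 bytes and defer to emulator_io_permited() (not shown in this listing), which consults the TSS I/O permission bitmap. Architecturally, each byte of a multi-byte port access has its own bit, and a single set bit denies the whole access. A hypothetical model of that bitmap walk, assuming the relevant bitmap bytes are already fetched:

#include <stdint.h>
#include <stdbool.h>

/* One bit per port byte; a set bit denies.  All 'len' bytes must be clear. */
static bool io_bitmap_allows(const uint8_t *bitmap, uint16_t port, unsigned len)
{
	while (len--) {
		if (bitmap[port >> 3] & (1u << (port & 7)))
			return false;
		port++;
	}
	return true;
}
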
4559 static unsigned imm_size(struct x86_emulate_ctxt *ctxt) in imm_size() argument
4563 size = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in imm_size()
4569 static int decode_imm(struct x86_emulate_ctxt *ctxt, struct operand *op, in decode_imm() argument
4576 op->addr.mem.ea = ctxt->_eip; in decode_imm()
4580 op->val = insn_fetch(s8, ctxt); in decode_imm()
4583 op->val = insn_fetch(s16, ctxt); in decode_imm()
4586 op->val = insn_fetch(s32, ctxt); in decode_imm()
4589 op->val = insn_fetch(s64, ctxt); in decode_imm()
4609 static int decode_operand(struct x86_emulate_ctxt *ctxt, struct operand *op, in decode_operand() argument
4616 decode_register_operand(ctxt, op); in decode_operand()
4619 rc = decode_imm(ctxt, op, 1, false); in decode_operand()
4622 ctxt->memop.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4624 *op = ctxt->memop; in decode_operand()
4625 ctxt->memopp = op; in decode_operand()
4626 if (ctxt->d & BitOp) in decode_operand()
4627 fetch_bit_operand(ctxt); in decode_operand()
4631 ctxt->memop.bytes = (ctxt->op_bytes == 8) ? 16 : 8; in decode_operand()
4635 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4636 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in decode_operand()
4642 op->bytes = (ctxt->d & ByteOp) ? 2 : ctxt->op_bytes; in decode_operand()
4643 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in decode_operand()
4648 if (ctxt->d & ByteOp) { in decode_operand()
4653 op->bytes = ctxt->op_bytes; in decode_operand()
4654 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in decode_operand()
4660 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4662 register_address(ctxt, VCPU_REGS_RDI); in decode_operand()
4670 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in decode_operand()
4676 op->val = reg_read(ctxt, VCPU_REGS_RCX) & 0xff; in decode_operand()
4679 rc = decode_imm(ctxt, op, 1, true); in decode_operand()
4687 rc = decode_imm(ctxt, op, imm_size(ctxt), true); in decode_operand()
4690 rc = decode_imm(ctxt, op, ctxt->op_bytes, true); in decode_operand()
4693 ctxt->memop.bytes = 1; in decode_operand()
4694 if (ctxt->memop.type == OP_REG) { in decode_operand()
4695 ctxt->memop.addr.reg = decode_register(ctxt, in decode_operand()
4696 ctxt->modrm_rm, true); in decode_operand()
4697 fetch_register_operand(&ctxt->memop); in decode_operand()
4701 ctxt->memop.bytes = 2; in decode_operand()
4704 ctxt->memop.bytes = 4; in decode_operand()
4707 rc = decode_imm(ctxt, op, 2, false); in decode_operand()
4710 rc = decode_imm(ctxt, op, imm_size(ctxt), false); in decode_operand()
4714 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4716 register_address(ctxt, VCPU_REGS_RSI); in decode_operand()
4717 op->addr.mem.seg = ctxt->seg_override; in decode_operand()
4723 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4725 address_mask(ctxt, in decode_operand()
4726 reg_read(ctxt, VCPU_REGS_RBX) + in decode_operand()
4727 (reg_read(ctxt, VCPU_REGS_RAX) & 0xff)); in decode_operand()
4728 op->addr.mem.seg = ctxt->seg_override; in decode_operand()
4733 op->addr.mem.ea = ctxt->_eip; in decode_operand()
4734 op->bytes = ctxt->op_bytes + 2; in decode_operand()
4735 insn_fetch_arr(op->valptr, op->bytes, ctxt); in decode_operand()
4738 ctxt->memop.bytes = ctxt->op_bytes + 2; in decode_operand()
4775 int x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len) in x86_decode_insn() argument
4778 int mode = ctxt->mode; in x86_decode_insn()
4784 ctxt->memop.type = OP_NONE; in x86_decode_insn()
4785 ctxt->memopp = NULL; in x86_decode_insn()
4786 ctxt->_eip = ctxt->eip; in x86_decode_insn()
4787 ctxt->fetch.ptr = ctxt->fetch.data; in x86_decode_insn()
4788 ctxt->fetch.end = ctxt->fetch.data + insn_len; in x86_decode_insn()
4789 ctxt->opcode_len = 1; in x86_decode_insn()
4791 memcpy(ctxt->fetch.data, insn, insn_len); in x86_decode_insn()
4793 rc = __do_insn_fetch_bytes(ctxt, 1); in x86_decode_insn()
4817 ctxt->op_bytes = def_op_bytes; in x86_decode_insn()
4818 ctxt->ad_bytes = def_ad_bytes; in x86_decode_insn()
4822 switch (ctxt->b = insn_fetch(u8, ctxt)) { in x86_decode_insn()
4826 ctxt->op_bytes = def_op_bytes ^ 6; in x86_decode_insn()
4831 ctxt->ad_bytes = def_ad_bytes ^ 12; in x86_decode_insn()
4834 ctxt->ad_bytes = def_ad_bytes ^ 6; in x86_decode_insn()
4841 ctxt->seg_override = (ctxt->b >> 3) & 3; in x86_decode_insn()
4846 ctxt->seg_override = ctxt->b & 7; in x86_decode_insn()
4851 ctxt->rex_prefix = ctxt->b; in x86_decode_insn()
4854 ctxt->lock_prefix = 1; in x86_decode_insn()
4858 ctxt->rep_prefix = ctxt->b; in x86_decode_insn()
4866 ctxt->rex_prefix = 0; in x86_decode_insn()
4872 if (ctxt->rex_prefix & 8) in x86_decode_insn()
4873 ctxt->op_bytes = 8; /* REX.W */ in x86_decode_insn()
4876 opcode = opcode_table[ctxt->b]; in x86_decode_insn()
4878 if (ctxt->b == 0x0f) { in x86_decode_insn()
4879 ctxt->opcode_len = 2; in x86_decode_insn()
4880 ctxt->b = insn_fetch(u8, ctxt); in x86_decode_insn()
4881 opcode = twobyte_table[ctxt->b]; in x86_decode_insn()
4884 if (ctxt->b == 0x38) { in x86_decode_insn()
4885 ctxt->opcode_len = 3; in x86_decode_insn()
4886 ctxt->b = insn_fetch(u8, ctxt); in x86_decode_insn()
4887 opcode = opcode_map_0f_38[ctxt->b]; in x86_decode_insn()
4890 ctxt->d = opcode.flags; in x86_decode_insn()
4892 if (ctxt->d & ModRM) in x86_decode_insn()
4893 ctxt->modrm = insn_fetch(u8, ctxt); in x86_decode_insn()
4896 if (ctxt->opcode_len == 1 && (ctxt->b == 0xc5 || ctxt->b == 0xc4) && in x86_decode_insn()
4897 (mode == X86EMUL_MODE_PROT64 || (ctxt->modrm & 0xc0) == 0xc0)) { in x86_decode_insn()
4898 ctxt->d = NotImpl; in x86_decode_insn()
4901 while (ctxt->d & GroupMask) { in x86_decode_insn()
4902 switch (ctxt->d & GroupMask) { in x86_decode_insn()
4904 goffset = (ctxt->modrm >> 3) & 7; in x86_decode_insn()
4908 goffset = (ctxt->modrm >> 3) & 7; in x86_decode_insn()
4909 if ((ctxt->modrm >> 6) == 3) in x86_decode_insn()
4915 goffset = ctxt->modrm & 7; in x86_decode_insn()
4919 if (ctxt->rep_prefix && op_prefix) in x86_decode_insn()
4921 simd_prefix = op_prefix ? 0x66 : ctxt->rep_prefix; in x86_decode_insn()
4930 if (ctxt->modrm > 0xbf) in x86_decode_insn()
4931 opcode = opcode.u.esc->high[ctxt->modrm - 0xc0]; in x86_decode_insn()
4933 opcode = opcode.u.esc->op[(ctxt->modrm >> 3) & 7]; in x86_decode_insn()
4936 if ((ctxt->modrm >> 6) == 3) in x86_decode_insn()
4942 if (ctxt->mode == X86EMUL_MODE_PROT64) in x86_decode_insn()
4951 ctxt->d &= ~(u64)GroupMask; in x86_decode_insn()
4952 ctxt->d |= opcode.flags; in x86_decode_insn()
4956 if (ctxt->d == 0) in x86_decode_insn()
4959 ctxt->execute = opcode.u.execute; in x86_decode_insn()
4961 if (unlikely(ctxt->ud) && likely(!(ctxt->d & EmulateOnUD))) in x86_decode_insn()
4964 if (unlikely(ctxt->d & in x86_decode_insn()
4971 ctxt->check_perm = opcode.check_perm; in x86_decode_insn()
4972 ctxt->intercept = opcode.intercept; in x86_decode_insn()
4974 if (ctxt->d & NotImpl) in x86_decode_insn()
4978 if (ctxt->op_bytes == 4 && (ctxt->d & Stack)) in x86_decode_insn()
4979 ctxt->op_bytes = 8; in x86_decode_insn()
4980 else if (ctxt->d & NearBranch) in x86_decode_insn()
4981 ctxt->op_bytes = 8; in x86_decode_insn()
4984 if (ctxt->d & Op3264) { in x86_decode_insn()
4986 ctxt->op_bytes = 8; in x86_decode_insn()
4988 ctxt->op_bytes = 4; in x86_decode_insn()
4991 if ((ctxt->d & No16) && ctxt->op_bytes == 2) in x86_decode_insn()
4992 ctxt->op_bytes = 4; in x86_decode_insn()
4994 if (ctxt->d & Sse) in x86_decode_insn()
4995 ctxt->op_bytes = 16; in x86_decode_insn()
4996 else if (ctxt->d & Mmx) in x86_decode_insn()
4997 ctxt->op_bytes = 8; in x86_decode_insn()
5001 if (ctxt->d & ModRM) { in x86_decode_insn()
5002 rc = decode_modrm(ctxt, &ctxt->memop); in x86_decode_insn()
5005 ctxt->seg_override = ctxt->modrm_seg; in x86_decode_insn()
5007 } else if (ctxt->d & MemAbs) in x86_decode_insn()
5008 rc = decode_abs(ctxt, &ctxt->memop); in x86_decode_insn()
5013 ctxt->seg_override = VCPU_SREG_DS; in x86_decode_insn()
5015 ctxt->memop.addr.mem.seg = ctxt->seg_override; in x86_decode_insn()
5021 rc = decode_operand(ctxt, &ctxt->src, (ctxt->d >> SrcShift) & OpMask); in x86_decode_insn()
5029 rc = decode_operand(ctxt, &ctxt->src2, (ctxt->d >> Src2Shift) & OpMask); in x86_decode_insn()
5034 rc = decode_operand(ctxt, &ctxt->dst, (ctxt->d >> DstShift) & OpMask); in x86_decode_insn()
5036 if (ctxt->rip_relative) in x86_decode_insn()
5037 ctxt->memopp->addr.mem.ea = address_mask(ctxt, in x86_decode_insn()
5038 ctxt->memopp->addr.mem.ea + ctxt->_eip); in x86_decode_insn()
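
The legacy-prefix loop near the top of x86_decode_insn() (lines 4822-4858) accumulates prefixes until the opcode byte arrives: 0x66 and 0x67 toggle the operand and address size with an XOR rather than a lookup, segment overrides are decoded straight out of the prefix byte, and a REX prefix is remembered but discarded again (line 4866) if any other prefix follows it, since REX must immediately precede the opcode. The XOR toggles in isolation:

/* XOR with 6 swaps 2 <-> 4; XOR with 12 swaps 4 <-> 8.  One expression
 * flips the default width regardless of which mode supplied the default. */
static unsigned toggle_op_bytes(unsigned def_bytes)    { return def_bytes ^ 6; }
static unsigned toggle_ad_bytes_64(unsigned def_bytes) { return def_bytes ^ 12; }
/* toggle_op_bytes(2) == 4, toggle_op_bytes(4) == 2,
 * toggle_ad_bytes_64(8) == 4, toggle_ad_bytes_64(4) == 8 */
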
5044 bool x86_page_table_writing_insn(struct x86_emulate_ctxt *ctxt) in x86_page_table_writing_insn() argument
5046 return ctxt->d & PageTable; in x86_page_table_writing_insn()
5049 static bool string_insn_completed(struct x86_emulate_ctxt *ctxt) in string_insn_completed() argument
5058 if (((ctxt->b == 0xa6) || (ctxt->b == 0xa7) || in string_insn_completed()
5059 (ctxt->b == 0xae) || (ctxt->b == 0xaf)) in string_insn_completed()
5060 && (((ctxt->rep_prefix == REPE_PREFIX) && in string_insn_completed()
5061 ((ctxt->eflags & X86_EFLAGS_ZF) == 0)) in string_insn_completed()
5062 || ((ctxt->rep_prefix == REPNE_PREFIX) && in string_insn_completed()
5063 ((ctxt->eflags & X86_EFLAGS_ZF) == X86_EFLAGS_ZF)))) in string_insn_completed()
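
string_insn_completed() only ever stops CMPS (0xa6/0xa7) and SCAS (0xae/0xaf) early: with a REPE prefix the loop ends once ZF clears, with REPNE once ZF sets. Plain REP instructions such as MOVS terminate solely on RCX reaching zero, which the caller has already tested. The predicate, reduced to its essentials:

#include <stdbool.h>

/* Early-termination test for REP-prefixed CMPS/SCAS.  'repe' distinguishes
 * REPE (terminate on ZF==0) from REPNE (terminate on ZF==1). */
static bool repcc_done(bool is_cmps_or_scas, bool repe, bool zf)
{
	if (!is_cmps_or_scas)
		return false;	/* MOVS/STOS/... only stop when RCX hits 0 */
	return repe ? !zf : zf;
}
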
5069 static int flush_pending_x87_faults(struct x86_emulate_ctxt *ctxt) in flush_pending_x87_faults() argument
5073 ctxt->ops->get_fpu(ctxt); in flush_pending_x87_faults()
5083 ctxt->ops->put_fpu(ctxt); in flush_pending_x87_faults()
5086 return emulate_exception(ctxt, MF_VECTOR, 0, false); in flush_pending_x87_faults()
5091 static void fetch_possible_mmx_operand(struct x86_emulate_ctxt *ctxt, in fetch_possible_mmx_operand() argument
5095 read_mmx_reg(ctxt, &op->mm_val, op->addr.mm); in fetch_possible_mmx_operand()
5098 static int fastop(struct x86_emulate_ctxt *ctxt, void (*fop)(struct fastop *)) in fastop() argument
5100 ulong flags = (ctxt->eflags & EFLAGS_MASK) | X86_EFLAGS_IF; in fastop()
5101 if (!(ctxt->d & ByteOp)) in fastop()
5102 fop += __ffs(ctxt->dst.bytes) * FASTOP_SIZE; in fastop()
5104 : "+a"(ctxt->dst.val), "+d"(ctxt->src.val), [flags]"+D"(flags), in fastop()
5106 : "c"(ctxt->src2.val)); in fastop()
5107 ctxt->eflags = (ctxt->eflags & ~EFLAGS_MASK) | (flags & EFLAGS_MASK); in fastop()
5109 return emulate_de(ctxt); in fastop()
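
fastop() dispatches into a table of small assembly stubs: each operation provides one stub per operand size, laid out FASTOP_SIZE bytes apart, so __ffs(ctxt->dst.bytes) turns sizes 1/2/4/8 into slots 0/1/2/3 (byte ops skip the adjustment at line 5101 and use the base stub). The inline asm then calls the chosen stub with dst, src and src2 in fixed registers (the "+a", "+d" and "c" constraints at lines 5104-5106) plus a sanitized flags image, merges the stub's real CPU flags back into ctxt->eflags, and, for the division stubs, may come back with a #DE to forward (line 5109). The slot arithmetic on its own:

/* Map a power-of-two operand size onto its stub slot: 1->0, 2->1, 4->2, 8->3.
 * This is what __ffs() computes for a power of two; the stub address is
 * then base + slot * FASTOP_SIZE. */
static unsigned fastop_slot(unsigned bytes)
{
	unsigned slot = 0;

	while (bytes >>= 1)
		slot++;
	return slot;
}
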
5113 void init_decode_cache(struct x86_emulate_ctxt *ctxt) in init_decode_cache() argument
5115 memset(&ctxt->rip_relative, 0, in init_decode_cache()
5116 (void *)&ctxt->modrm - (void *)&ctxt->rip_relative); in init_decode_cache()
5118 ctxt->io_read.pos = 0; in init_decode_cache()
5119 ctxt->io_read.end = 0; in init_decode_cache()
5120 ctxt->mem_read.end = 0; in init_decode_cache()
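
init_decode_cache() resets the per-instruction decode state with a single memset spanning every struct member from rip_relative up to (but not including) modrm, which works only because of their declaration order in struct x86_emulate_ctxt; the read caches outside that window are cleared field by field. The idiom in miniature, with an invented struct:

#include <string.h>

/* Zero a contiguous run of members.  Correct only while the declaration
 * order holds; reordering the struct silently breaks it. */
struct decode_state {
	int keep_me;
	int first;	/* start of the zeroed window */
	int a, b, c;
	int past_last;	/* first member NOT zeroed */
};

static void reset_window(struct decode_state *s)
{
	memset(&s->first, 0, (char *)&s->past_last - (char *)&s->first);
}
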
5123 int x86_emulate_insn(struct x86_emulate_ctxt *ctxt) in x86_emulate_insn() argument
5125 const struct x86_emulate_ops *ops = ctxt->ops; in x86_emulate_insn()
5127 int saved_dst_type = ctxt->dst.type; in x86_emulate_insn()
5129 ctxt->mem_read.pos = 0; in x86_emulate_insn()
5132 if (ctxt->lock_prefix && (!(ctxt->d & Lock) || ctxt->dst.type != OP_MEM)) { in x86_emulate_insn()
5133 rc = emulate_ud(ctxt); in x86_emulate_insn()
5137 if ((ctxt->d & SrcMask) == SrcMemFAddr && ctxt->src.type != OP_MEM) { in x86_emulate_insn()
5138 rc = emulate_ud(ctxt); in x86_emulate_insn()
5142 if (unlikely(ctxt->d & in x86_emulate_insn()
5144 if ((ctxt->mode == X86EMUL_MODE_PROT64 && (ctxt->d & No64)) || in x86_emulate_insn()
5145 (ctxt->d & Undefined)) { in x86_emulate_insn()
5146 rc = emulate_ud(ctxt); in x86_emulate_insn()
5150 if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM))) in x86_emulate_insn()
5151 || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) { in x86_emulate_insn()
5152 rc = emulate_ud(ctxt); in x86_emulate_insn()
5156 if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) { in x86_emulate_insn()
5157 rc = emulate_nm(ctxt); in x86_emulate_insn()
5161 if (ctxt->d & Mmx) { in x86_emulate_insn()
5162 rc = flush_pending_x87_faults(ctxt); in x86_emulate_insn()
5169 fetch_possible_mmx_operand(ctxt, &ctxt->src); in x86_emulate_insn()
5170 fetch_possible_mmx_operand(ctxt, &ctxt->src2); in x86_emulate_insn()
5171 if (!(ctxt->d & Mov)) in x86_emulate_insn()
5172 fetch_possible_mmx_operand(ctxt, &ctxt->dst); in x86_emulate_insn()
5175 if (unlikely(ctxt->emul_flags & X86EMUL_GUEST_MASK) && ctxt->intercept) { in x86_emulate_insn()
5176 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5183 if ((ctxt->d & Prot) && ctxt->mode < X86EMUL_MODE_PROT16) { in x86_emulate_insn()
5184 rc = emulate_ud(ctxt); in x86_emulate_insn()
5189 if ((ctxt->d & Priv) && ops->cpl(ctxt)) { in x86_emulate_insn()
5190 if (ctxt->d & PrivUD) in x86_emulate_insn()
5191 rc = emulate_ud(ctxt); in x86_emulate_insn()
5193 rc = emulate_gp(ctxt, 0); in x86_emulate_insn()
5198 if (ctxt->d & CheckPerm) { in x86_emulate_insn()
5199 rc = ctxt->check_perm(ctxt); in x86_emulate_insn()
5204 if (unlikely(ctxt->emul_flags & X86EMUL_GUEST_MASK) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5205 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5211 if (ctxt->rep_prefix && (ctxt->d & String)) { in x86_emulate_insn()
5213 if (address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) == 0) { in x86_emulate_insn()
5214 string_registers_quirk(ctxt); in x86_emulate_insn()
5215 ctxt->eip = ctxt->_eip; in x86_emulate_insn()
5216 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5222 if ((ctxt->src.type == OP_MEM) && !(ctxt->d & NoAccess)) { in x86_emulate_insn()
5223 rc = segmented_read(ctxt, ctxt->src.addr.mem, in x86_emulate_insn()
5224 ctxt->src.valptr, ctxt->src.bytes); in x86_emulate_insn()
5227 ctxt->src.orig_val64 = ctxt->src.val64; in x86_emulate_insn()
5230 if (ctxt->src2.type == OP_MEM) { in x86_emulate_insn()
5231 rc = segmented_read(ctxt, ctxt->src2.addr.mem, in x86_emulate_insn()
5232 &ctxt->src2.val, ctxt->src2.bytes); in x86_emulate_insn()
5237 if ((ctxt->d & DstMask) == ImplicitOps) in x86_emulate_insn()
5241 if ((ctxt->dst.type == OP_MEM) && !(ctxt->d & Mov)) { in x86_emulate_insn()
5243 rc = segmented_read(ctxt, ctxt->dst.addr.mem, in x86_emulate_insn()
5244 &ctxt->dst.val, ctxt->dst.bytes); in x86_emulate_insn()
5246 if (!(ctxt->d & NoWrite) && in x86_emulate_insn()
5248 ctxt->exception.vector == PF_VECTOR) in x86_emulate_insn()
5249 ctxt->exception.error_code |= PFERR_WRITE_MASK; in x86_emulate_insn()
5254 ctxt->dst.orig_val64 = ctxt->dst.val64; in x86_emulate_insn()
5258 if (unlikely(ctxt->emul_flags & X86EMUL_GUEST_MASK) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5259 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5265 if (ctxt->rep_prefix && (ctxt->d & String)) in x86_emulate_insn()
5266 ctxt->eflags |= X86_EFLAGS_RF; in x86_emulate_insn()
5268 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5270 if (ctxt->execute) { in x86_emulate_insn()
5271 if (ctxt->d & Fastop) { in x86_emulate_insn()
5272 void (*fop)(struct fastop *) = (void *)ctxt->execute; in x86_emulate_insn()
5273 rc = fastop(ctxt, fop); in x86_emulate_insn()
5278 rc = ctxt->execute(ctxt); in x86_emulate_insn()
5284 if (ctxt->opcode_len == 2) in x86_emulate_insn()
5286 else if (ctxt->opcode_len == 3) in x86_emulate_insn()
5289 switch (ctxt->b) { in x86_emulate_insn()
5291 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5292 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5295 ctxt->dst.val = ctxt->src.addr.mem.ea; in x86_emulate_insn()
5298 if (ctxt->dst.addr.reg == reg_rmw(ctxt, VCPU_REGS_RAX)) in x86_emulate_insn()
5299 ctxt->dst.type = OP_NONE; in x86_emulate_insn()
5301 rc = em_xchg(ctxt); in x86_emulate_insn()
5304 switch (ctxt->op_bytes) { in x86_emulate_insn()
5305 case 2: ctxt->dst.val = (s8)ctxt->dst.val; break; in x86_emulate_insn()
5306 case 4: ctxt->dst.val = (s16)ctxt->dst.val; break; in x86_emulate_insn()
5307 case 8: ctxt->dst.val = (s32)ctxt->dst.val; break; in x86_emulate_insn()
5311 rc = emulate_int(ctxt, 3); in x86_emulate_insn()
5314 rc = emulate_int(ctxt, ctxt->src.val); in x86_emulate_insn()
5317 if (ctxt->eflags & X86_EFLAGS_OF) in x86_emulate_insn()
5318 rc = emulate_int(ctxt, 4); in x86_emulate_insn()
5322 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5323 ctxt->dst.type = OP_NONE; /* Disable writeback. */ in x86_emulate_insn()
5326 ctxt->ops->halt(ctxt); in x86_emulate_insn()
5330 ctxt->eflags ^= X86_EFLAGS_CF; in x86_emulate_insn()
5333 ctxt->eflags &= ~X86_EFLAGS_CF; in x86_emulate_insn()
5336 ctxt->eflags |= X86_EFLAGS_CF; in x86_emulate_insn()
5339 ctxt->eflags &= ~X86_EFLAGS_DF; in x86_emulate_insn()
5342 ctxt->eflags |= X86_EFLAGS_DF; in x86_emulate_insn()
5352 if (ctxt->d & SrcWrite) { in x86_emulate_insn()
5353 BUG_ON(ctxt->src.type == OP_MEM || ctxt->src.type == OP_MEM_STR); in x86_emulate_insn()
5354 rc = writeback(ctxt, &ctxt->src); in x86_emulate_insn()
5358 if (!(ctxt->d & NoWrite)) { in x86_emulate_insn()
5359 rc = writeback(ctxt, &ctxt->dst); in x86_emulate_insn()
5368 ctxt->dst.type = saved_dst_type; in x86_emulate_insn()
5370 if ((ctxt->d & SrcMask) == SrcSI) in x86_emulate_insn()
5371 string_addr_inc(ctxt, VCPU_REGS_RSI, &ctxt->src); in x86_emulate_insn()
5373 if ((ctxt->d & DstMask) == DstDI) in x86_emulate_insn()
5374 string_addr_inc(ctxt, VCPU_REGS_RDI, &ctxt->dst); in x86_emulate_insn()
5376 if (ctxt->rep_prefix && (ctxt->d & String)) { in x86_emulate_insn()
5378 struct read_cache *r = &ctxt->io_read; in x86_emulate_insn()
5379 if ((ctxt->d & SrcMask) == SrcSI) in x86_emulate_insn()
5380 count = ctxt->src.count; in x86_emulate_insn()
5382 count = ctxt->dst.count; in x86_emulate_insn()
5383 register_address_increment(ctxt, VCPU_REGS_RCX, -count); in x86_emulate_insn()
5385 if (!string_insn_completed(ctxt)) { in x86_emulate_insn()
5390 if ((r->end != 0 || reg_read(ctxt, VCPU_REGS_RCX) & 0x3ff) && in x86_emulate_insn()
5397 ctxt->mem_read.end = 0; in x86_emulate_insn()
5398 writeback_registers(ctxt); in x86_emulate_insn()
5403 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5406 ctxt->eip = ctxt->_eip; in x86_emulate_insn()
5410 WARN_ON(ctxt->exception.vector > 0x1f); in x86_emulate_insn()
5411 ctxt->have_exception = true; in x86_emulate_insn()
5417 writeback_registers(ctxt); in x86_emulate_insn()
5422 switch (ctxt->b) { in x86_emulate_insn()
5424 (ctxt->ops->wbinvd)(ctxt); in x86_emulate_insn()
5432 ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg); in x86_emulate_insn()
5435 ops->get_dr(ctxt, ctxt->modrm_reg, &ctxt->dst.val); in x86_emulate_insn()
5438 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5439 ctxt->dst.val = ctxt->src.val; in x86_emulate_insn()
5440 else if (ctxt->op_bytes != 4) in x86_emulate_insn()
5441 ctxt->dst.type = OP_NONE; /* no writeback */ in x86_emulate_insn()
5444 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5445 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5448 ctxt->dst.val = test_cc(ctxt->b, ctxt->eflags); in x86_emulate_insn()
5451 ctxt->dst.bytes = ctxt->op_bytes; in x86_emulate_insn()
5452 ctxt->dst.val = (ctxt->src.bytes == 1) ? (u8) ctxt->src.val in x86_emulate_insn()
5453 : (u16) ctxt->src.val; in x86_emulate_insn()
5456 ctxt->dst.bytes = ctxt->op_bytes; in x86_emulate_insn()
5457 ctxt->dst.val = (ctxt->src.bytes == 1) ? (s8) ctxt->src.val : in x86_emulate_insn()
5458 (s16) ctxt->src.val; in x86_emulate_insn()
5475 void emulator_invalidate_register_cache(struct x86_emulate_ctxt *ctxt) in emulator_invalidate_register_cache() argument
5477 invalidate_registers(ctxt); in emulator_invalidate_register_cache()
5480 void emulator_writeback_register_cache(struct x86_emulate_ctxt *ctxt) in emulator_writeback_register_cache() argument
5482 writeback_registers(ctxt); in emulator_writeback_register_cache()