 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
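
/* Editor's note (sketch of the standard x86-64 encoding rule): once any
   REX prefix has been seen, byte-register encodings 4..7 select
   SPL/BPL/SIL/DIL instead of the legacy AH/CH/DH/BH; x86_64_hregs
   records that for the current instruction and is tested in
   gen_op_mov_reg_TN()/gen_op_mov_TN_reg() below. */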
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations */
    OP_SHL1, /* undocumented */

/* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_TMP0 = 16,    /* temporary operand register */
    OR_A0, /* temporary register used when doing address evaluation */

static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
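
/* Illustrative expansion (mechanical, via token pasting): an op table
   initializer such as

       static GenOpFunc *gen_op_cmov_reg_T1_T0[...][CPU_NB_REGS] = {
           DEF_REGS(gen_op_cmovw_, _T1_T0)
       };

   expands to gen_op_cmovw_EAX_T1_T0, gen_op_cmovw_ECX_T1_T0, ..., one
   entry per architectural register (8 or 16 depending on the target). */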
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
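
/* Worked example for the little-endian case: within regs[R_EAX], the
   byte at +REG_B_OFFSET is AL, the byte at +REG_H_OFFSET is AH, the
   16-bit word at +REG_W_OFFSET is AX, and the 32-bit word at
   +REG_L_OFFSET is EAX; REG_LH_OFFSET addresses the high half of the
   64-bit register. The big-endian offsets above expose the same
   sub-registers from the other end of the target_ulong. */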
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
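
/* Note added for clarity: the OT_LONG store above writes the low 32
   bits and then explicitly clears the high 32 bits, matching the
   x86-64 rule that a 32-bit destination write zero-extends into the
   full 64-bit register. */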
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));

static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2)
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));

static inline void gen_op_add_reg_T0(int size, int reg)
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));

static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
        DEF_REGS(gen_op_cmovl_, _T1_T0)
        DEF_REGS(gen_op_cmovq_, _T1_T0)

static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
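
/* Note on the 'idx' encoding used by all the load/store helpers here:
   callers pass 'ot + s->mem_index', so the low two bits carry the
   operand size (OT_BYTE..OT_QUAD) and the upper bits carry the memory
   index; '(idx >> 2) - 1' recovers the softmmu memory index again. */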
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);

static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);

static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);

static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

static inline void gen_string_movl_A0_ESI(DisasContext *s)
    override = s->override;
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
            gen_op_movq_A0_reg(R_ESI);
        if (s->addseg && override < 0)
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            gen_op_movl_A0_reg(R_ESI);
        /* 16 bit address, always override */
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);

static inline void gen_string_movl_A0_EDI(DisasContext *s)
        gen_op_movq_A0_reg(R_EDI);
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            gen_op_movl_A0_reg(R_EDI);
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);

static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
        tcg_gen_ext8u_tl(reg, reg);
        tcg_gen_ext16u_tl(reg, reg);
        tcg_gen_ext32u_tl(reg, reg);

static void gen_exts(int ot, TCGv reg)
        tcg_gen_ext8s_tl(reg, reg);
        tcg_gen_ext16s_tl(reg, reg);
        tcg_gen_ext32s_tl(reg, reg);

static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_tmp0, tcg_const_tl(0), label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
}
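
/* Usage sketch: with s->aflag == 1 (32-bit addressing),
   gen_op_jnz_ecx(s->aflag, l) zero-extends ECX via
   gen_extu(size + 1, ...) (aflag 0/1/2 maps to OT_WORD/OT_LONG/OT_QUAD)
   and branches to 'l' while ECX != 0 - exactly the loop test the REP
   prefix macros below rely on. */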
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
        X86_64_ONLY(gen_op_jnz_subq),
        X86_64_ONLY(gen_op_jz_subq),

static void *helper_in_func[3] = {
static void *helper_out_func[3] = {
static void *gen_check_io_func[3] = {

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
    target_ulong next_eip;

    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
    if (s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));

static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
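
/* Background note (summary of the lazy flags scheme used here):
   arithmetic ops do not compute EFLAGS; they only record their
   operands/result in cpu_cc_src/cpu_cc_dst and the operation kind in
   cc_op, e.g. for CMP:

       tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
       s1->cc_op = CC_OP_SUBB + ot;

   A later consumer calls gen_compute_eflags*(), which dispatches
   through cc_table[cc_op] to materialize the flags on demand. */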
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);                                        \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
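
/* Example instantiation (sketch): GEN_REPZ(movs) defines
   gen_repz_movs(s, ot, cur_eip, next_eip), whose generated code is
   roughly: if (ECX == 0) goto next_eip; movs body; ECX--; jump back to
   cur_eip - i.e. one string iteration per executed block, which keeps
   single-stepping and interrupt delivery precise. */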
static GenOpFunc1 *gen_jcc_sub[4][8] = {
#ifdef TARGET_X86_64
        BUGGY_64(gen_op_jb_subq),
        BUGGY_64(gen_op_jbe_subq),
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),

static GenOpFunc *gen_setcc_slow[8] = {

static GenOpFunc *gen_setcc_sub[4][8] = {
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
#ifdef TARGET_X86_64
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
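
/* Note on the shift amounts above (host-dependent entry size): a
   CCTable holds two function pointers, so an entry is 8 bytes on a
   32-bit host (shift by 3) and 16 bytes on a 64-bit host (shift by 4);
   cc_op scaled by the entry size plus the field offset yields the
   address of the compute_c (or compute_all) callback that is then
   called indirectly. */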
/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
        gen_op_mov_TN_reg(ot, 0, d);
        gen_op_ld_T0_A0(ot + s1->mem_index);

        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
            gen_op_mov_reg_T0(ot, d);
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;

        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
            gen_op_mov_reg_T0(ot, d);
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;

        gen_op_addl_T0_T1();
            gen_op_mov_reg_T0(ot, d);
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;

        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            gen_op_mov_reg_T0(ot, d);
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;

        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            gen_op_mov_reg_T0(ot, d);
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;

        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            gen_op_mov_reg_T0(ot, d);
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;

        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            gen_op_mov_reg_T0(ot, d);
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;

        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* XXX: add faster immediate case */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);

        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
    int label1, label2, data_bits;

        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);

        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);

    gen_extu(ot, cpu_T[0]);
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);

        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, op1);

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
static void *helper_rotc[8] = {
        X86_64_ONLY(helper_rclq),
        X86_64_ONLY(helper_rcrq),

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);

        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, op1);

    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
    int label1, label2, data_bits;

        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);

            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);

            /* XXX: not optimal */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    } else {
        data_bits = 8 << ot;
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);

            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);

    gen_set_label(label1);

        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, op1);

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
        gen_op_mov_TN_reg(ot, 1, s);
        gen_rot_rm_T1(s1, ot, d, 0);
        gen_rot_rm_T1(s1, ot, d, 1);
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        gen_rotc_rm_T1(s1, ot, d, 0);
        gen_rotc_rm_T1(s1, ot, d, 1);

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    mod = (modrm >> 6) & 3;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            if ((base & 7) == 5) {
                disp = (int32_t)ldl_code(s->pc);
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
            disp = (int8_t)ldub_code(s->pc++);
            disp = ldl_code(s->pc);
        /* for correct popl handling with esp */
        if (base == 4 && s->popl_esp_hack)
            disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                    gen_op_addq_A0_im(disp);
                gen_op_movl_A0_reg(base);
                    gen_op_addl_A0_im(disp);
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
                gen_op_movl_A0_im(disp);
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
                gen_op_addl_A0_reg_sN(scale, index);
            if (base == R_EBP || base == R_ESP)
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
                gen_op_addl_A0_seg(override);
            disp = lduw_code(s->pc);
            gen_op_movl_A0_im(disp);
            rm = 0; /* avoid SS override */
            disp = (int8_t)ldub_code(s->pc++);
            disp = lduw_code(s->pc);
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            gen_op_movl_A0_reg(R_ESI);
            gen_op_movl_A0_reg(R_EDI);
            gen_op_movl_A0_reg(R_EBP);
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
            if (rm == 2 || rm == 3 || rm == 6)
            gen_op_addl_A0_seg(override);
static void gen_nop_modrm(DisasContext *s, int modrm)
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
            code = ldub_code(s->pc++);

/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
    int override, must_add_seg;
    must_add_seg = s->addseg;
    if (s->override >= 0) {
        override = s->override;
#ifdef TARGET_X86_64
            gen_op_addq_A0_seg(override);
#endif
            gen_op_addl_A0_seg(override);

/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
            gen_op_mov_TN_reg(ot, 0, rm);
                gen_op_mov_reg_T0(ot, reg);
        gen_lea_modrm(s, modrm, &opreg, &disp);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
            gen_op_ld_T0_A0(ot + s->mem_index);
                gen_op_mov_reg_T0(ot, reg);

static inline uint32_t insn_get(DisasContext *s, int ot)
        ret = ldub_code(s->pc);
        ret = lduw_code(s->pc);
        ret = ldl_code(s->pc);

static inline int insn_const_size(unsigned int ot)

static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
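
/* Usage note on direct block chaining: for a same-page target the
   tcg_gen_goto_tb()/tcg_gen_exit_tb() pair lets the epilogue patch
   this TB so later executions jump straight into the successor block;
   a cross-page target must instead store eip and end the block, since
   the page mapping may change between executions. */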
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
    TranslationBlock *tb;

    jcc_op = (b >> 1) & 7;

    /* we optimize the cmp/jcc case */
        func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];

    /* some jumps are easy to compute */
            func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        func = gen_op_jnz_T0_label;

        l1 = gen_new_label();
        gen_goto_tb(s, 0, next_eip);
        gen_goto_tb(s, 1, val);

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);

static void gen_setcc(DisasContext *s, int b)
    jcc_op = (b >> 1) & 7;

    /* we optimize the cmp/jcc case */
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];

    /* some jumps are easy to compute */
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];

        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
static inline void gen_op_movl_T0_seg(int seg_reg)
{
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                     offsetof(CPUX86State,segs[seg_reg].selector));
}

static inline void gen_op_movl_seg_T0_vm(int seg_reg)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
    tcg_gen_st32_tl(cpu_T[0], cpu_env,
                    offsetof(CPUX86State,segs[seg_reg].selector));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
    tcg_gen_st_tl(cpu_T[0], cpu_env,
                  offsetof(CPUX86State,segs[seg_reg].base));
}

/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}

static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
    if (!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
    /* CRx and DRx reads/writes */
    case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
        }
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                           tcg_const_i32(type), tcg_const_i64(param));
        /* this is a special case as we do not know if the interception occurs
           so we assume there was none */
        if (s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                               tcg_const_i32(type), tcg_const_i64(param));
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
        if (s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_vmexit,
                               tcg_const_i32(type), tcg_const_i64(param));
            /* we can optimize this one so TBs don't get longer
               than up to vmexit */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}

static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}

/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
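
/* Worked example (assuming 32-bit code with ss32 set and no addseg):
   a push of a 32-bit value emits A0 = ESP - 4, stores T0 at SS:A0 via
   gen_op_st_T0_A0(s->dflag + 1 + s->mem_index) (dflag 1 -> OT_LONG),
   and writes A0 back to ESP - the classic decrement-then-store. */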
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}

static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for (i = 0; i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for (i = 0; i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
static void gen_enter(DisasContext *s, int esp_addend, int level)
    int ot, opsize;

#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}

static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}

/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}

/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}

static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}

static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}

static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}

static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
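
/* Illustrative expansion (mechanical): SSE_FOP(add) produces the
   four-entry initializer { helper_addps, helper_addpd, helper_addss,
   helper_addsd }, one helper per mandatory-prefix column (none, 0x66,
   0xF3, 0xF2) of sse_op_table1 below. */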
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
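
/* Indexing note: sse_op_table1[b][b1] is selected in gen_sse() with b
   the second opcode byte and b1 derived from the mandatory prefix:
   0 = none, 1 = 0x66 (PREFIX_DATA), 2 = 0xF3 (PREFIX_REPZ),
   3 = 0xF2 (PREFIX_REPNZ); SSE_SPECIAL entries are then re-dispatched
   on (b1 << 8) | b, which is what case labels like 0x1e7 encode. */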
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};

static void *sse_op_table3[4 * 3] = {
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};

static void *sse_op_table4[8][4] = {

static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2839 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2841 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2842 int modrm, mod, rm, reg, reg_addr, offset_addr;
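/* the mandatory prefix selects the column in the SSE tables:
   0 = none (MMX), 1 = 0x66, 2 = 0xF3, 3 = 0xF2;
   e.g. 66 0F 6F decodes as b = 0x6f with b1 = 1 -> movdqa */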
2846 if (s->prefix & PREFIX_DATA)
2848 else if (s->prefix & PREFIX_REPZ)
2850 else if (s->prefix & PREFIX_REPNZ)
2854 sse_op2 = sse_op_table1[b][b1];
2857 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2867 /* simple MMX/SSE operation */
2868 if (s->flags & HF_TS_MASK) {
2869 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2872 if (s->flags & HF_EM_MASK) {
2874 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2877 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2880 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2883 tcg_gen_helper_0_0(helper_emms);
2888 tcg_gen_helper_0_0(helper_emms);
2891 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2892 the static cpu state) */
2894 tcg_gen_helper_0_0(helper_enter_mmx);
2897 modrm = ldub_code(s->pc++);
2898 reg = ((modrm >> 3) & 7);
2901 mod = (modrm >> 6) & 3;
2902 if (sse_op2 == SSE_SPECIAL) {
2905 case 0x0e7: /* movntq */
2908 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2909 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2911 case 0x1e7: /* movntdq */
2912 case 0x02b: /* movntps */
2913 case 0x12b: /* movntpd */
2914 case 0x3f0: /* lddqu */
2917 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2918 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2920 case 0x6e: /* movd mm, ea */
2921 #ifdef TARGET_X86_64
2922 if (s->dflag == 2) {
2923 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2924 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2928 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2929 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2930 offsetof(CPUX86State,fpregs[reg].mmx));
2931 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2934 case 0x16e: /* movd xmm, ea */
2935 #ifdef TARGET_X86_64
2936 if (s->dflag == 2) {
2937 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2938 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2939 offsetof(CPUX86State,xmm_regs[reg]));
2940 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2944 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2945 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2946 offsetof(CPUX86State,xmm_regs[reg]));
2947 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2948 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
2951 case 0x6f: /* movq mm, ea */
2953 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2954 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2957 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
2958 offsetof(CPUX86State,fpregs[rm].mmx));
2959 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
2960 offsetof(CPUX86State,fpregs[reg].mmx));
2963 case 0x010: /* movups */
2964 case 0x110: /* movupd */
2965 case 0x028: /* movaps */
2966 case 0x128: /* movapd */
2967 case 0x16f: /* movdqa xmm, ea */
2968 case 0x26f: /* movdqu xmm, ea */
2970 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2971 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2973 rm = (modrm & 7) | REX_B(s);
2974 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2975 offsetof(CPUX86State,xmm_regs[rm]));
2978 case 0x210: /* movss xmm, ea */
2980 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2981 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2982 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2983 gen_op_movl_T0_0(); /* movss from memory zeroes the upper three lanes */
2984 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2985 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2986 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2988 rm = (modrm & 7) | REX_B(s);
2989 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2990 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2993 case 0x310: /* movsd xmm, ea */
2995 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2996 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2997 gen_op_movl_T0_0(); /* movsd from memory zeroes the high qword */
2998 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2999 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3001 rm = (modrm & 7) | REX_B(s);
3002 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3003 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3006 case 0x012: /* movlps */
3007 case 0x112: /* movlpd */
3009 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3010 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3013 rm = (modrm & 7) | REX_B(s);
3014 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3015 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3018 case 0x212: /* movsldup */
3020 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3021 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3023 rm = (modrm & 7) | REX_B(s);
3024 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3025 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3026 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3027 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3029 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3030 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3031 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3032 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3034 case 0x312: /* movddup */
3036 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3037 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3039 rm = (modrm & 7) | REX_B(s);
3040 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3041 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3043 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3044 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3046 case 0x016: /* movhps */
3047 case 0x116: /* movhpd */
3049 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3050 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3053 rm = (modrm & 7) | REX_B(s);
3054 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3055 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3058 case 0x216: /* movshdup */
3060 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3061 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3063 rm = (modrm & 7) | REX_B(s);
3064 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3065 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3066 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3067 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3069 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3070 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3071 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3072 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3074 case 0x7e: /* movd ea, mm */
3075 #ifdef TARGET_X86_64
3076 if (s->dflag == 2) {
3077 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3078 offsetof(CPUX86State,fpregs[reg].mmx));
3079 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3083 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3084 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3085 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3088 case 0x17e: /* movd ea, xmm */
3089 #ifdef TARGET_X86_64
3090 if (s->dflag == 2) {
3091 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3092 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3093 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3097 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3098 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3099 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3102 case 0x27e: /* movq xmm, ea */
3104 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3105 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3107 rm = (modrm & 7) | REX_B(s);
3108 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3109 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3111 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3113 case 0x7f: /* movq ea, mm */
3115 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3116 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3119 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3120 offsetof(CPUX86State,fpregs[reg].mmx));
3123 case 0x011: /* movups */
3124 case 0x111: /* movupd */
3125 case 0x029: /* movaps */
3126 case 0x129: /* movapd */
3127 case 0x17f: /* movdqa ea, xmm */
3128 case 0x27f: /* movdqu ea, xmm */
3130 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3131 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3133 rm = (modrm & 7) | REX_B(s);
3134 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3135 offsetof(CPUX86State,xmm_regs[reg]));
3138 case 0x211: /* movss ea, xmm */
3140 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3141 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3142 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3144 rm = (modrm & 7) | REX_B(s);
3145 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3146 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3149 case 0x311: /* movsd ea, xmm */
3151 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3152 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3154 rm = (modrm & 7) | REX_B(s);
3155 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3156 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3159 case 0x013: /* movlps */
3160 case 0x113: /* movlpd */
3162 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3163 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3168 case 0x017: /* movhps */
3169 case 0x117: /* movhpd */
3171 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3172 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3177 case 0x71: /* shift mm, im */
3180 case 0x171: /* shift xmm, im */
3183 val = ldub_code(s->pc++);
3185 gen_op_movl_T0_im(val);
3186 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3187 gen_op_movl_T0_0(); /* the high dword of the 64-bit shift count is zero */
3188 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3189 op1_offset = offsetof(CPUX86State,xmm_t0);
3191 gen_op_movl_T0_im(val);
3192 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3193 gen_op_movl_T0_0();
3194 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3195 op1_offset = offsetof(CPUX86State,mmx_t0);
3197 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3201 rm = (modrm & 7) | REX_B(s);
3202 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3205 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3207 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3208 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3209 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3211 case 0x050: /* movmskps */
3212 rm = (modrm & 7) | REX_B(s);
3213 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3214 offsetof(CPUX86State,xmm_regs[rm]));
3215 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3216 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3217 gen_op_mov_reg_T0(OT_LONG, reg);
3219 case 0x150: /* movmskpd */
3220 rm = (modrm & 7) | REX_B(s);
3221 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3222 offsetof(CPUX86State,xmm_regs[rm]));
3223 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3224 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3225 gen_op_mov_reg_T0(OT_LONG, reg);
3227 case 0x02a: /* cvtpi2ps */
3228 case 0x12a: /* cvtpi2pd */
3229 tcg_gen_helper_0_0(helper_enter_mmx);
3231 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3232 op2_offset = offsetof(CPUX86State,mmx_t0);
3233 gen_ldq_env_A0(s->mem_index, op2_offset);
3236 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3238 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3239 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3240 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3243 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3247 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3251 case 0x22a: /* cvtsi2ss */
3252 case 0x32a: /* cvtsi2sd */
3253 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3254 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3255 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3256 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3257 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3258 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3259 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3261 case 0x02c: /* cvttps2pi */
3262 case 0x12c: /* cvttpd2pi */
3263 case 0x02d: /* cvtps2pi */
3264 case 0x12d: /* cvtpd2pi */
3265 tcg_gen_helper_0_0(helper_enter_mmx);
3267 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3268 op2_offset = offsetof(CPUX86State,xmm_t0);
3269 gen_ldo_env_A0(s->mem_index, op2_offset);
3271 rm = (modrm & 7) | REX_B(s);
3272 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3274 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3275 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3276 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3279 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3282 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3285 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3288 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3292 case 0x22c: /* cvttss2si */
3293 case 0x32c: /* cvttsd2si */
3294 case 0x22d: /* cvtss2si */
3295 case 0x32d: /* cvtsd2si */
3296 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3298 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3300 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3302 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3303 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3305 op2_offset = offsetof(CPUX86State,xmm_t0);
3307 rm = (modrm & 7) | REX_B(s);
3308 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3310 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3312 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3313 if (ot == OT_LONG) {
3314 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3315 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3317 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3319 gen_op_mov_reg_T0(ot, reg);
3321 case 0xc4: /* pinsrw */
3324 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3325 val = ldub_code(s->pc++);
3328 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3329 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3332 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3333 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3336 case 0xc5: /* pextrw */
3340 val = ldub_code(s->pc++);
3343 rm = (modrm & 7) | REX_B(s);
3344 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3345 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3349 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3350 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3352 reg = ((modrm >> 3) & 7) | rex_r;
3353 gen_op_mov_reg_T0(OT_LONG, reg);
3355 case 0x1d6: /* movq ea, xmm */
3357 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3358 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3360 rm = (modrm & 7) | REX_B(s);
3361 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3362 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3363 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3366 case 0x2d6: /* movq2dq */
3367 tcg_gen_helper_0_0(helper_enter_mmx);
3369 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3370 offsetof(CPUX86State,fpregs[rm].mmx));
3371 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3373 case 0x3d6: /* movdq2q */
3374 tcg_gen_helper_0_0(helper_enter_mmx);
3375 rm = (modrm & 7) | REX_B(s);
3376 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3377 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3379 case 0xd7: /* pmovmskb */
3384 rm = (modrm & 7) | REX_B(s);
3385 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3386 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3389 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3390 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3392 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3393 reg = ((modrm >> 3) & 7) | rex_r;
3394 gen_op_mov_reg_T0(OT_LONG, reg);
3400 /* generic MMX or SSE operation */
3402 case 0x70: /* pshufx insn */
3403 case 0xc6: /* pshufx insn */
3404 case 0xc2: /* compare insns */
3411 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3413 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3414 op2_offset = offsetof(CPUX86State,xmm_t0);
3415 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3417 /* specific case for SSE scalar instructions: load only 32 or 64 bits */
3420 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3421 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3424 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3427 gen_ldo_env_A0(s->mem_index, op2_offset);
3430 rm = (modrm & 7) | REX_B(s);
3431 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3434 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3436 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3437 op2_offset = offsetof(CPUX86State,mmx_t0);
3438 gen_ldq_env_A0(s->mem_index, op2_offset);
3441 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3445 case 0x0f: /* 3DNow! data insns */
3446 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3448 val = ldub_code(s->pc++);
3449 sse_op2 = sse_op_table5[val];
3452 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3453 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3454 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3456 case 0x70: /* pshufx insn */
3457 case 0xc6: /* pshufx insn */
3458 val = ldub_code(s->pc++);
3459 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3460 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3461 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3465 val = ldub_code(s->pc++);
3468 sse_op2 = sse_op_table4[val][b1];
3469 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3470 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3471 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3474 /* maskmov: the store address is implicit (DS:rDI), so we must prepare A0 */
3477 #ifdef TARGET_X86_64
3478 if (s->aflag == 2) {
3479 gen_op_movq_A0_reg(R_EDI);
3483 gen_op_movl_A0_reg(R_EDI);
3485 gen_op_andl_A0_ffff();
3487 gen_add_A0_ds_seg(s);
3489 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3490 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3491 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3494 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3495 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3496 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
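/* (u)comiss and (u)comisd leave their result in EFLAGS */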
3499 if (b == 0x2e || b == 0x2f) {
3500 s->cc_op = CC_OP_EFLAGS;
3505 /* convert one instruction. s->is_jmp is set if the translation must
3506 be stopped. Return the next pc value */
3507 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3509 int b, prefixes, aflag, dflag;
3511 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3512 target_ulong next_eip, tval;
3522 #ifdef TARGET_X86_64
3527 s->rip_offset = 0; /* for relative ip address */
3529 b = ldub_code(s->pc);
3531 /* check prefixes */
3532 #ifdef TARGET_X86_64
3536 prefixes |= PREFIX_REPZ;
3539 prefixes |= PREFIX_REPNZ;
3542 prefixes |= PREFIX_LOCK;
3563 prefixes |= PREFIX_DATA;
3566 prefixes |= PREFIX_ADR;
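/* REX prefix: W selects 64-bit operand size; R, X and B are pre-shifted
   so they can be OR'ed directly into the 3-bit ModRM reg/index/base fields */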
3570 rex_w = (b >> 3) & 1;
3571 rex_r = (b & 0x4) << 1;
3572 s->rex_x = (b & 0x2) << 2;
3573 REX_B(s) = (b & 0x1) << 3;
3574 x86_64_hregs = 1; /* select uniform byte register addressing */
3578 /* 0x66 is ignored if rex.w is set */
3581 if (prefixes & PREFIX_DATA)
3584 if (!(prefixes & PREFIX_ADR))
3591 prefixes |= PREFIX_REPZ;
3594 prefixes |= PREFIX_REPNZ;
3597 prefixes |= PREFIX_LOCK;
3618 prefixes |= PREFIX_DATA;
3621 prefixes |= PREFIX_ADR;
3624 if (prefixes & PREFIX_DATA)
3626 if (prefixes & PREFIX_ADR)
3630 s->prefix = prefixes;
3634 /* lock generation */
3635 if (prefixes & PREFIX_LOCK)
3636 tcg_gen_helper_0_0(helper_lock);
3638 /* now check op code */
3642 /**************************/
3643 /* extended op code */
3644 b = ldub_code(s->pc++) | 0x100;
3647 /**************************/
3665 ot = dflag + OT_WORD;
3668 case 0: /* OP Ev, Gv */
3669 modrm = ldub_code(s->pc++);
3670 reg = ((modrm >> 3) & 7) | rex_r;
3671 mod = (modrm >> 6) & 3;
3672 rm = (modrm & 7) | REX_B(s);
3674 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3676 } else if (op == OP_XORL && rm == reg) {
3678 /* xor reg, reg optimisation */
3680 s->cc_op = CC_OP_LOGICB + ot;
3681 gen_op_mov_reg_T0(ot, reg);
3682 gen_op_update1_cc();
3687 gen_op_mov_TN_reg(ot, 1, reg);
3688 gen_op(s, op, ot, opreg);
3690 case 1: /* OP Gv, Ev */
3691 modrm = ldub_code(s->pc++);
3692 mod = (modrm >> 6) & 3;
3693 reg = ((modrm >> 3) & 7) | rex_r;
3694 rm = (modrm & 7) | REX_B(s);
3696 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3697 gen_op_ld_T1_A0(ot + s->mem_index);
3698 } else if (op == OP_XORL && rm == reg) {
3701 gen_op_mov_TN_reg(ot, 1, rm);
3703 gen_op(s, op, ot, reg);
3705 case 2: /* OP A, Iv */
3706 val = insn_get(s, ot);
3707 gen_op_movl_T1_im(val);
3708 gen_op(s, op, ot, OR_EAX);
3714 case 0x80: /* GRP1 */
3724 ot = dflag + OT_WORD;
3726 modrm = ldub_code(s->pc++);
3727 mod = (modrm >> 6) & 3;
3728 rm = (modrm & 7) | REX_B(s);
3729 op = (modrm >> 3) & 7;
3735 s->rip_offset = insn_const_size(ot);
3736 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3747 val = insn_get(s, ot);
3750 val = (int8_t)insn_get(s, OT_BYTE);
3753 gen_op_movl_T1_im(val);
3754 gen_op(s, op, ot, opreg);
3758 /**************************/
3759 /* inc, dec, and other misc arith */
3760 case 0x40 ... 0x47: /* inc Gv */
3761 ot = dflag ? OT_LONG : OT_WORD;
3762 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3764 case 0x48 ... 0x4f: /* dec Gv */
3765 ot = dflag ? OT_LONG : OT_WORD;
3766 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3768 case 0xf6: /* GRP3 */
3773 ot = dflag + OT_WORD;
3775 modrm = ldub_code(s->pc++);
3776 mod = (modrm >> 6) & 3;
3777 rm = (modrm & 7) | REX_B(s);
3778 op = (modrm >> 3) & 7;
3781 s->rip_offset = insn_const_size(ot);
3782 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3783 gen_op_ld_T0_A0(ot + s->mem_index);
3785 gen_op_mov_TN_reg(ot, 0, rm);
3790 val = insn_get(s, ot);
3791 gen_op_movl_T1_im(val);
3792 gen_op_testl_T0_T1_cc();
3793 s->cc_op = CC_OP_LOGICB + ot;
3796 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3798 gen_op_st_T0_A0(ot + s->mem_index);
3800 gen_op_mov_reg_T0(ot, rm);
3804 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3806 gen_op_st_T0_A0(ot + s->mem_index);
3808 gen_op_mov_reg_T0(ot, rm);
3810 gen_op_update_neg_cc();
3811 s->cc_op = CC_OP_SUBB + ot;
3816 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3817 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3818 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3819 /* XXX: use 32 bit mul which could be faster */
3820 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3821 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3822 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3823 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
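/* CC_OP_MULB later derives CF/OF from cpu_cc_src: a nonzero high
   byte of the product sets both */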
3824 s->cc_op = CC_OP_MULB;
3827 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3828 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3829 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3830 /* XXX: use 32 bit mul which could be faster */
3831 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3832 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3833 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3834 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3835 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3836 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3837 s->cc_op = CC_OP_MULW;
3841 #ifdef TARGET_X86_64
3842 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3843 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3844 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3845 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3846 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3847 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3848 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3849 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3850 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3854 t0 = tcg_temp_new(TCG_TYPE_I64);
3855 t1 = tcg_temp_new(TCG_TYPE_I64);
3856 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3857 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3858 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3859 tcg_gen_mul_i64(t0, t0, t1);
3860 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3861 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3862 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3863 tcg_gen_shri_i64(t0, t0, 32);
3864 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3865 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3866 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3869 s->cc_op = CC_OP_MULL;
3871 #ifdef TARGET_X86_64
3873 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3874 s->cc_op = CC_OP_MULQ;
3882 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3883 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3884 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3885 /* XXX: use 32 bit mul which could be faster */
3886 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3887 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3888 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3889 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
3890 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
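/* cc_src = result minus its sign extension: nonzero iff the signed
   product overflows 8 bits, which sets CF/OF */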
3891 s->cc_op = CC_OP_MULB;
3894 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3895 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
3896 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
3897 /* XXX: use 32 bit mul which could be faster */
3898 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3899 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3900 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3901 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
3902 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3903 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3904 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3905 s->cc_op = CC_OP_MULW;
3909 #ifdef TARGET_X86_64
3910 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3911 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
3912 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
3913 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3914 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3915 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3916 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
3917 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3918 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3919 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3923 t0 = tcg_temp_new(TCG_TYPE_I64);
3924 t1 = tcg_temp_new(TCG_TYPE_I64);
3925 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3926 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
3927 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
3928 tcg_gen_mul_i64(t0, t0, t1);
3929 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3930 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3931 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3932 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
3933 tcg_gen_shri_i64(t0, t0, 32);
3934 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3935 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3936 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3939 s->cc_op = CC_OP_MULL;
3941 #ifdef TARGET_X86_64
3943 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
3944 s->cc_op = CC_OP_MULQ;
3952 gen_jmp_im(pc_start - s->cs_base);
3953 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3956 gen_jmp_im(pc_start - s->cs_base);
3957 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3961 gen_jmp_im(pc_start - s->cs_base);
3962 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3964 #ifdef TARGET_X86_64
3966 gen_jmp_im(pc_start - s->cs_base);
3967 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3975 gen_jmp_im(pc_start - s->cs_base);
3976 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3979 gen_jmp_im(pc_start - s->cs_base);
3980 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3984 gen_jmp_im(pc_start - s->cs_base);
3985 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3987 #ifdef TARGET_X86_64
3989 gen_jmp_im(pc_start - s->cs_base);
3990 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4000 case 0xfe: /* GRP4 */
4001 case 0xff: /* GRP5 */
4005 ot = dflag + OT_WORD;
4007 modrm = ldub_code(s->pc++);
4008 mod = (modrm >> 6) & 3;
4009 rm = (modrm & 7) | REX_B(s);
4010 op = (modrm >> 3) & 7;
4011 if (op >= 2 && b == 0xfe) {
4015 if (op == 2 || op == 4) {
4016 /* operand size for jumps is 64 bit */
4018 } else if (op == 3 || op == 5) {
4019 /* for far calls and jumps, the operand is 16 or 32 bit, even in long mode */
4021 ot = dflag ? OT_LONG : OT_WORD;
4022 } else if (op == 6) {
4023 /* default push size is 64 bit */
4024 ot = dflag ? OT_QUAD : OT_WORD;
4028 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4029 if (op >= 2 && op != 3 && op != 5)
4030 gen_op_ld_T0_A0(ot + s->mem_index);
4032 gen_op_mov_TN_reg(ot, 0, rm);
4036 case 0: /* inc Ev */
4041 gen_inc(s, ot, opreg, 1);
4043 case 1: /* dec Ev */
4048 gen_inc(s, ot, opreg, -1);
4050 case 2: /* call Ev */
4051 /* XXX: when the operand comes from memory, the 'and' is unnecessary */
4053 gen_op_andl_T0_ffff();
4054 next_eip = s->pc - s->cs_base;
4055 gen_movtl_T1_im(next_eip);
4060 case 3: /* lcall Ev */
4061 gen_op_ld_T1_A0(ot + s->mem_index);
4062 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4063 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4065 if (s->pe && !s->vm86) {
4066 if (s->cc_op != CC_OP_DYNAMIC)
4067 gen_op_set_cc_op(s->cc_op);
4068 gen_jmp_im(pc_start - s->cs_base);
4069 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4070 tcg_gen_helper_0_4(helper_lcall_protected,
4071 cpu_tmp2_i32, cpu_T[1],
4072 tcg_const_i32(dflag),
4073 tcg_const_i32(s->pc - pc_start));
4075 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4076 tcg_gen_helper_0_4(helper_lcall_real,
4077 cpu_tmp2_i32, cpu_T[1],
4078 tcg_const_i32(dflag),
4079 tcg_const_i32(s->pc - s->cs_base));
4083 case 4: /* jmp Ev */
4085 gen_op_andl_T0_ffff();
4089 case 5: /* ljmp Ev */
4090 gen_op_ld_T1_A0(ot + s->mem_index);
4091 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4092 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4094 if (s->pe && !s->vm86) {
4095 if (s->cc_op != CC_OP_DYNAMIC)
4096 gen_op_set_cc_op(s->cc_op);
4097 gen_jmp_im(pc_start - s->cs_base);
4098 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4099 tcg_gen_helper_0_3(helper_ljmp_protected,
4102 tcg_const_i32(s->pc - pc_start));
4104 gen_op_movl_seg_T0_vm(R_CS);
4105 gen_op_movl_T0_T1();
4110 case 6: /* push Ev */
4118 case 0x84: /* test Ev, Gv */
4123 ot = dflag + OT_WORD;
4125 modrm = ldub_code(s->pc++);
4126 mod = (modrm >> 6) & 3;
4127 rm = (modrm & 7) | REX_B(s);
4128 reg = ((modrm >> 3) & 7) | rex_r;
4130 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4131 gen_op_mov_TN_reg(ot, 1, reg);
4132 gen_op_testl_T0_T1_cc();
4133 s->cc_op = CC_OP_LOGICB + ot;
4136 case 0xa8: /* test eAX, Iv */
4141 ot = dflag + OT_WORD;
4142 val = insn_get(s, ot);
4144 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4145 gen_op_movl_T1_im(val);
4146 gen_op_testl_T0_T1_cc();
4147 s->cc_op = CC_OP_LOGICB + ot;
4150 case 0x98: /* CWDE/CBW */
4151 #ifdef TARGET_X86_64
4153 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4154 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4155 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4159 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4160 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4161 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4163 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4164 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4165 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4168 case 0x99: /* CDQ/CWD */
4169 #ifdef TARGET_X86_64
4171 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4172 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4173 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4177 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4178 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4179 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4180 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4182 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4183 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4184 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4185 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4188 case 0x1af: /* imul Gv, Ev */
4189 case 0x69: /* imul Gv, Ev, I */
4191 ot = dflag + OT_WORD;
4192 modrm = ldub_code(s->pc++);
4193 reg = ((modrm >> 3) & 7) | rex_r;
4195 s->rip_offset = insn_const_size(ot);
4198 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4200 val = insn_get(s, ot);
4201 gen_op_movl_T1_im(val);
4202 } else if (b == 0x6b) {
4203 val = (int8_t)insn_get(s, OT_BYTE);
4204 gen_op_movl_T1_im(val);
4206 gen_op_mov_TN_reg(ot, 1, reg);
4209 #ifdef TARGET_X86_64
4210 if (ot == OT_QUAD) {
4211 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4214 if (ot == OT_LONG) {
4215 #ifdef TARGET_X86_64
4216 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4217 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4218 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4219 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4220 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4221 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4225 t0 = tcg_temp_new(TCG_TYPE_I64);
4226 t1 = tcg_temp_new(TCG_TYPE_I64);
4227 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4228 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4229 tcg_gen_mul_i64(t0, t0, t1);
4230 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4231 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4232 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4233 tcg_gen_shri_i64(t0, t0, 32);
4234 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4235 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4239 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4240 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4241 /* XXX: use 32 bit mul which could be faster */
4242 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4243 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4244 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4245 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4247 gen_op_mov_reg_T0(ot, reg);
4248 s->cc_op = CC_OP_MULB + ot;
4251 case 0x1c1: /* xadd Ev, Gv */
4255 ot = dflag + OT_WORD;
4256 modrm = ldub_code(s->pc++);
4257 reg = ((modrm >> 3) & 7) | rex_r;
4258 mod = (modrm >> 6) & 3;
4260 rm = (modrm & 7) | REX_B(s);
4261 gen_op_mov_TN_reg(ot, 0, reg);
4262 gen_op_mov_TN_reg(ot, 1, rm);
4263 gen_op_addl_T0_T1();
4264 gen_op_mov_reg_T1(ot, reg);
4265 gen_op_mov_reg_T0(ot, rm);
4267 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4268 gen_op_mov_TN_reg(ot, 0, reg);
4269 gen_op_ld_T1_A0(ot + s->mem_index);
4270 gen_op_addl_T0_T1();
4271 gen_op_st_T0_A0(ot + s->mem_index);
4272 gen_op_mov_reg_T1(ot, reg);
4274 gen_op_update2_cc();
4275 s->cc_op = CC_OP_ADDB + ot;
4278 case 0x1b1: /* cmpxchg Ev, Gv */
4285 ot = dflag + OT_WORD;
4286 modrm = ldub_code(s->pc++);
4287 reg = ((modrm >> 3) & 7) | rex_r;
4288 mod = (modrm >> 6) & 3;
4289 gen_op_mov_TN_reg(ot, 1, reg);
4291 rm = (modrm & 7) | REX_B(s);
4292 gen_op_mov_TN_reg(ot, 0, rm);
4294 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4295 gen_op_ld_T0_A0(ot + s->mem_index);
4296 rm = 0; /* avoid warning */
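/* T3 = EAX - dest: if they are equal the branch skips the next two ops
   and the new value in T1 is stored back; otherwise T1 is reset to the
   old value (so the store leaves dest unchanged) and EAX receives it */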
4298 label1 = gen_new_label();
4299 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4300 tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4301 gen_extu(ot, cpu_T3);
4302 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4303 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4304 gen_op_mov_reg_T0(ot, R_EAX);
4305 gen_set_label(label1);
4307 gen_op_mov_reg_T1(ot, rm);
4309 gen_op_st_T1_A0(ot + s->mem_index);
4311 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4312 tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4313 s->cc_op = CC_OP_SUBB + ot;
4316 case 0x1c7: /* cmpxchg8b */
4317 modrm = ldub_code(s->pc++);
4318 mod = (modrm >> 6) & 3;
4319 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4321 gen_jmp_im(pc_start - s->cs_base);
4322 if (s->cc_op != CC_OP_DYNAMIC)
4323 gen_op_set_cc_op(s->cc_op);
4324 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4325 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4326 s->cc_op = CC_OP_EFLAGS;
4329 /**************************/
4331 case 0x50 ... 0x57: /* push */
4332 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4335 case 0x58 ... 0x5f: /* pop */
4337 ot = dflag ? OT_QUAD : OT_WORD;
4339 ot = dflag + OT_WORD;
4342 /* NOTE: order is important for pop %sp */
4344 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4346 case 0x60: /* pusha */
4351 case 0x61: /* popa */
4356 case 0x68: /* push Iv */
4359 ot = dflag ? OT_QUAD : OT_WORD;
4361 ot = dflag + OT_WORD;
4364 val = insn_get(s, ot);
4366 val = (int8_t)insn_get(s, OT_BYTE);
4367 gen_op_movl_T0_im(val);
4370 case 0x8f: /* pop Ev */
4372 ot = dflag ? OT_QUAD : OT_WORD;
4374 ot = dflag + OT_WORD;
4376 modrm = ldub_code(s->pc++);
4377 mod = (modrm >> 6) & 3;
4380 /* NOTE: order is important for pop %sp */
4382 rm = (modrm & 7) | REX_B(s);
4383 gen_op_mov_reg_T0(ot, rm);
4385 /* NOTE: order is important too for MMU exceptions */
4386 s->popl_esp_hack = 1 << ot;
4387 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4388 s->popl_esp_hack = 0;
4392 case 0xc8: /* enter */
4395 val = lduw_code(s->pc);
4397 level = ldub_code(s->pc++);
4398 gen_enter(s, val, level);
4401 case 0xc9: /* leave */
4402 /* XXX: exception not precise (ESP is updated before potential exception) */
4404 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4405 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4406 } else if (s->ss32) {
4407 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4408 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4410 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4411 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4415 ot = dflag ? OT_QUAD : OT_WORD;
4417 ot = dflag + OT_WORD;
4419 gen_op_mov_reg_T0(ot, R_EBP);
4422 case 0x06: /* push es */
4423 case 0x0e: /* push cs */
4424 case 0x16: /* push ss */
4425 case 0x1e: /* push ds */
4428 gen_op_movl_T0_seg(b >> 3);
4431 case 0x1a0: /* push fs */
4432 case 0x1a8: /* push gs */
4433 gen_op_movl_T0_seg((b >> 3) & 7);
4436 case 0x07: /* pop es */
4437 case 0x17: /* pop ss */
4438 case 0x1f: /* pop ds */
4443 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4446 /* if reg == SS, inhibit interrupts/trace. */
4447 /* If several instructions disable interrupts, only the first one takes effect. */
4449 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4450 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4454 gen_jmp_im(s->pc - s->cs_base);
4458 case 0x1a1: /* pop fs */
4459 case 0x1a9: /* pop gs */
4461 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4464 gen_jmp_im(s->pc - s->cs_base);
4469 /**************************/
4472 case 0x89: /* mov Gv, Ev */
4476 ot = dflag + OT_WORD;
4477 modrm = ldub_code(s->pc++);
4478 reg = ((modrm >> 3) & 7) | rex_r;
4480 /* generate a generic store */
4481 gen_ldst_modrm(s, modrm, ot, reg, 1);
4484 case 0xc7: /* mov Ev, Iv */
4488 ot = dflag + OT_WORD;
4489 modrm = ldub_code(s->pc++);
4490 mod = (modrm >> 6) & 3;
4492 s->rip_offset = insn_const_size(ot);
4493 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4495 val = insn_get(s, ot);
4496 gen_op_movl_T0_im(val);
4498 gen_op_st_T0_A0(ot + s->mem_index);
4500 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4503 case 0x8b: /* mov Ev, Gv */
4507 ot = OT_WORD + dflag;
4508 modrm = ldub_code(s->pc++);
4509 reg = ((modrm >> 3) & 7) | rex_r;
4511 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4512 gen_op_mov_reg_T0(ot, reg);
4514 case 0x8e: /* mov seg, Gv */
4515 modrm = ldub_code(s->pc++);
4516 reg = (modrm >> 3) & 7;
4517 if (reg >= 6 || reg == R_CS)
4519 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4520 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4522 /* if reg == SS, inhibit interrupts/trace */
4523 /* If several instructions disable interrupts, only the first one takes effect. */
4525 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4526 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4530 gen_jmp_im(s->pc - s->cs_base);
4534 case 0x8c: /* mov Gv, seg */
4535 modrm = ldub_code(s->pc++);
4536 reg = (modrm >> 3) & 7;
4537 mod = (modrm >> 6) & 3;
4540 gen_op_movl_T0_seg(reg);
4542 ot = OT_WORD + dflag;
4545 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4548 case 0x1b6: /* movzbS Gv, Eb */
4549 case 0x1b7: /* movzwS Gv, Eb */
4550 case 0x1be: /* movsbS Gv, Eb */
4551 case 0x1bf: /* movswS Gv, Eb */
4554 /* d_ot is the size of destination */
4555 d_ot = dflag + OT_WORD;
4556 /* ot is the size of source */
4557 ot = (b & 1) + OT_BYTE;
4558 modrm = ldub_code(s->pc++);
4559 reg = ((modrm >> 3) & 7) | rex_r;
4560 mod = (modrm >> 6) & 3;
4561 rm = (modrm & 7) | REX_B(s);
4564 gen_op_mov_TN_reg(ot, 0, rm);
4565 switch(ot | (b & 8)) {
4567 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4570 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4573 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4577 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4580 gen_op_mov_reg_T0(d_ot, reg);
4582 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4584 gen_op_lds_T0_A0(ot + s->mem_index);
4586 gen_op_ldu_T0_A0(ot + s->mem_index);
4588 gen_op_mov_reg_T0(d_ot, reg);
4593 case 0x8d: /* lea */
4594 ot = dflag + OT_WORD;
4595 modrm = ldub_code(s->pc++);
4596 mod = (modrm >> 6) & 3;
4599 reg = ((modrm >> 3) & 7) | rex_r;
4600 /* we must ensure that no segment is added */
4604 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4606 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4609 case 0xa0: /* mov EAX, Ov */
4611 case 0xa2: /* mov Ov, EAX */
4614 target_ulong offset_addr;
4619 ot = dflag + OT_WORD;
4620 #ifdef TARGET_X86_64
4621 if (s->aflag == 2) {
4622 offset_addr = ldq_code(s->pc);
4624 gen_op_movq_A0_im(offset_addr);
4629 offset_addr = insn_get(s, OT_LONG);
4631 offset_addr = insn_get(s, OT_WORD);
4633 gen_op_movl_A0_im(offset_addr);
4635 gen_add_A0_ds_seg(s);
4637 gen_op_ld_T0_A0(ot + s->mem_index);
4638 gen_op_mov_reg_T0(ot, R_EAX);
4640 gen_op_mov_TN_reg(ot, 0, R_EAX);
4641 gen_op_st_T0_A0(ot + s->mem_index);
4645 case 0xd7: /* xlat */
4646 #ifdef TARGET_X86_64
4647 if (s->aflag == 2) {
4648 gen_op_movq_A0_reg(R_EBX);
4649 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4650 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4651 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4655 gen_op_movl_A0_reg(R_EBX);
4656 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4657 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4658 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4660 gen_op_andl_A0_ffff();
4662 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4664 gen_add_A0_ds_seg(s);
4665 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4666 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4668 case 0xb0 ... 0xb7: /* mov R, Ib */
4669 val = insn_get(s, OT_BYTE);
4670 gen_op_movl_T0_im(val);
4671 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4673 case 0xb8 ... 0xbf: /* mov R, Iv */
4674 #ifdef TARGET_X86_64
4678 tmp = ldq_code(s->pc);
4680 reg = (b & 7) | REX_B(s);
4681 gen_movtl_T0_im(tmp);
4682 gen_op_mov_reg_T0(OT_QUAD, reg);
4686 ot = dflag ? OT_LONG : OT_WORD;
4687 val = insn_get(s, ot);
4688 reg = (b & 7) | REX_B(s);
4689 gen_op_movl_T0_im(val);
4690 gen_op_mov_reg_T0(ot, reg);
4694 case 0x91 ... 0x97: /* xchg R, EAX */
4695 ot = dflag + OT_WORD;
4696 reg = (b & 7) | REX_B(s);
4700 case 0x87: /* xchg Ev, Gv */
4704 ot = dflag + OT_WORD;
4705 modrm = ldub_code(s->pc++);
4706 reg = ((modrm >> 3) & 7) | rex_r;
4707 mod = (modrm >> 6) & 3;
4709 rm = (modrm & 7) | REX_B(s);
4711 gen_op_mov_TN_reg(ot, 0, reg);
4712 gen_op_mov_TN_reg(ot, 1, rm);
4713 gen_op_mov_reg_T0(ot, rm);
4714 gen_op_mov_reg_T1(ot, reg);
4716 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4717 gen_op_mov_TN_reg(ot, 0, reg);
4718 /* for xchg, lock is implicit */
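/* the lock/unlock helpers keep the load/store pair atomic even when
   no explicit LOCK prefix is present */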
4719 if (!(prefixes & PREFIX_LOCK))
4720 tcg_gen_helper_0_0(helper_lock);
4721 gen_op_ld_T1_A0(ot + s->mem_index);
4722 gen_op_st_T0_A0(ot + s->mem_index);
4723 if (!(prefixes & PREFIX_LOCK))
4724 tcg_gen_helper_0_0(helper_unlock);
4725 gen_op_mov_reg_T1(ot, reg);
4728 case 0xc4: /* les Gv */
4733 case 0xc5: /* lds Gv */
4738 case 0x1b2: /* lss Gv */
4741 case 0x1b4: /* lfs Gv */
4744 case 0x1b5: /* lgs Gv */
4747 ot = dflag ? OT_LONG : OT_WORD;
4748 modrm = ldub_code(s->pc++);
4749 reg = ((modrm >> 3) & 7) | rex_r;
4750 mod = (modrm >> 6) & 3;
4753 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4754 gen_op_ld_T1_A0(ot + s->mem_index);
4755 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4756 /* load the segment first to handle exceptions properly */
4757 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4758 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4759 /* then put the data */
4760 gen_op_mov_reg_T1(ot, reg);
4762 gen_jmp_im(s->pc - s->cs_base);
4767 /************************/
4778 ot = dflag + OT_WORD;
4780 modrm = ldub_code(s->pc++);
4781 mod = (modrm >> 6) & 3;
4782 op = (modrm >> 3) & 7;
4788 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4791 opreg = (modrm & 7) | REX_B(s);
4796 gen_shift(s, op, ot, opreg, OR_ECX);
4799 shift = ldub_code(s->pc++);
4801 gen_shifti(s, op, ot, opreg, shift);
4816 case 0x1a4: /* shld imm */
4820 case 0x1a5: /* shld cl */
4824 case 0x1ac: /* shrd imm */
4828 case 0x1ad: /* shrd cl */
4832 ot = dflag + OT_WORD;
4833 modrm = ldub_code(s->pc++);
4834 mod = (modrm >> 6) & 3;
4835 rm = (modrm & 7) | REX_B(s);
4836 reg = ((modrm >> 3) & 7) | rex_r;
4838 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4843 gen_op_mov_TN_reg(ot, 1, reg);
4846 val = ldub_code(s->pc++);
4847 tcg_gen_movi_tl(cpu_T3, val);
4849 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
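/* T1 holds the second operand and T3 the shift count (immediate or CL) */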
4851 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4854 /************************/
4857 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4858 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4859 /* XXX: what should be done on an illegal op? */
4860 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4863 modrm = ldub_code(s->pc++);
4864 mod = (modrm >> 6) & 3;
4866 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4869 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4871 case 0x00 ... 0x07: /* fxxxs */
4872 case 0x10 ... 0x17: /* fixxxl */
4873 case 0x20 ... 0x27: /* fxxxl */
4874 case 0x30 ... 0x37: /* fixxx */
4881 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4882 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4883 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4886 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4887 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4888 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4891 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4892 (s->mem_index >> 2) - 1);
4893 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4897 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4898 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4899 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4903 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4905 /* fcomp needs pop */
4906 tcg_gen_helper_0_0(helper_fpop);
4910 case 0x08: /* flds */
4911 case 0x0a: /* fsts */
4912 case 0x0b: /* fstps */
4913 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4914 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4915 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4920 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4921 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4922 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4925 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4926 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4927 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4930 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4931 (s->mem_index >> 2) - 1);
4932 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4936 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4937 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4938 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4943 /* XXX: the corresponding CPUID bit must be tested! */
4946 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4947 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4948 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4951 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4952 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4953 (s->mem_index >> 2) - 1);
4957 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4958 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4959 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4962 tcg_gen_helper_0_0(helper_fpop);
4967 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4968 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4969 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4972 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
4973 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4974 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4977 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
4978 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4979 (s->mem_index >> 2) - 1);
4983 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
4984 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4985 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4989 tcg_gen_helper_0_0(helper_fpop);
4993 case 0x0c: /* fldenv mem */
4994 if (s->cc_op != CC_OP_DYNAMIC)
4995 gen_op_set_cc_op(s->cc_op);
4996 gen_jmp_im(pc_start - s->cs_base);
4997 tcg_gen_helper_0_2(helper_fldenv,
4998 cpu_A0, tcg_const_i32(s->dflag));
5000 case 0x0d: /* fldcw mem */
5001 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5002 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5003 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5005 case 0x0e: /* fnstenv mem */
5006 if (s->cc_op != CC_OP_DYNAMIC)
5007 gen_op_set_cc_op(s->cc_op);
5008 gen_jmp_im(pc_start - s->cs_base);
5009 tcg_gen_helper_0_2(helper_fstenv,
5010 cpu_A0, tcg_const_i32(s->dflag));
5012 case 0x0f: /* fnstcw mem */
5013 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5014 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5015 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5017 case 0x1d: /* fldt mem */
5018 if (s->cc_op != CC_OP_DYNAMIC)
5019 gen_op_set_cc_op(s->cc_op);
5020 gen_jmp_im(pc_start - s->cs_base);
5021 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5023 case 0x1f: /* fstpt mem */
5024 if (s->cc_op != CC_OP_DYNAMIC)
5025 gen_op_set_cc_op(s->cc_op);
5026 gen_jmp_im(pc_start - s->cs_base);
5027 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5028 tcg_gen_helper_0_0(helper_fpop);
5030 case 0x2c: /* frstor mem */
5031 if (s->cc_op != CC_OP_DYNAMIC)
5032 gen_op_set_cc_op(s->cc_op);
5033 gen_jmp_im(pc_start - s->cs_base);
5034 tcg_gen_helper_0_2(helper_frstor,
5035 cpu_A0, tcg_const_i32(s->dflag));
5037 case 0x2e: /* fnsave mem */
5038 if (s->cc_op != CC_OP_DYNAMIC)
5039 gen_op_set_cc_op(s->cc_op);
5040 gen_jmp_im(pc_start - s->cs_base);
5041 tcg_gen_helper_0_2(helper_fsave,
5042 cpu_A0, tcg_const_i32(s->dflag));
5044 case 0x2f: /* fnstsw mem */
5045 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5046 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5047 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5049 case 0x3c: /* fbld */
5050 if (s->cc_op != CC_OP_DYNAMIC)
5051 gen_op_set_cc_op(s->cc_op);
5052 gen_jmp_im(pc_start - s->cs_base);
5053 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5055 case 0x3e: /* fbstp */
5056 if (s->cc_op != CC_OP_DYNAMIC)
5057 gen_op_set_cc_op(s->cc_op);
5058 gen_jmp_im(pc_start - s->cs_base);
5059 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5060 tcg_gen_helper_0_0(helper_fpop);
5062 case 0x3d: /* fildll */
5063 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5064 (s->mem_index >> 2) - 1);
5065 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5067 case 0x3f: /* fistpll */
5068 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5069 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5070 (s->mem_index >> 2) - 1);
5071 tcg_gen_helper_0_0(helper_fpop);
5077 /* register float ops */
5081 case 0x08: /* fld sti */
5082 tcg_gen_helper_0_0(helper_fpush);
5083 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5085 case 0x09: /* fxchg sti */
5086 case 0x29: /* fxchg4 sti, undocumented op */
5087 case 0x39: /* fxchg7 sti, undocumented op */
5088 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5090 case 0x0a: /* grp d9/2 */
5093 /* check exceptions (FreeBSD FPU probe) */
5094 if (s->cc_op != CC_OP_DYNAMIC)
5095 gen_op_set_cc_op(s->cc_op);
5096 gen_jmp_im(pc_start - s->cs_base);
5097 tcg_gen_helper_0_0(helper_fwait);
5103 case 0x0c: /* grp d9/4 */
5106 tcg_gen_helper_0_0(helper_fchs_ST0);
5109 tcg_gen_helper_0_0(helper_fabs_ST0);
5112 tcg_gen_helper_0_0(helper_fldz_FT0);
5113 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5116 tcg_gen_helper_0_0(helper_fxam_ST0);
5122 case 0x0d: /* grp d9/5 */
5126 tcg_gen_helper_0_0(helper_fpush);
5127 tcg_gen_helper_0_0(helper_fld1_ST0);
5130 tcg_gen_helper_0_0(helper_fpush);
5131 tcg_gen_helper_0_0(helper_fldl2t_ST0);
5134 tcg_gen_helper_0_0(helper_fpush);
5135 tcg_gen_helper_0_0(helper_fldl2e_ST0);
5138 tcg_gen_helper_0_0(helper_fpush);
5139 tcg_gen_helper_0_0(helper_fldpi_ST0);
5142 tcg_gen_helper_0_0(helper_fpush);
5143 tcg_gen_helper_0_0(helper_fldlg2_ST0);
5146 tcg_gen_helper_0_0(helper_fpush);
5147 tcg_gen_helper_0_0(helper_fldln2_ST0);
5150 tcg_gen_helper_0_0(helper_fpush);
5151 tcg_gen_helper_0_0(helper_fldz_ST0);
5158 case 0x0e: /* grp d9/6 */
5161 tcg_gen_helper_0_0(helper_f2xm1);
5164 tcg_gen_helper_0_0(helper_fyl2x);
5167 tcg_gen_helper_0_0(helper_fptan);
5169 case 3: /* fpatan */
5170 tcg_gen_helper_0_0(helper_fpatan);
5172 case 4: /* fxtract */
5173 tcg_gen_helper_0_0(helper_fxtract);
5175 case 5: /* fprem1 */
5176 tcg_gen_helper_0_0(helper_fprem1);
5178 case 6: /* fdecstp */
5179 tcg_gen_helper_0_0(helper_fdecstp);
5182 case 7: /* fincstp */
5183 tcg_gen_helper_0_0(helper_fincstp);
5187 case 0x0f: /* grp d9/7 */
5190 tcg_gen_helper_0_0(helper_fprem);
5192 case 1: /* fyl2xp1 */
5193 tcg_gen_helper_0_0(helper_fyl2xp1);
5196 tcg_gen_helper_0_0(helper_fsqrt);
5198 case 3: /* fsincos */
5199 tcg_gen_helper_0_0(helper_fsincos);
5201 case 5: /* fscale */
5202 tcg_gen_helper_0_0(helper_fscale);
5204 case 4: /* frndint */
5205 tcg_gen_helper_0_0(helper_frndint);
5208 tcg_gen_helper_0_0(helper_fsin);
5212 tcg_gen_helper_0_0(helper_fcos);
5216 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5217 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5218 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5224 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5226 tcg_gen_helper_0_0(helper_fpop);
5228 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5229 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5233 case 0x02: /* fcom */
5234 case 0x22: /* fcom2, undocumented op */
5235 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5236 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5238 case 0x03: /* fcomp */
5239 case 0x23: /* fcomp3, undocumented op */
5240 case 0x32: /* fcomp5, undocumented op */
5241 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5242 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5243 tcg_gen_helper_0_0(helper_fpop);
5245 case 0x15: /* da/5 */
5247 case 1: /* fucompp */
5248 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5249 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5250 tcg_gen_helper_0_0(helper_fpop);
5251 tcg_gen_helper_0_0(helper_fpop);
5259 case 0: /* feni (287 only, just do nop here) */
5261 case 1: /* fdisi (287 only, just do nop here) */
5264 tcg_gen_helper_0_0(helper_fclex);
5266 case 3: /* fninit */
5267 tcg_gen_helper_0_0(helper_fninit);
5269 case 4: /* fsetpm (287 only, just do nop here) */
5275 case 0x1d: /* fucomi */
5276 if (s->cc_op != CC_OP_DYNAMIC)
5277 gen_op_set_cc_op(s->cc_op);
5278 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5279 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5280 s->cc_op = CC_OP_EFLAGS;
5282 case 0x1e: /* fcomi */
5283 if (s->cc_op != CC_OP_DYNAMIC)
5284 gen_op_set_cc_op(s->cc_op);
5285 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5286 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5287 s->cc_op = CC_OP_EFLAGS;
5289 case 0x28: /* ffree sti */
5290 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5292 case 0x2a: /* fst sti */
5293 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5295 case 0x2b: /* fstp sti */
5296 case 0x0b: /* fstp1 sti, undocumented op */
5297 case 0x3a: /* fstp8 sti, undocumented op */
5298 case 0x3b: /* fstp9 sti, undocumented op */
5299 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5300 tcg_gen_helper_0_0(helper_fpop);
5302 case 0x2c: /* fucom st(i) */
5303 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5304 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5306 case 0x2d: /* fucomp st(i) */
5307 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5308 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5309 tcg_gen_helper_0_0(helper_fpop);
5311 case 0x33: /* de/3 */
5313 case 1: /* fcompp */
5314 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5315 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5316 tcg_gen_helper_0_0(helper_fpop);
5317 tcg_gen_helper_0_0(helper_fpop);
5323 case 0x38: /* ffreep sti, undocumented op */
5324 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5325 tcg_gen_helper_0_0(helper_fpop);
5327 case 0x3c: /* df/4 */
5330 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5331 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5332 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5338 case 0x3d: /* fucomip */
5339 if (s->cc_op != CC_OP_DYNAMIC)
5340 gen_op_set_cc_op(s->cc_op);
5341 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5342 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5343 tcg_gen_helper_0_0(helper_fpop);
5344 s->cc_op = CC_OP_EFLAGS;
5346 case 0x3e: /* fcomip */
5347 if (s->cc_op != CC_OP_DYNAMIC)
5348 gen_op_set_cc_op(s->cc_op);
5349 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5350 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5351 tcg_gen_helper_0_0(helper_fpop);
5352 s->cc_op = CC_OP_EFLAGS;
5354 case 0x10 ... 0x13: /* fcmovxx */
5358 static const uint8_t fcmov_cc[8] = {
5364 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
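/* inferred reading of the table above: fcmov_cc maps the low two
   opcode bits to the jcc conditions B/Z/BE/P shifted left by one,
   and (op >> 3) & 1 selects the negated 0xdb forms, folded into the
   low bit of op1 -- e.g. fcmovnb (0xdb, reg 0) yields
   op1 = (JCC_B << 1) | 1 */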
5366 l1 = gen_new_label();
5367 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5368 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5377 /************************/
5380 case 0xa4: /* movsS */
5385 ot = dflag + OT_WORD;
5387 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5388 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5394 case 0xaa: /* stosS */
5399 ot = dflag + OT_WORD;
5401 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5402 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5407 case 0xac: /* lodsS */
5412 ot = dflag + OT_WORD;
5413 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5414 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5419 case 0xae: /* scasS */
5424 ot = dflag + OT_WORD;
5425 if (prefixes & PREFIX_REPNZ) {
5426 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5427 } else if (prefixes & PREFIX_REPZ) {
5428 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5431 s->cc_op = CC_OP_SUBB + ot;
5435 case 0xa6: /* cmpsS */
5440 ot = dflag + OT_WORD;
5441 if (prefixes & PREFIX_REPNZ) {
5442 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5443 } else if (prefixes & PREFIX_REPZ) {
5444 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5447 s->cc_op = CC_OP_SUBB + ot;
5450 case 0x6c: /* insS */
5455 ot = dflag ? OT_LONG : OT_WORD;
5456 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5457 gen_op_andl_T0_ffff();
5458 gen_check_io(s, ot, pc_start - s->cs_base,
5459 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5460 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5461 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5466 case 0x6e: /* outsS */
5471 ot = dflag ? OT_LONG : OT_WORD;
5472 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5473 gen_op_andl_T0_ffff();
5474 gen_check_io(s, ot, pc_start - s->cs_base,
5475 svm_is_rep(prefixes) | 4);
5476 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5477 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5483 /************************/
5491 ot = dflag ? OT_LONG : OT_WORD;
5492 val = ldub_code(s->pc++);
5493 gen_op_movl_T0_im(val);
5494 gen_check_io(s, ot, pc_start - s->cs_base,
5495 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5496 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5497 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5498 gen_op_mov_reg_T1(ot, R_EAX);
5505 ot = dflag ? OT_LONG : OT_WORD;
5506 val = ldub_code(s->pc++);
5507 gen_op_movl_T0_im(val);
5508 gen_check_io(s, ot, pc_start - s->cs_base,
5509 svm_is_rep(prefixes));
5510 gen_op_mov_TN_reg(ot, 1, R_EAX);
5512 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5513 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5514 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5515 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
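/* note on the i/o forms: helper_in_func/helper_out_func are indexed
   by the operand size; the port comes from the imm8 here and from DX
   (masked to 16 bits) in the 0xec..0xef forms below */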
5522 ot = dflag ? OT_LONG : OT_WORD;
5523 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5524 gen_op_andl_T0_ffff();
5525 gen_check_io(s, ot, pc_start - s->cs_base,
5526 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5527 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5528 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5529 gen_op_mov_reg_T1(ot, R_EAX);
5536 ot = dflag ? OT_LONG : OT_WORD;
5537 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5538 gen_op_andl_T0_ffff();
5539 gen_check_io(s, ot, pc_start - s->cs_base,
5540 svm_is_rep(prefixes));
5541 gen_op_mov_TN_reg(ot, 1, R_EAX);
5543 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5544 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5545 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5546 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5549 /************************/
5551 case 0xc2: /* ret im */
5552 val = ldsw_code(s->pc);
5555 if (CODE64(s) && s->dflag)
5557 gen_stack_update(s, val + (2 << s->dflag));
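/* the update above pops the return address (2 << dflag bytes, i.e.
   2/4/8 for 16/32/64 bit operand size) and then discards the imm16
   byte count 'val' supplied by the instruction */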
5559 gen_op_andl_T0_ffff();
5563 case 0xc3: /* ret */
5567 gen_op_andl_T0_ffff();
5571 case 0xca: /* lret im */
5572 val = ldsw_code(s->pc);
5575 if (s->pe && !s->vm86) {
5576 if (s->cc_op != CC_OP_DYNAMIC)
5577 gen_op_set_cc_op(s->cc_op);
5578 gen_jmp_im(pc_start - s->cs_base);
5579 tcg_gen_helper_0_2(helper_lret_protected,
5580 tcg_const_i32(s->dflag),
5581 tcg_const_i32(val));
5585 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5587 gen_op_andl_T0_ffff();
5588 /* NOTE: keeping EIP updated is not a problem in case of an exception */
5592 gen_op_addl_A0_im(2 << s->dflag);
5593 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5594 gen_op_movl_seg_T0_vm(R_CS);
5595 /* add stack offset */
5596 gen_stack_update(s, val + (4 << s->dflag));
5600 case 0xcb: /* lret */
5603 case 0xcf: /* iret */
5604 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5608 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5609 s->cc_op = CC_OP_EFLAGS;
5610 } else if (s->vm86) {
5612 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5614 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5615 s->cc_op = CC_OP_EFLAGS;
5618 if (s->cc_op != CC_OP_DYNAMIC)
5619 gen_op_set_cc_op(s->cc_op);
5620 gen_jmp_im(pc_start - s->cs_base);
5621 tcg_gen_helper_0_2(helper_iret_protected,
5622 tcg_const_i32(s->dflag),
5623 tcg_const_i32(s->pc - s->cs_base));
5624 s->cc_op = CC_OP_EFLAGS;
5628 case 0xe8: /* call im */
5631 tval = (int32_t)insn_get(s, OT_LONG);
5633 tval = (int16_t)insn_get(s, OT_WORD);
5634 next_eip = s->pc - s->cs_base;
5638 gen_movtl_T0_im(next_eip);
5643 case 0x9a: /* lcall im */
5645 unsigned int selector, offset;
5649 ot = dflag ? OT_LONG : OT_WORD;
5650 offset = insn_get(s, ot);
5651 selector = insn_get(s, OT_WORD);
5653 gen_op_movl_T0_im(selector);
5654 gen_op_movl_T1_imu(offset);
5657 case 0xe9: /* jmp im */
5659 tval = (int32_t)insn_get(s, OT_LONG);
5661 tval = (int16_t)insn_get(s, OT_WORD);
5662 tval += s->pc - s->cs_base;
5667 case 0xea: /* ljmp im */
5669 unsigned int selector, offset;
5673 ot = dflag ? OT_LONG : OT_WORD;
5674 offset = insn_get(s, ot);
5675 selector = insn_get(s, OT_WORD);
5677 gen_op_movl_T0_im(selector);
5678 gen_op_movl_T1_imu(offset);
5681 case 0xeb: /* jmp Jb */
5682 tval = (int8_t)insn_get(s, OT_BYTE);
5683 tval += s->pc - s->cs_base;
5688 case 0x70 ... 0x7f: /* jcc Jb */
5689 tval = (int8_t)insn_get(s, OT_BYTE);
5691 case 0x180 ... 0x18f: /* jcc Jv */
5693 tval = (int32_t)insn_get(s, OT_LONG);
5695 tval = (int16_t)insn_get(s, OT_WORD);
5698 next_eip = s->pc - s->cs_base;
5702 gen_jcc(s, b, tval, next_eip);
5705 case 0x190 ... 0x19f: /* setcc Gv */
5706 modrm = ldub_code(s->pc++);
5708 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5710 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5711 ot = dflag + OT_WORD;
5712 modrm = ldub_code(s->pc++);
5713 reg = ((modrm >> 3) & 7) | rex_r;
5714 mod = (modrm >> 6) & 3;
5717 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5718 gen_op_ld_T1_A0(ot + s->mem_index);
5720 rm = (modrm & 7) | REX_B(s);
5721 gen_op_mov_TN_reg(ot, 1, rm);
5723 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5726 /************************/
5728 case 0x9c: /* pushf */
5729 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5731 if (s->vm86 && s->iopl != 3) {
5732 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5734 if (s->cc_op != CC_OP_DYNAMIC)
5735 gen_op_set_cc_op(s->cc_op);
5736 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
5740 case 0x9d: /* popf */
5741 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5743 if (s->vm86 && s->iopl != 3) {
5744 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5749 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5750 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
5752 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5753 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
5756 if (s->cpl <= s->iopl) {
5758 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5759 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
5761 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5762 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
5766 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5767 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
5769 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5770 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
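/* summary of the three tiers above: at CPL 0 both IF and IOPL are
   writable, at CPL <= IOPL only IF is, and otherwise neither; the
   16 bit forms additionally clip the mask to the low word for a
   popfw */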
5775 s->cc_op = CC_OP_EFLAGS;
5776 /* abort translation because TF flag may change */
5777 gen_jmp_im(s->pc - s->cs_base);
5781 case 0x9e: /* sahf */
5784 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5785 if (s->cc_op != CC_OP_DYNAMIC)
5786 gen_op_set_cc_op(s->cc_op);
5787 gen_compute_eflags(cpu_cc_src);
5788 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
5789 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
5790 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
5791 s->cc_op = CC_OP_EFLAGS;
5793 case 0x9f: /* lahf */
5796 if (s->cc_op != CC_OP_DYNAMIC)
5797 gen_op_set_cc_op(s->cc_op);
5798 gen_compute_eflags(cpu_T[0]);
5799 /* Note: gen_compute_eflags() only gives the condition codes */
5800 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
5801 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5803 case 0xf5: /* cmc */
5804 if (s->cc_op != CC_OP_DYNAMIC)
5805 gen_op_set_cc_op(s->cc_op);
5806 gen_compute_eflags(cpu_cc_src);
5807 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5808 s->cc_op = CC_OP_EFLAGS;
5810 case 0xf8: /* clc */
5811 if (s->cc_op != CC_OP_DYNAMIC)
5812 gen_op_set_cc_op(s->cc_op);
5813 gen_compute_eflags(cpu_cc_src);
5814 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
5815 s->cc_op = CC_OP_EFLAGS;
5817 case 0xf9: /* stc */
5818 if (s->cc_op != CC_OP_DYNAMIC)
5819 gen_op_set_cc_op(s->cc_op);
5820 gen_compute_eflags(cpu_cc_src);
5821 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5822 s->cc_op = CC_OP_EFLAGS;
5824 case 0xfc: /* cld */
5825 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5826 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5828 case 0xfd: /* std */
5829 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5830 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
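/* DF is stored as +1/-1 rather than as a flag bit, so the string
   operations can advance ESI/EDI by simply adding df << ot without
   testing the direction */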
5833 /************************/
5834 /* bit operations */
5835 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5836 ot = dflag + OT_WORD;
5837 modrm = ldub_code(s->pc++);
5838 op = (modrm >> 3) & 7;
5839 mod = (modrm >> 6) & 3;
5840 rm = (modrm & 7) | REX_B(s);
5843 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5844 gen_op_ld_T0_A0(ot + s->mem_index);
5846 gen_op_mov_TN_reg(ot, 0, rm);
5849 val = ldub_code(s->pc++);
5850 gen_op_movl_T1_im(val);
5855 case 0x1a3: /* bt Gv, Ev */
5858 case 0x1ab: /* bts */
5861 case 0x1b3: /* btr */
5864 case 0x1bb: /* btc */
5867 ot = dflag + OT_WORD;
5868 modrm = ldub_code(s->pc++);
5869 reg = ((modrm >> 3) & 7) | rex_r;
5870 mod = (modrm >> 6) & 3;
5871 rm = (modrm & 7) | REX_B(s);
5872 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5874 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5875 /* specific case: we need to add a displacement */
5876 gen_exts(ot, cpu_T[1]);
5877 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5878 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5879 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
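/* worked example for a 32 bit bt (ot == OT_LONG == 2) with bit
   offset 100 in T1: the sar by 3 + ot gives word index 100 >> 5 = 3,
   the shl by ot gives byte displacement 3 << 2 = 12, and the 'and'
   below keeps bit 100 & 31 = 4 within the selected word */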
5880 gen_op_ld_T0_A0(ot + s->mem_index);
5882 gen_op_mov_TN_reg(ot, 0, rm);
5885 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
5888 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5889 tcg_gen_movi_tl(cpu_cc_dst, 0);
5892 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5893 tcg_gen_movi_tl(cpu_tmp0, 1);
5894 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5895 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5898 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5899 tcg_gen_movi_tl(cpu_tmp0, 1);
5900 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5901 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5902 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5906 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5907 tcg_gen_movi_tl(cpu_tmp0, 1);
5908 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5909 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5912 s->cc_op = CC_OP_SARB + ot;
5915 gen_op_st_T0_A0(ot + s->mem_index);
5917 gen_op_mov_reg_T0(ot, rm);
5918 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5919 tcg_gen_movi_tl(cpu_cc_dst, 0);
5922 case 0x1bc: /* bsf */
5923 case 0x1bd: /* bsr */
5926 ot = dflag + OT_WORD;
5927 modrm = ldub_code(s->pc++);
5928 reg = ((modrm >> 3) & 7) | rex_r;
5929 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5930 gen_extu(ot, cpu_T[0]);
5931 label1 = gen_new_label();
5932 tcg_gen_movi_tl(cpu_cc_dst, 0);
5933 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
5935 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
5937 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
5939 gen_op_mov_reg_T0(ot, reg);
5940 tcg_gen_movi_tl(cpu_cc_dst, 1);
5941 gen_set_label(label1);
5942 tcg_gen_discard_tl(cpu_cc_src);
5943 s->cc_op = CC_OP_LOGICB + ot;
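/* when the source is zero, the brcond above skips both the register
   write and the cc_dst = 1 store, so the destination stays unchanged
   and ZF reads as set under CC_OP_LOGIC -- matching the architected
   bsf/bsr behaviour */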
5946 /************************/
5948 case 0x27: /* daa */
5951 if (s->cc_op != CC_OP_DYNAMIC)
5952 gen_op_set_cc_op(s->cc_op);
5953 tcg_gen_helper_0_0(helper_daa);
5954 s->cc_op = CC_OP_EFLAGS;
5956 case 0x2f: /* das */
5959 if (s->cc_op != CC_OP_DYNAMIC)
5960 gen_op_set_cc_op(s->cc_op);
5961 tcg_gen_helper_0_0(helper_das);
5962 s->cc_op = CC_OP_EFLAGS;
5964 case 0x37: /* aaa */
5967 if (s->cc_op != CC_OP_DYNAMIC)
5968 gen_op_set_cc_op(s->cc_op);
5969 tcg_gen_helper_0_0(helper_aaa);
5970 s->cc_op = CC_OP_EFLAGS;
5972 case 0x3f: /* aas */
5975 if (s->cc_op != CC_OP_DYNAMIC)
5976 gen_op_set_cc_op(s->cc_op);
5977 tcg_gen_helper_0_0(helper_aas);
5978 s->cc_op = CC_OP_EFLAGS;
5980 case 0xd4: /* aam */
5983 val = ldub_code(s->pc++);
5985 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5987 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
5988 s->cc_op = CC_OP_LOGICB;
5991 case 0xd5: /* aad */
5994 val = ldub_code(s->pc++);
5995 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
5996 s->cc_op = CC_OP_LOGICB;
5998 /************************/
6000 case 0x90: /* nop */
6001 /* XXX: xchg + rex handling */
6002 /* XXX: correct lock test for all insn */
6003 if (prefixes & PREFIX_LOCK)
6005 if (prefixes & PREFIX_REPZ) {
6006 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6009 case 0x9b: /* fwait */
6010 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6011 (HF_MP_MASK | HF_TS_MASK)) {
6012 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6014 if (s->cc_op != CC_OP_DYNAMIC)
6015 gen_op_set_cc_op(s->cc_op);
6016 gen_jmp_im(pc_start - s->cs_base);
6017 tcg_gen_helper_0_0(helper_fwait);
6020 case 0xcc: /* int3 */
6021 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6023 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6025 case 0xcd: /* int N */
6026 val = ldub_code(s->pc++);
6027 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6029 if (s->vm86 && s->iopl != 3) {
6030 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6032 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6035 case 0xce: /* into */
6038 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6040 if (s->cc_op != CC_OP_DYNAMIC)
6041 gen_op_set_cc_op(s->cc_op);
6042 gen_jmp_im(pc_start - s->cs_base);
6043 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6045 case 0xf1: /* icebp (undocumented, exits to external debugger) */
6046 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
6049 gen_debug(s, pc_start - s->cs_base);
6052 tb_flush(cpu_single_env);
6053 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6056 case 0xfa: /* cli */
6058 if (s->cpl <= s->iopl) {
6059 tcg_gen_helper_0_0(helper_cli);
6061 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6065 tcg_gen_helper_0_0(helper_cli);
6067 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6071 case 0xfb: /* sti */
6073 if (s->cpl <= s->iopl) {
6075 tcg_gen_helper_0_0(helper_sti);
6076 /* interrupts become enabled only after the first insn following sti */
6077 /* if several consecutive insns inhibit interrupts, only the first one takes effect */
6079 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6080 tcg_gen_helper_0_0(helper_set_inhibit_irq);
6081 /* give a chance to handle pending irqs */
6082 gen_jmp_im(s->pc - s->cs_base);
6085 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6091 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6095 case 0x62: /* bound */
6098 ot = dflag ? OT_LONG : OT_WORD;
6099 modrm = ldub_code(s->pc++);
6100 reg = (modrm >> 3) & 7;
6101 mod = (modrm >> 6) & 3;
6104 gen_op_mov_TN_reg(ot, 0, reg);
6105 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6106 gen_jmp_im(pc_start - s->cs_base);
6107 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6109 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6111 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
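/* the bound helpers compare the index in cpu_tmp2_i32 against the
   two signed limits loaded from the memory operand at cpu_A0 and
   raise #BR (EXCP05) when it falls outside [lower, upper]; boundw
   uses 16 bit limits, boundl 32 bit ones */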
6113 case 0x1c8 ... 0x1cf: /* bswap reg */
6114 reg = (b & 7) | REX_B(s);
6115 #ifdef TARGET_X86_64
6117 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6118 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6119 gen_op_mov_reg_T0(OT_QUAD, reg);
6123 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6125 tmp0 = tcg_temp_new(TCG_TYPE_I32);
6126 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6127 tcg_gen_bswap_i32(tmp0, tmp0);
6128 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6129 gen_op_mov_reg_T0(OT_LONG, reg);
6133 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6134 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6135 gen_op_mov_reg_T0(OT_LONG, reg);
6139 case 0xd6: /* salc */
6142 if (s->cc_op != CC_OP_DYNAMIC)
6143 gen_op_set_cc_op(s->cc_op);
6144 gen_compute_eflags_c(cpu_T[0]);
6145 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6146 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6148 case 0xe0: /* loopnz */
6149 case 0xe1: /* loopz */
6150 case 0xe2: /* loop */
6151 case 0xe3: /* jecxz */
6155 tval = (int8_t)insn_get(s, OT_BYTE);
6156 next_eip = s->pc - s->cs_base;
6161 l1 = gen_new_label();
6162 l2 = gen_new_label();
6163 l3 = gen_new_label();
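/* label roles, inferred from the generated control flow: l1 is the
   branch-taken exit (eip = displacement target), l2 the common join
   that ends the block, l3 the not-taken path used by loopnz/loopz
   once ECX has been tested */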
6166 case 0: /* loopnz */
6168 if (s->cc_op != CC_OP_DYNAMIC)
6169 gen_op_set_cc_op(s->cc_op);
6170 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6171 gen_op_jz_ecx(s->aflag, l3);
6172 gen_compute_eflags(cpu_tmp0);
6173 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6175 tcg_gen_brcond_tl(TCG_COND_EQ,
6176 cpu_tmp0, tcg_const_tl(0), l1);
6178 tcg_gen_brcond_tl(TCG_COND_NE,
6179 cpu_tmp0, tcg_const_tl(0), l1);
6183 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6184 gen_op_jnz_ecx(s->aflag, l1);
6188 gen_op_jz_ecx(s->aflag, l1);
6193 gen_jmp_im(next_eip);
6194 gen_op_jmp_label(l2);
6202 case 0x130: /* wrmsr */
6203 case 0x132: /* rdmsr */
6205 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6209 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6210 tcg_gen_helper_0_0(helper_rdmsr);
6212 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6213 tcg_gen_helper_0_0(helper_wrmsr);
6219 case 0x131: /* rdtsc */
6220 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6222 gen_jmp_im(pc_start - s->cs_base);
6223 tcg_gen_helper_0_0(helper_rdtsc);
6225 case 0x133: /* rdpmc */
6226 gen_jmp_im(pc_start - s->cs_base);
6227 tcg_gen_helper_0_0(helper_rdpmc);
6229 case 0x134: /* sysenter */
6233 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6235 if (s->cc_op != CC_OP_DYNAMIC) {
6236 gen_op_set_cc_op(s->cc_op);
6237 s->cc_op = CC_OP_DYNAMIC;
6239 gen_jmp_im(pc_start - s->cs_base);
6240 tcg_gen_helper_0_0(helper_sysenter);
6244 case 0x135: /* sysexit */
6248 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6250 if (s->cc_op != CC_OP_DYNAMIC) {
6251 gen_op_set_cc_op(s->cc_op);
6252 s->cc_op = CC_OP_DYNAMIC;
6254 gen_jmp_im(pc_start - s->cs_base);
6255 tcg_gen_helper_0_0(helper_sysexit);
6259 #ifdef TARGET_X86_64
6260 case 0x105: /* syscall */
6261 /* XXX: is it usable in real mode? */
6262 if (s->cc_op != CC_OP_DYNAMIC) {
6263 gen_op_set_cc_op(s->cc_op);
6264 s->cc_op = CC_OP_DYNAMIC;
6266 gen_jmp_im(pc_start - s->cs_base);
6267 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6270 case 0x107: /* sysret */
6272 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6274 if (s->cc_op != CC_OP_DYNAMIC) {
6275 gen_op_set_cc_op(s->cc_op);
6276 s->cc_op = CC_OP_DYNAMIC;
6278 gen_jmp_im(pc_start - s->cs_base);
6279 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6280 /* condition codes are modified only in long mode */
6282 s->cc_op = CC_OP_EFLAGS;
6287 case 0x1a2: /* cpuid */
6288 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6290 tcg_gen_helper_0_0(helper_cpuid);
6292 case 0xf4: /* hlt */
6294 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6296 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6298 if (s->cc_op != CC_OP_DYNAMIC)
6299 gen_op_set_cc_op(s->cc_op);
6300 gen_jmp_im(s->pc - s->cs_base);
6301 tcg_gen_helper_0_0(helper_hlt);
6306 modrm = ldub_code(s->pc++);
6307 mod = (modrm >> 6) & 3;
6308 op = (modrm >> 3) & 7;
6311 if (!s->pe || s->vm86)
6313 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6315 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6319 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6322 if (!s->pe || s->vm86)
6325 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6327 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6329 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6330 gen_jmp_im(pc_start - s->cs_base);
6331 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6332 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6336 if (!s->pe || s->vm86)
6338 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6340 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6344 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6347 if (!s->pe || s->vm86)
6350 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6352 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6354 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6355 gen_jmp_im(pc_start - s->cs_base);
6356 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6357 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6362 if (!s->pe || s->vm86)
6364 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6365 if (s->cc_op != CC_OP_DYNAMIC)
6366 gen_op_set_cc_op(s->cc_op);
6368 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
6370 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
6371 s->cc_op = CC_OP_EFLAGS;
6378 modrm = ldub_code(s->pc++);
6379 mod = (modrm >> 6) & 3;
6380 op = (modrm >> 3) & 7;
6386 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6388 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6389 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
6390 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6391 gen_add_A0_im(s, 2);
6392 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
6394 gen_op_andl_T0_im(0xffffff);
6395 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6400 case 0: /* monitor */
6401 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6404 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6406 gen_jmp_im(pc_start - s->cs_base);
6407 #ifdef TARGET_X86_64
6408 if (s->aflag == 2) {
6409 gen_op_movq_A0_reg(R_EAX);
6413 gen_op_movl_A0_reg(R_EAX);
6415 gen_op_andl_A0_ffff();
6417 gen_add_A0_ds_seg(s);
6418 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6421 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6424 if (s->cc_op != CC_OP_DYNAMIC) {
6425 gen_op_set_cc_op(s->cc_op);
6426 s->cc_op = CC_OP_DYNAMIC;
6428 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6430 gen_jmp_im(s->pc - s->cs_base);
6431 tcg_gen_helper_0_0(helper_mwait);
6438 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6440 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6441 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
6442 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6443 gen_add_A0_im(s, 2);
6444 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
6446 gen_op_andl_T0_im(0xffffff);
6447 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6455 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6457 if (s->cc_op != CC_OP_DYNAMIC)
6458 gen_op_set_cc_op(s->cc_op);
6459 gen_jmp_im(s->pc - s->cs_base);
6460 tcg_gen_helper_0_0(helper_vmrun);
6461 s->cc_op = CC_OP_EFLAGS;
6464 case 1: /* VMMCALL */
6465 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6467 /* FIXME: cause #UD if hflags & SVM */
6468 tcg_gen_helper_0_0(helper_vmmcall);
6470 case 2: /* VMLOAD */
6471 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6473 tcg_gen_helper_0_0(helper_vmload);
6475 case 3: /* VMSAVE */
6476 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6478 tcg_gen_helper_0_0(helper_vmsave);
6481 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6483 tcg_gen_helper_0_0(helper_stgi);
6486 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6488 tcg_gen_helper_0_0(helper_clgi);
6490 case 6: /* SKINIT */
6491 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6493 tcg_gen_helper_0_0(helper_skinit);
6495 case 7: /* INVLPGA */
6496 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6498 tcg_gen_helper_0_0(helper_invlpga);
6503 } else if (s->cpl != 0) {
6504 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6506 if (gen_svm_check_intercept(s, pc_start,
6507 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6509 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6510 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6511 gen_add_A0_im(s, 2);
6512 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6514 gen_op_andl_T0_im(0xffffff);
6516 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
6517 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
6519 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
6520 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
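/* for lgdt/lidt the 16 bit limit is read first, then the base; with
   a 16 bit operand size only the low 24 bits of the base are kept
   (the 0xffffff mask above), as architected */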
6525 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6527 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
6528 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6532 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6534 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6536 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6537 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6538 gen_jmp_im(s->pc - s->cs_base);
6542 case 7: /* invlpg */
6544 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6547 #ifdef TARGET_X86_64
6548 if (CODE64(s) && rm == 0) {
6550 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6551 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
6552 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6553 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
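/* this mod == 3 / rm == 0 long mode form is swapgs, which exchanges
   the current GS base with MSR_KERNELGSBASE; the memory forms of
   0f 01 /7 fall through to invlpg below */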
6560 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6562 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6563 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6564 gen_jmp_im(s->pc - s->cs_base);
6573 case 0x108: /* invd */
6574 case 0x109: /* wbinvd */
6576 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6578 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6583 case 0x63: /* arpl or movslS (x86_64) */
6584 #ifdef TARGET_X86_64
6587 /* d_ot is the size of the destination */
6588 d_ot = dflag + OT_WORD;
6590 modrm = ldub_code(s->pc++);
6591 reg = ((modrm >> 3) & 7) | rex_r;
6592 mod = (modrm >> 6) & 3;
6593 rm = (modrm & 7) | REX_B(s);
6596 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6598 if (d_ot == OT_QUAD)
6599 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6600 gen_op_mov_reg_T0(d_ot, reg);
6602 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6603 if (d_ot == OT_QUAD) {
6604 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6606 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6608 gen_op_mov_reg_T0(d_ot, reg);
6614 if (!s->pe || s->vm86)
6617 modrm = ldub_code(s->pc++);
6618 reg = (modrm >> 3) & 7;
6619 mod = (modrm >> 6) & 3;
6622 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6623 gen_op_ld_T0_A0(ot + s->mem_index);
6625 gen_op_mov_TN_reg(ot, 0, rm);
6627 gen_op_mov_TN_reg(ot, 1, reg);
6628 tcg_gen_andi_tl(cpu_tmp0, cpu_T[0], 3);
6629 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 3);
6630 tcg_gen_movi_tl(cpu_T3, 0);
6631 label1 = gen_new_label();
6632 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, cpu_T[1], label1);
6633 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], ~3);
6634 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
6635 tcg_gen_movi_tl(cpu_T3, CC_Z);
6636 gen_set_label(label1);
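/* arpl semantics as staged above: if the destination selector's RPL
   (low two bits) is below the source's, it is raised to match and
   CC_Z is queued in cpu_T3 so ZF ends up set; otherwise the selector
   is untouched and cpu_T3 stays 0, clearing ZF */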
6638 gen_op_st_T0_A0(ot + s->mem_index);
6640 gen_op_mov_reg_T0(ot, rm);
6642 if (s->cc_op != CC_OP_DYNAMIC)
6643 gen_op_set_cc_op(s->cc_op);
6644 gen_compute_eflags(cpu_cc_src);
6645 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
6646 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T3);
6647 s->cc_op = CC_OP_EFLAGS;
6650 case 0x102: /* lar */
6651 case 0x103: /* lsl */
6654 if (!s->pe || s->vm86)
6656 ot = dflag ? OT_LONG : OT_WORD;
6657 modrm = ldub_code(s->pc++);
6658 reg = ((modrm >> 3) & 7) | rex_r;
6659 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6660 if (s->cc_op != CC_OP_DYNAMIC)
6661 gen_op_set_cc_op(s->cc_op);
6663 tcg_gen_helper_1_1(helper_lar, cpu_T[0], cpu_T[0]);
6665 tcg_gen_helper_1_1(helper_lsl, cpu_T[0], cpu_T[0]);
6666 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
6667 label1 = gen_new_label();
6668 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
6669 gen_op_mov_reg_T0(ot, reg);
6670 gen_set_label(label1);
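/* helper_lar/helper_lsl report success by setting CC_Z in cc_src;
   the brcond above therefore skips the register write for an invalid
   selector, leaving the destination unchanged while ZF = 0 signals
   the failure */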
6671 s->cc_op = CC_OP_EFLAGS;
6675 modrm = ldub_code(s->pc++);
6676 mod = (modrm >> 6) & 3;
6677 op = (modrm >> 3) & 7;
6679 case 0: /* prefetchnta */
6680 case 1: /* prefetcht0 */
6681 case 2: /* prefetcht1 */
6682 case 3: /* prefetcht2 */
6685 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6686 /* nothing more to do */
6688 default: /* nop (multi byte) */
6689 gen_nop_modrm(s, modrm);
6693 case 0x119 ... 0x11f: /* nop (multi byte) */
6694 modrm = ldub_code(s->pc++);
6695 gen_nop_modrm(s, modrm);
6697 case 0x120: /* mov reg, crN */
6698 case 0x122: /* mov crN, reg */
6700 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6702 modrm = ldub_code(s->pc++);
6703 if ((modrm & 0xc0) != 0xc0)
6705 rm = (modrm & 7) | REX_B(s);
6706 reg = ((modrm >> 3) & 7) | rex_r;
6718 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6719 gen_op_mov_TN_reg(ot, 0, rm);
6720 tcg_gen_helper_0_2(helper_movl_crN_T0,
6721 tcg_const_i32(reg), cpu_T[0]);
6722 gen_jmp_im(s->pc - s->cs_base);
6725 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6726 #if !defined(CONFIG_USER_ONLY)
6728 tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6731 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[reg]));
6732 gen_op_mov_reg_T0(ot, rm);
6740 case 0x121: /* mov reg, drN */
6741 case 0x123: /* mov drN, reg */
6743 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6745 modrm = ldub_code(s->pc++);
6746 if ((modrm & 0xc0) != 0xc0)
6748 rm = (modrm & 7) | REX_B(s);
6749 reg = ((modrm >> 3) & 7) | rex_r;
6754 /* XXX: do it dynamically with CR4.DE bit */
6755 if (reg == 4 || reg == 5 || reg >= 8)
6758 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6759 gen_op_mov_TN_reg(ot, 0, rm);
6760 tcg_gen_helper_0_2(helper_movl_drN_T0,
6761 tcg_const_i32(reg), cpu_T[0]);
6762 gen_jmp_im(s->pc - s->cs_base);
6765 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6766 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
6767 gen_op_mov_reg_T0(ot, rm);
6771 case 0x106: /* clts */
6773 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6775 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6776 tcg_gen_helper_0_0(helper_clts);
6777 /* abort block because static cpu state changed */
6778 gen_jmp_im(s->pc - s->cs_base);
6782 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6783 case 0x1c3: /* MOVNTI reg, mem */
6784 if (!(s->cpuid_features & CPUID_SSE2))
6786 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6787 modrm = ldub_code(s->pc++);
6788 mod = (modrm >> 6) & 3;
6791 reg = ((modrm >> 3) & 7) | rex_r;
6792 /* generate a generic store */
6793 gen_ldst_modrm(s, modrm, ot, reg, 1);
6796 modrm = ldub_code(s->pc++);
6797 mod = (modrm >> 6) & 3;
6798 op = (modrm >> 3) & 7;
6800 case 0: /* fxsave */
6801 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6802 (s->flags & HF_EM_MASK))
6804 if (s->flags & HF_TS_MASK) {
6805 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6808 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6809 if (s->cc_op != CC_OP_DYNAMIC)
6810 gen_op_set_cc_op(s->cc_op);
6811 gen_jmp_im(pc_start - s->cs_base);
6812 tcg_gen_helper_0_2(helper_fxsave,
6813 cpu_A0, tcg_const_i32((s->dflag == 2)));
6815 case 1: /* fxrstor */
6816 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6817 (s->flags & HF_EM_MASK))
6819 if (s->flags & HF_TS_MASK) {
6820 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6823 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6824 if (s->cc_op != CC_OP_DYNAMIC)
6825 gen_op_set_cc_op(s->cc_op);
6826 gen_jmp_im(pc_start - s->cs_base);
6827 tcg_gen_helper_0_2(helper_fxrstor,
6828 cpu_A0, tcg_const_i32((s->dflag == 2)));
6830 case 2: /* ldmxcsr */
6831 case 3: /* stmxcsr */
6832 if (s->flags & HF_TS_MASK) {
6833 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6836 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6839 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6841 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6842 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
6844 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
6845 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6848 case 5: /* lfence */
6849 case 6: /* mfence */
6850 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6853 case 7: /* sfence / clflush */
6854 if ((modrm & 0xc7) == 0xc0) {
6856 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6857 if (!(s->cpuid_features & CPUID_SSE))
6861 if (!(s->cpuid_features & CPUID_CLFLUSH))
6863 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6870 case 0x10d: /* 3DNow! prefetch(w) */
6871 modrm = ldub_code(s->pc++);
6872 mod = (modrm >> 6) & 3;
6875 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6876 /* ignore for now */
6878 case 0x1aa: /* rsm */
6879 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6881 if (!(s->flags & HF_SMM_MASK))
6883 if (s->cc_op != CC_OP_DYNAMIC) {
6884 gen_op_set_cc_op(s->cc_op);
6885 s->cc_op = CC_OP_DYNAMIC;
6887 gen_jmp_im(s->pc - s->cs_base);
6888 tcg_gen_helper_0_0(helper_rsm);
6891 case 0x10e ... 0x10f:
6892 /* 3DNow! instructions, ignore prefixes */
6893 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6894 case 0x110 ... 0x117:
6895 case 0x128 ... 0x12f:
6896 case 0x150 ... 0x177:
6897 case 0x17c ... 0x17f:
6899 case 0x1c4 ... 0x1c6:
6900 case 0x1d0 ... 0x1fe:
6901 gen_sse(s, b, pc_start, rex_r);
6906 /* lock generation */
6907 if (s->prefix & PREFIX_LOCK)
6908 tcg_gen_helper_0_0(helper_unlock);
6911 if (s->prefix & PREFIX_LOCK)
6912 tcg_gen_helper_0_0(helper_unlock);
6913 /* XXX: ensure that no lock was generated */
6914 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6918 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6923 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
6929 void optimize_flags_init(void)
6931 #if TCG_TARGET_REG_BITS == 32
6932 assert(sizeof(CCTable) == (1 << 3));
6934 assert(sizeof(CCTable) == (1 << 4));
6936 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6938 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6939 #if TARGET_LONG_BITS > HOST_LONG_BITS
6940 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
6941 TCG_AREG0, offsetof(CPUState, t0), "T0");
6942 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6943 TCG_AREG0, offsetof(CPUState, t1), "T1");
6944 cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6945 TCG_AREG0, offsetof(CPUState, t2), "A0");
6947 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6948 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6949 cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6951 cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
6952 TCG_AREG0, offsetof(CPUState, t3), "T3");
6953 #if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
6954 /* XXX: must be removed once there are fewer fixed registers */
6955 cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6957 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
6958 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
6959 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
6960 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
6961 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
6962 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
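/* cc_op/cc_src/cc_dst back the lazy condition-code scheme: cc_op
   records which operation last set the flags and cc_src/cc_dst its
   operands, so EFLAGS is only materialized when a consumer such as
   gen_compute_eflags() asks for it */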
6965 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6966 basic block 'tb'. If search_pc is TRUE, also generate PC
6967 information for each intermediate instruction. */
6968 static inline int gen_intermediate_code_internal(CPUState *env,
6969 TranslationBlock *tb,
6972 DisasContext dc1, *dc = &dc1;
6973 target_ulong pc_ptr;
6974 uint16_t *gen_opc_end;
6977 target_ulong pc_start;
6978 target_ulong cs_base;
6980 /* generate intermediate code */
6982 cs_base = tb->cs_base;
6984 cflags = tb->cflags;
6986 dc->pe = (flags >> HF_PE_SHIFT) & 1;
6987 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6988 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6989 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6991 dc->vm86 = (flags >> VM_SHIFT) & 1;
6992 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6993 dc->iopl = (flags >> IOPL_SHIFT) & 3;
6994 dc->tf = (flags >> TF_SHIFT) & 1;
6995 dc->singlestep_enabled = env->singlestep_enabled;
6996 dc->cc_op = CC_OP_DYNAMIC;
6997 dc->cs_base = cs_base;
6999 dc->popl_esp_hack = 0;
7000 /* select memory access functions */
7002 if (flags & HF_SOFTMMU_MASK) {
7004 dc->mem_index = 2 * 4;
7006 dc->mem_index = 1 * 4;
7008 dc->cpuid_features = env->cpuid_features;
7009 dc->cpuid_ext_features = env->cpuid_ext_features;
7010 dc->cpuid_ext2_features = env->cpuid_ext2_features;
7011 #ifdef TARGET_X86_64
7012 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
7013 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
7016 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
7017 (flags & HF_INHIBIT_IRQ_MASK)
7018 #ifndef CONFIG_SOFTMMU
7019 || (flags & HF_SOFTMMU_MASK)
7023 /* check addseg logic */
7024 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
7025 printf("ERROR addseg\n");
7028 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
7029 #if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
7030 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
7032 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
7033 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
7034 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
7035 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
7036 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
7037 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
7038 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
7040 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7042 dc->is_jmp = DISAS_NEXT;
7047 if (env->nb_breakpoints > 0) {
7048 for(j = 0; j < env->nb_breakpoints; j++) {
7049 if (env->breakpoints[j] == pc_ptr) {
7050 gen_debug(dc, pc_ptr - dc->cs_base);
7056 j = gen_opc_ptr - gen_opc_buf;
7060 gen_opc_instr_start[lj++] = 0;
7062 gen_opc_pc[lj] = pc_ptr;
7063 gen_opc_cc_op[lj] = dc->cc_op;
7064 gen_opc_instr_start[lj] = 1;
7066 pc_ptr = disas_insn(dc, pc_ptr);
7067 /* stop translation if indicated */
7070 /* in single-step mode, we generate only one instruction and
7071 raise an exception */
7072 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7073 the flag and abort the translation to give the irqs a
7074 chance to happen */
7075 if (dc->tf || dc->singlestep_enabled ||
7076 (flags & HF_INHIBIT_IRQ_MASK) ||
7077 (cflags & CF_SINGLE_INSN)) {
7078 gen_jmp_im(pc_ptr - dc->cs_base);
7082 /* if the translation gets too long, stop generation as well */
7083 if (gen_opc_ptr >= gen_opc_end ||
7084 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
7085 gen_jmp_im(pc_ptr - dc->cs_base);
7090 *gen_opc_ptr = INDEX_op_end;
7091 /* make sure to fill in the last values */
7093 j = gen_opc_ptr - gen_opc_buf;
7096 gen_opc_instr_start[lj++] = 0;
7100 if (loglevel & CPU_LOG_TB_CPU) {
7101 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
7103 if (loglevel & CPU_LOG_TB_IN_ASM) {
7105 fprintf(logfile, "----------------\n");
7106 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7107 #ifdef TARGET_X86_64
7112 disas_flags = !dc->code32;
7113 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7114 fprintf(logfile, "\n");
7115 if (loglevel & CPU_LOG_TB_OP_OPT) {
7116 fprintf(logfile, "OP before opt:\n");
7117 tcg_dump_ops(&tcg_ctx, logfile);
7118 fprintf(logfile, "\n");
7124 tb->size = pc_ptr - pc_start;
7128 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7130 return gen_intermediate_code_internal(env, tb, 0);
7133 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7135 return gen_intermediate_code_internal(env, tb, 1);
7138 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7139 unsigned long searched_pc, int pc_pos, void *puc)
7143 if (loglevel & CPU_LOG_TB_OP) {
7145 fprintf(logfile, "RESTORE:\n");
7146 for(i = 0;i <= pc_pos; i++) {
7147 if (gen_opc_instr_start[i]) {
7148 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7151 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7152 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7153 (uint32_t)tb->cs_base);
7156 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7157 cc_op = gen_opc_cc_op[pc_pos];
7158 if (cc_op != CC_OP_DYNAMIC)