 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL

#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)

//#define MACRO_TEST   1
/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;
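
/* Note: cpu_T[0]/cpu_T[1] act as the two generic operand registers of the
   old micro-op model and cpu_A0 as the current effective address, all
   allocated as TCG globals; the cpu_tmp* values are scratch temporaries
   that are only live within one generated micro-op sequence. */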
#include "gen-icount.h"

static int x86_64_hregs;
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations */
    OP_SHL1, /* undocumented */

/* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_TMP0 = 16,    /* temporary operand register */
    OR_A0, /* temporary register used when doing address evaluation */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
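
/* The REG_*_OFFSET values locate the 8/16/32 bit sub-registers inside a
   target_ulong register slot, so stores like "st8 at regs[reg] +
   REG_B_OFFSET" hit AL/AX/EAX regardless of host byte order.  For example,
   with an 8 byte target_ulong on a big endian host:
       REG_B_OFFSET = 7  (AL)      REG_H_OFFSET = 6  (AH)
       REG_W_OFFSET = 6  (AX)      REG_L_OFFSET = 4  (EAX)
       REG_LH_OFFSET = 0 (upper half of RAX) */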
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
    if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
        tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
    tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
    tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
    /* high part of register set to zero */
    tcg_gen_movi_tl(cpu_tmp0, 0);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}
static inline void gen_op_mov_reg_A0(int size, int reg)
    tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
    tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
    /* high part of register set to zero */
    tcg_gen_movi_tl(cpu_tmp0, 0);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
    tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);

static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
    if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
        tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif
static void gen_add_A0_im(DisasContext *s, int val)
        gen_op_addq_A0_im(val);
        gen_op_addl_A0_im(val);
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));

static inline void gen_op_add_reg_T0(int size, int reg)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);

static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
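
/* The 'idx' argument of the load/store helpers below packs two values:
   the low 2 bits select the operand size (OT_BYTE..OT_QUAD) and the
   remaining bits carry the memory index, biased so that it is recovered
   with (idx >> 2) - 1.  Callers therefore pass "ot + s->mem_index"
   instead of two separate parameters. */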
static inline void gen_op_lds_T0_A0(int idx)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
    tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
    tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);

static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld8u(t0, a0, mem_index);
    tcg_gen_qemu_ld16u(t0, a0, mem_index);
    tcg_gen_qemu_ld32u(t0, a0, mem_index);
    tcg_gen_qemu_ld64(t0, a0, mem_index);
/* XXX: always use ldu or lds */
static inline void gen_op_ld_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_st8(t0, a0, mem_index);
    tcg_gen_qemu_st16(t0, a0, mem_index);
    tcg_gen_qemu_st32(t0, a0, mem_index);
    tcg_gen_qemu_st64(t0, a0, mem_index);
static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
static inline void gen_string_movl_A0_ESI(DisasContext *s)
    override = s->override;
    gen_op_movq_A0_seg(override);
    gen_op_addq_A0_reg_sN(0, R_ESI);
    gen_op_movq_A0_reg(R_ESI);
    if (s->addseg && override < 0)
    gen_op_movl_A0_seg(override);
    gen_op_addl_A0_reg_sN(0, R_ESI);
    gen_op_movl_A0_reg(R_ESI);
    /* 16 bit address, always override */
    gen_op_movl_A0_reg(R_ESI);
    gen_op_andl_A0_ffff();
    gen_op_addl_A0_seg(override);
static inline void gen_string_movl_A0_EDI(DisasContext *s)
    gen_op_movq_A0_reg(R_EDI);
    gen_op_movl_A0_seg(R_ES);
    gen_op_addl_A0_reg_sN(0, R_EDI);
    gen_op_movl_A0_reg(R_EDI);
    gen_op_movl_A0_reg(R_EDI);
    gen_op_andl_A0_ffff();
    gen_op_addl_A0_seg(R_ES);
static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}
static void gen_extu(int ot, TCGv reg)
    tcg_gen_ext8u_tl(reg, reg);
    tcg_gen_ext16u_tl(reg, reg);
    tcg_gen_ext32u_tl(reg, reg);

static void gen_exts(int ot, TCGv reg)
    tcg_gen_ext8s_tl(reg, reg);
    tcg_gen_ext16s_tl(reg, reg);
    tcg_gen_ext32s_tl(reg, reg);
static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}
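
/* In the two helpers above, 'size' is the address size flag (0 = 16 bit,
   1 = 32 bit, 2 = 64 bit), so "size + 1" yields the OT_WORD/OT_LONG/
   OT_QUAD constant that gen_extu() needs to mask ECX: with 16 bit
   addressing only CX is compared against zero. */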
static void *helper_in_func[3] = {

static void *helper_out_func[3] = {

static void *gen_check_io_func[3] = {

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
    target_ulong next_eip;

    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
    if (s->flags & HF_SVMI_MASK) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
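
/* Lazy condition code evaluation: instead of computing EFLAGS after every
   instruction, the translator records the last flag-setting operation in
   cpu_cc_op and its operands in cpu_cc_src/cpu_cc_dst.  The two helpers
   below index cc_table[cc_op] and call the stored compute_c/compute_all
   function pointer through TCG; the shift count scales cc_op by
   sizeof(CCTable), i.e. 8 bytes on 32 bit hosts and 16 bytes on 64 bit
   hosts. */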
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        gen_compute_eflags_c(cpu_T[0]);
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
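
/* The shift counts used above follow the EFLAGS bit layout:
       CF = bit 0, PF = bit 2, AF = bit 4, ZF = bit 6, SF = bit 7,
       OF = bit 11.
   E.g. JCC_S extracts SF with ">> 7, & 1", and the signed conditions
   align OF (bit 11) with SF (bit 7) before xor-ing them. */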
/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
    jcc_op = (b >> 1) & 7;
    /* we optimize the cmp/jcc case */
    if (jcc_op == JCC_O || jcc_op == JCC_P)
    /* some jumps are easy to compute */
    if (jcc_op != JCC_Z && jcc_op != JCC_S)
/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
    int inv, jcc_op, size, cond;

    jcc_op = (b >> 1) & 7;
    /* we optimize the cmp/jcc case */
        size = cc_op - CC_OP_SUBB;
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
            tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
            tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
#ifdef TARGET_X86_64
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
            tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
            tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
#ifdef TARGET_X86_64
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
#ifdef TARGET_X86_64
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
        /* some jumps are easy to compute */
            size = (cc_op - CC_OP_ADDB) & 3;
            size = (cc_op - CC_OP_ADDB) & 3;
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_jmp_tb(s, next_eip, 1);
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
static inline void gen_ins(DisasContext *s, int ot)
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
static inline void gen_outs(DisasContext *s, int ot)
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
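
/* Translation of REP prefixed string instructions: each iteration becomes
   its own translation block.  Roughly, the generated code is
       if (ECX == 0) goto next_insn;
       <one string operation>;
       ECX--;
       if (ECX == 0) goto next_insn;   (plus a flag test for REPZ/REPNZ)
       goto current_insn;
   so single stepping and exceptions observe one iteration at a time. */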
/* same method as Valgrind: we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    gen_op_jz_ecx(s->aflag, l2);                                              \
    gen_jmp(s, cur_eip);                                                      \

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    gen_op_jz_ecx(s->aflag, l2);                                              \
    gen_jmp(s, cur_eip);                                                      \
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
        gen_op_mov_TN_reg(ot, 0, d);
        gen_op_ld_T0_A0(ot + s1->mem_index);
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        gen_op_mov_reg_T0(ot, d);
        gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        gen_op_mov_reg_T0(ot, d);
        gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        gen_op_addl_T0_T1();
        gen_op_mov_reg_T0(ot, d);
        gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        gen_op_mov_reg_T0(ot, d);
        gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        gen_op_mov_reg_T0(ot, d);
        gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        gen_op_mov_reg_T0(ot, d);
        gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        gen_op_mov_reg_T0(ot, d);
        gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
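
/* Note how gen_op() applies the lazy flag scheme: each arithmetic case
   leaves its result in cpu_cc_dst (and the second operand in cpu_cc_src)
   and sets s1->cc_op to CC_OP_xxx + ot.  ADC/SBB cannot know the incoming
   carry at translation time, so they compute cpu_cc_op at run time
   instead, selecting the ADD/ADC (resp. SUB/SBB) flag variant from the
   carry value. */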
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
        gen_op_mov_TN_reg(ot, 0, d);
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
        gen_op_mov_reg_T0(ot, d);
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, op1);
    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, op1);
    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);
    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, op1);
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
        tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
        tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, op1);
    /* update eflags if non zero shift */
    tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        s->cc_op = CC_OP_SARB + ot;
        s->cc_op = CC_OP_SHLB + ot;
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}
/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
        gen_op_mov_v_reg(ot, t0, op1);

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
        tcg_gen_mov_tl(cpu_tmp0, t1);

    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
        gen_op_mov_reg_v(ot, op1, t0);

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    tcg_gen_shri_tl(t0, t0, data_bits - 1);
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
static void *helper_rotc[8] = {
    X86_64_ONLY(helper_rclq),
    X86_64_ONLY(helper_rcrq),

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, op1);
    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, op1);
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
        gen_op_mov_v_reg(ot, t0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        data_bits = 8 << ot;
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_mov_tl(t1, cpu_tmp4);

    gen_set_label(label1);
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
        gen_op_mov_reg_v(ot, op1, t0);

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
        gen_op_mov_TN_reg(ot, 1, s);
        gen_rot_rm_T1(s1, ot, d, 0);
        gen_rot_rm_T1(s1, ot, d, 1);
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        gen_rotc_rm_T1(s1, ot, d, 0);
        gen_rotc_rm_T1(s1, ot, d, 1);

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
        gen_shift_rm_im(s1, ot, d, c, 0, 0);
        gen_shift_rm_im(s1, ot, d, c, 1, 0);
        gen_shift_rm_im(s1, ot, d, c, 1, 1);
        /* currently not optimized */
        gen_op_movl_T1_im(c);
        gen_shift(s1, op, ot, d, OR_TMP1);
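
/* ModRM decoding reminder for gen_lea_modrm() below:
       bits 7..6: mod (0: no disp/special, 1: disp8, 2: disp16/32,
                       3: register operand)
       bits 5..3: reg or opcode extension
       bits 2..0: rm (base register, or a SIB scale/index/base byte
                      follows when rm == 4 in 32/64 bit address modes)
   REX_X and REX_B extend the SIB index and base to 16 registers on
   x86_64. */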
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    mod = (modrm >> 6) & 3;
        code = ldub_code(s->pc++);
        scale = (code >> 6) & 3;
        index = ((code >> 3) & 7) | REX_X(s);
        if ((base & 7) == 5) {
            disp = (int32_t)ldl_code(s->pc);
            if (CODE64(s) && !havesib) {
                disp += s->pc + s->rip_offset;
            disp = (int8_t)ldub_code(s->pc++);
            disp = ldl_code(s->pc);
        /* for correct popl handling with esp */
        if (base == 4 && s->popl_esp_hack)
            disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(base);
            gen_op_addq_A0_im(disp);
            gen_op_movl_A0_reg(base);
            gen_op_addl_A0_im(disp);
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_im(disp);
            gen_op_movl_A0_im(disp);
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
                gen_op_addl_A0_reg_sN(scale, index);
            if (base == R_EBP || base == R_ESP)
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
                gen_op_addl_A0_seg(override);
        disp = lduw_code(s->pc);
        gen_op_movl_A0_im(disp);
        rm = 0; /* avoid SS override */
            disp = (int8_t)ldub_code(s->pc++);
            disp = lduw_code(s->pc);
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            gen_op_movl_A0_reg(R_ESI);
            gen_op_movl_A0_reg(R_EDI);
            gen_op_movl_A0_reg(R_EBP);
            gen_op_movl_A0_reg(R_EBX);
        gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
        if (rm == 2 || rm == 3 || rm == 6)
            gen_op_addl_A0_seg(override);
static void gen_nop_modrm(DisasContext *s, int modrm)
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
        code = ldub_code(s->pc++);

/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
    int override, must_add_seg;
    must_add_seg = s->addseg;
    if (s->override >= 0) {
        override = s->override;
#ifdef TARGET_X86_64
        gen_op_addq_A0_seg(override);
        gen_op_addl_A0_seg(override);
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_op_mov_reg_T0(ot, rm);
        gen_op_mov_TN_reg(ot, 0, rm);
        gen_op_mov_reg_T0(ot, reg);
        gen_lea_modrm(s, modrm, &opreg, &disp);
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, reg);
static inline uint32_t insn_get(DisasContext *s, int ot)
    ret = ldub_code(s->pc);
    ret = lduw_code(s->pc);
    ret = ldl_code(s->pc);

static inline int insn_const_size(unsigned int ot)
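
/* Direct block chaining: when the jump target lies on the same guest page
   as the current TB, gen_goto_tb() emits a patchable goto_tb and exits
   with (long)tb + tb_num, which encodes both the TB to back-patch and
   which of its two jump slots is taken.  Cross-page jumps must return to
   the main loop unchained because the mapping may change. */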
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
    TranslationBlock *tb;

    pc = s->cs_base + eip;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_exit_tb((long)tb + tb_num);
        /* jump to another page: currently not optimized */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    l1 = gen_new_label();
    gen_jcc1(s, cc_op, b, l1);
    gen_goto_tb(s, 0, next_eip);
    gen_goto_tb(s, 1, val);
    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_jcc1(s, cc_op, b, l1);
    gen_jmp_im(next_eip);
static void gen_setcc(DisasContext *s, int b)
    int inv, jcc_op, l1;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        /* XXX: make it faster by adding new instructions in TCG */
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_movi_tl(t0, 0);
        l1 = gen_new_label();
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
        tcg_gen_movi_tl(t0, 1);
        tcg_gen_mov_tl(cpu_T[0], t0);
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worth it */
        jcc_op = (b >> 1) & 7;
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
static inline void gen_op_movl_T0_seg(int seg_reg)
{
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                     offsetof(CPUX86State,segs[seg_reg].selector));
}

static inline void gen_op_movl_seg_T0_vm(int seg_reg)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
    tcg_gen_st32_tl(cpu_T[0], cpu_env,
                    offsetof(CPUX86State,segs[seg_reg].selector));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
    tcg_gen_st_tl(cpu_T[0], cpu_env,
                  offsetof(CPUX86State,segs[seg_reg].base));
}
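
/* In real or vm86 mode a segment load needs no protection checks: the
   base is simply selector << 4, as generated above.  Example: loading SS
   with 0x1234 yields segs[R_SS].base = 0x12340. */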
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}

static inline void
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
    /* no SVM activated; fast case */
    if (likely(!(s->flags & HF_SVMI_MASK)))
        return;
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(pc_start - s->cs_base);
    tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                       tcg_const_i32(type), tcg_const_i64(param));

static inline void
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    gen_svm_check_intercept_param(s, pc_start, type, 0);
}
static inline void gen_stack_update(DisasContext *s, int addend)
#ifdef TARGET_X86_64
        gen_op_add_reg_im(2, R_ESP, addend);
        gen_op_add_reg_im(1, R_ESP, addend);
        gen_op_add_reg_im(0, R_ESP, addend);
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
#ifdef TARGET_X86_64
    gen_op_movq_A0_reg(R_ESP);
        gen_op_addq_A0_im(-8);
        gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        gen_op_addq_A0_im(-2);
        gen_op_st_T0_A0(OT_WORD + s->mem_index);
    gen_op_mov_reg_A0(2, R_ESP);
    gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-2);
        gen_op_addl_A0_im(-4);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        gen_op_addl_A0_seg(R_SS);
        gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        gen_op_addl_A0_seg(R_SS);
    gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
    if (s->ss32 && !s->addseg)
        gen_op_mov_reg_A0(1, R_ESP);
        gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
#ifdef TARGET_X86_64
    gen_op_movq_A0_reg(R_ESP);
        gen_op_addq_A0_im(-8);
        gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        gen_op_addq_A0_im(-2);
        gen_op_st_T1_A0(OT_WORD + s->mem_index);
    gen_op_mov_reg_A0(2, R_ESP);
    gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-2);
        gen_op_addl_A0_im(-4);
        gen_op_addl_A0_seg(R_SS);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_SS);
    gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
    if (s->ss32 && !s->addseg)
        gen_op_mov_reg_A0(1, R_ESP);
        gen_stack_update(s, (-2) << s->dflag);
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
#ifdef TARGET_X86_64
    gen_op_movq_A0_reg(R_ESP);
    gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_seg(R_SS);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_SS);
    gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
static void gen_pop_update(DisasContext *s)
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
        gen_stack_update(s, 2 << s->dflag);
static void gen_stack_A0(DisasContext *s)
    gen_op_movl_A0_reg(R_ESP);
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        gen_op_addl_A0_seg(R_SS);
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        gen_op_addl_A0_seg(R_SS);
    for(i = 0; i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
    gen_op_movl_A0_reg(R_ESP);
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
        gen_op_addl_A0_seg(R_SS);
    for(i = 0; i < 8; i++) {
        /* ESP is not reloaded */
        gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        gen_op_addl_A0_im(2 << s->dflag);
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
static void gen_enter(DisasContext *s, int esp_addend, int level)
#ifdef TARGET_X86_64
        ot = s->dflag ? OT_QUAD : OT_WORD;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
static void gen_debug(DisasContext *s, target_ulong cur_eip)
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
        tcg_gen_helper_0_0(helper_single_step);
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    gen_goto_tb(s, tb_num, eip);

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}

static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}

static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}

static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}

static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
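
/* sse_op_table1 is indexed by the second opcode byte and by the mandatory
   prefix: column 0 = no prefix (ps/MMX forms), 1 = 0x66 (pd/XMM),
   2 = 0xF3 (ss), 3 = 0xF2 (sd).  SSE_SPECIAL marks opcodes that need
   hand written decoding, SSE_DUMMY ones that are handled before the table
   dispatch (femms/emms). */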
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),
    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },
    [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
    [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2833 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2834 [0xe8] = MMX_OP2(psubsb),
2835 [0xe9] = MMX_OP2(psubsw),
2836 [0xea] = MMX_OP2(pminsw),
2837 [0xeb] = MMX_OP2(por),
2838 [0xec] = MMX_OP2(paddsb),
2839 [0xed] = MMX_OP2(paddsw),
2840 [0xee] = MMX_OP2(pmaxsw),
2841 [0xef] = MMX_OP2(pxor),
2842 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2843 [0xf1] = MMX_OP2(psllw),
2844 [0xf2] = MMX_OP2(pslld),
2845 [0xf3] = MMX_OP2(psllq),
2846 [0xf4] = MMX_OP2(pmuludq),
2847 [0xf5] = MMX_OP2(pmaddwd),
2848 [0xf6] = MMX_OP2(psadbw),
2849 [0xf7] = MMX_OP2(maskmov),
2850 [0xf8] = MMX_OP2(psubb),
2851 [0xf9] = MMX_OP2(psubw),
2852 [0xfa] = MMX_OP2(psubl),
2853 [0xfb] = MMX_OP2(psubq),
2854 [0xfc] = MMX_OP2(paddb),
2855 [0xfd] = MMX_OP2(paddw),
2856 [0xfe] = MMX_OP2(paddl),
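/* Shift-by-immediate table for the 0F 71/72/73 groups. Rows of 8 are
   indexed by ((b - 1) & 3) * 8 + reg (see the lookup below): the
   opcode byte selects the element width (word/dword/qword) and the
   modrm reg field the operation (2 = srl, 4 = sra, 6 = sll; 3 and 7
   are the 128-bit byte shifts, which only exist with a 0x66 prefix).
   Column [b1] picks the MMX or SSE flavour of each helper. */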
2859 static void *sse_op_table2[3 * 8][2] = {
2860 [0 + 2] = MMX_OP2(psrlw),
2861 [0 + 4] = MMX_OP2(psraw),
2862 [0 + 6] = MMX_OP2(psllw),
2863 [8 + 2] = MMX_OP2(psrld),
2864 [8 + 4] = MMX_OP2(psrad),
2865 [8 + 6] = MMX_OP2(pslld),
2866 [16 + 2] = MMX_OP2(psrlq),
2867 [16 + 3] = { NULL, helper_psrldq_xmm },
2868 [16 + 6] = MMX_OP2(psllq),
2869 [16 + 7] = { NULL, helper_pslldq_xmm },
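/* Scalar integer <-> float conversions. Index = (dflag == 2) * 2 +
   ((b >> 8) - 2): the F3/F2 prefix selects the ss or sd form and
   REX.W (dflag == 2) the 64-bit integer variants, which only exist
   on x86_64 (hence X86_64_ONLY). The second and third groups of
   four, reached with an extra +4 or +8, hold the truncating (0F 2C)
   and rounding (0F 2D) float-to-integer forms. */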
2872 static void *sse_op_table3[4 * 3] = {
2875 X86_64_ONLY(helper_cvtsq2ss),
2876 X86_64_ONLY(helper_cvtsq2sd),
2880 X86_64_ONLY(helper_cvttss2sq),
2881 X86_64_ONLY(helper_cvttsd2sq),
2885 X86_64_ONLY(helper_cvtss2sq),
2886 X86_64_ONLY(helper_cvtsd2sq),
2889 static void *sse_op_table4[8][4] = {
2900 static void *sse_op_table5[256] = {
2901 [0x0c] = helper_pi2fw,
2902 [0x0d] = helper_pi2fd,
2903 [0x1c] = helper_pf2iw,
2904 [0x1d] = helper_pf2id,
2905 [0x8a] = helper_pfnacc,
2906 [0x8e] = helper_pfpnacc,
2907 [0x90] = helper_pfcmpge,
2908 [0x94] = helper_pfmin,
2909 [0x96] = helper_pfrcp,
2910 [0x97] = helper_pfrsqrt,
2911 [0x9a] = helper_pfsub,
2912 [0x9e] = helper_pfadd,
2913 [0xa0] = helper_pfcmpgt,
2914 [0xa4] = helper_pfmax,
2915 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2916 [0xa7] = helper_movq, /* pfrsqit1 */
2917 [0xaa] = helper_pfsubr,
2918 [0xae] = helper_pfacc,
2919 [0xb0] = helper_pfcmpeq,
2920 [0xb4] = helper_pfmul,
2921 [0xb6] = helper_movq, /* pfrcpit2 */
2922 [0xb7] = helper_pmulhrw_mmx,
2923 [0xbb] = helper_pswapd,
2924 [0xbf] = helper_pavgb_mmx /* pavgusb */
2927 struct sse_op_helper_s {
2928 void *op[2]; uint32_t ext_mask;
2930 #define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
2931 #define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
2932 #define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
2933 #define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
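/* Three-byte 0F 38 opcodes. Each entry carries the CPUID ECX feature
   bit (ext_mask) that must be present in s->cpuid_ext_features before
   the op is accepted; op[b1] again selects the MMX or 0x66-prefixed
   XMM helper. */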
2934 static struct sse_op_helper_s sse_op_table6[256] = {
2935 [0x00] = SSSE3_OP(pshufb),
2936 [0x01] = SSSE3_OP(phaddw),
2937 [0x02] = SSSE3_OP(phaddd),
2938 [0x03] = SSSE3_OP(phaddsw),
2939 [0x04] = SSSE3_OP(pmaddubsw),
2940 [0x05] = SSSE3_OP(phsubw),
2941 [0x06] = SSSE3_OP(phsubd),
2942 [0x07] = SSSE3_OP(phsubsw),
2943 [0x08] = SSSE3_OP(psignb),
2944 [0x09] = SSSE3_OP(psignw),
2945 [0x0a] = SSSE3_OP(psignd),
2946 [0x0b] = SSSE3_OP(pmulhrsw),
2947 [0x10] = SSE41_OP(pblendvb),
2948 [0x14] = SSE41_OP(blendvps),
2949 [0x15] = SSE41_OP(blendvpd),
2950 [0x17] = SSE41_OP(ptest),
2951 [0x1c] = SSSE3_OP(pabsb),
2952 [0x1d] = SSSE3_OP(pabsw),
2953 [0x1e] = SSSE3_OP(pabsd),
2954 [0x20] = SSE41_OP(pmovsxbw),
2955 [0x21] = SSE41_OP(pmovsxbd),
2956 [0x22] = SSE41_OP(pmovsxbq),
2957 [0x23] = SSE41_OP(pmovsxwd),
2958 [0x24] = SSE41_OP(pmovsxwq),
2959 [0x25] = SSE41_OP(pmovsxdq),
2960 [0x28] = SSE41_OP(pmuldq),
2961 [0x29] = SSE41_OP(pcmpeqq),
2962 [0x2a] = SSE41_SPECIAL, /* movntdqa */
2963 [0x2b] = SSE41_OP(packusdw),
2964 [0x30] = SSE41_OP(pmovzxbw),
2965 [0x31] = SSE41_OP(pmovzxbd),
2966 [0x32] = SSE41_OP(pmovzxbq),
2967 [0x33] = SSE41_OP(pmovzxwd),
2968 [0x34] = SSE41_OP(pmovzxwq),
2969 [0x35] = SSE41_OP(pmovzxdq),
2970 [0x37] = SSE42_OP(pcmpgtq),
2971 [0x38] = SSE41_OP(pminsb),
2972 [0x39] = SSE41_OP(pminsd),
2973 [0x3a] = SSE41_OP(pminuw),
2974 [0x3b] = SSE41_OP(pminud),
2975 [0x3c] = SSE41_OP(pmaxsb),
2976 [0x3d] = SSE41_OP(pmaxsd),
2977 [0x3e] = SSE41_OP(pmaxuw),
2978 [0x3f] = SSE41_OP(pmaxud),
2979 [0x40] = SSE41_OP(pmulld),
2980 [0x41] = SSE41_OP(phminposuw),
2983 static struct sse_op_helper_s sse_op_table7[256] = {
2984 [0x08] = SSE41_OP(roundps),
2985 [0x09] = SSE41_OP(roundpd),
2986 [0x0a] = SSE41_OP(roundss),
2987 [0x0b] = SSE41_OP(roundsd),
2988 [0x0c] = SSE41_OP(blendps),
2989 [0x0d] = SSE41_OP(blendpd),
2990 [0x0e] = SSE41_OP(pblendw),
2991 [0x0f] = SSSE3_OP(palignr),
2992 [0x14] = SSE41_SPECIAL, /* pextrb */
2993 [0x15] = SSE41_SPECIAL, /* pextrw */
2994 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
2995 [0x17] = SSE41_SPECIAL, /* extractps */
2996 [0x20] = SSE41_SPECIAL, /* pinsrb */
2997 [0x21] = SSE41_SPECIAL, /* insertps */
2998 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
2999 [0x40] = SSE41_OP(dpps),
3000 [0x41] = SSE41_OP(dppd),
3001 [0x42] = SSE41_OP(mpsadbw),
3002 [0x60] = SSE42_OP(pcmpestrm),
3003 [0x61] = SSE42_OP(pcmpestri),
3004 [0x62] = SSE42_OP(pcmpistrm),
3005 [0x63] = SSE42_OP(pcmpistri),
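/* Translate one MMX/SSE* instruction: b is the opcode byte from the
   0F map and b1 the mandatory-prefix index (0 = none, 1 = 0x66,
   2 = 0xF3, 3 = 0xF2), which selects the column of sse_op_table1.
   For SSE_SPECIAL entries b1 is folded into bits 8-9 of b before the
   big switch, so the case labels below read e.g. 0x1e7 for
   66 0F E7 (movntdq) and 0x3f0 for F2 0F F0 (lddqu). */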
3008 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3010 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3011 int modrm, mod, rm, reg, reg_addr, offset_addr;
3015 if (s->prefix & PREFIX_DATA)
3017 else if (s->prefix & PREFIX_REPZ)
3019 else if (s->prefix & PREFIX_REPNZ)
3023 sse_op2 = sse_op_table1[b][b1];
3026 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3036 /* simple MMX/SSE operation */
3037 if (s->flags & HF_TS_MASK) {
3038 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3041 if (s->flags & HF_EM_MASK) {
3043 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3046 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3047 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3050 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3053 tcg_gen_helper_0_0(helper_emms);
3058 tcg_gen_helper_0_0(helper_emms);
3061 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3062 the static cpu state) */
3064 tcg_gen_helper_0_0(helper_enter_mmx);
3067 modrm = ldub_code(s->pc++);
3068 reg = ((modrm >> 3) & 7);
3071 mod = (modrm >> 6) & 3;
3072 if (sse_op2 == SSE_SPECIAL) {
3075 case 0x0e7: /* movntq */
3078 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3079 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3081 case 0x1e7: /* movntdq */
3082 case 0x02b: /* movntps */
3083 case 0x12b: /* movntpd */
3084 case 0x3f0: /* lddqu */
3087 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3088 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3090 case 0x6e: /* movd mm, ea */
3091 #ifdef TARGET_X86_64
3092 if (s->dflag == 2) {
3093 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3094 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3098 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3099 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3100 offsetof(CPUX86State,fpregs[reg].mmx));
3101 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3104 case 0x16e: /* movd xmm, ea */
3105 #ifdef TARGET_X86_64
3106 if (s->dflag == 2) {
3107 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3108 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3109 offsetof(CPUX86State,xmm_regs[reg]));
3110 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3114 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3115 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3116 offsetof(CPUX86State,xmm_regs[reg]));
3117 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3118 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3121 case 0x6f: /* movq mm, ea */
3123 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3124 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3127 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3128 offsetof(CPUX86State,fpregs[rm].mmx));
3129 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3130 offsetof(CPUX86State,fpregs[reg].mmx));
3133 case 0x010: /* movups */
3134 case 0x110: /* movupd */
3135 case 0x028: /* movaps */
3136 case 0x128: /* movapd */
3137 case 0x16f: /* movdqa xmm, ea */
3138 case 0x26f: /* movdqu xmm, ea */
3140 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3141 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3143 rm = (modrm & 7) | REX_B(s);
3144 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3145 offsetof(CPUX86State,xmm_regs[rm]));
3148 case 0x210: /* movss xmm, ea */
3150 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3151 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3152 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3154 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3155 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3156 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3158 rm = (modrm & 7) | REX_B(s);
3159 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3160 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3163 case 0x310: /* movsd xmm, ea */
3165 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3166 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3168 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3169 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3171 rm = (modrm & 7) | REX_B(s);
3172 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3173 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3176 case 0x012: /* movlps */
3177 case 0x112: /* movlpd */
3179 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3180 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3183 rm = (modrm & 7) | REX_B(s);
3184 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3185 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3188 case 0x212: /* movsldup */
3190 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3191 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3193 rm = (modrm & 7) | REX_B(s);
3194 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3195 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3196 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3197 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3199 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3200 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3201 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3202 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3204 case 0x312: /* movddup */
3206 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3207 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3209 rm = (modrm & 7) | REX_B(s);
3210 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3211 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3213 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3214 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3216 case 0x016: /* movhps */
3217 case 0x116: /* movhpd */
3219 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3220 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3223 rm = (modrm & 7) | REX_B(s);
3224 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3225 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3228 case 0x216: /* movshdup */
3230 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3231 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3233 rm = (modrm & 7) | REX_B(s);
3234 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3235 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3236 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3237 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3239 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3240 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3241 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3242 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3244 case 0x7e: /* movd ea, mm */
3245 #ifdef TARGET_X86_64
3246 if (s->dflag == 2) {
3247 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3248 offsetof(CPUX86State,fpregs[reg].mmx));
3249 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3253 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3254 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3255 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3258 case 0x17e: /* movd ea, xmm */
3259 #ifdef TARGET_X86_64
3260 if (s->dflag == 2) {
3261 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3262 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3263 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3267 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3268 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3269 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3272 case 0x27e: /* movq xmm, ea */
3274 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3275 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3277 rm = (modrm & 7) | REX_B(s);
3278 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3279 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3281 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3283 case 0x7f: /* movq ea, mm */
3285 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3286 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3289 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3290 offsetof(CPUX86State,fpregs[reg].mmx));
3293 case 0x011: /* movups */
3294 case 0x111: /* movupd */
3295 case 0x029: /* movaps */
3296 case 0x129: /* movapd */
3297 case 0x17f: /* movdqa ea, xmm */
3298 case 0x27f: /* movdqu ea, xmm */
3300 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3301 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3303 rm = (modrm & 7) | REX_B(s);
3304 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3305 offsetof(CPUX86State,xmm_regs[reg]));
3308 case 0x211: /* movss ea, xmm */
3310 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3311 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3312 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3314 rm = (modrm & 7) | REX_B(s);
3315 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3316 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3319 case 0x311: /* movsd ea, xmm */
3321 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3322 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3324 rm = (modrm & 7) | REX_B(s);
3325 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3326 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3329 case 0x013: /* movlps */
3330 case 0x113: /* movlpd */
3332 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3333 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3338 case 0x017: /* movhps */
3339 case 0x117: /* movhpd */
3341 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3342 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
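/* psrlw/psrld/... $imm: there is no register source, so the 8-bit
   immediate is written into the two low 32-bit words of a scratch
   operand (xmm_t0/mmx_t0) and the ordinary two-operand shift
   helpers from sse_op_table2 are applied to it. */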
3347 case 0x71: /* shift mm, im */
3350 case 0x171: /* shift xmm, im */
3353 val = ldub_code(s->pc++);
3355 gen_op_movl_T0_im(val);
3356 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3358 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3359 op1_offset = offsetof(CPUX86State,xmm_t0);
3361 gen_op_movl_T0_im(val);
3362 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3364 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3365 op1_offset = offsetof(CPUX86State,mmx_t0);
3367 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3371 rm = (modrm & 7) | REX_B(s);
3372 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3375 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3377 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3378 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3379 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3381 case 0x050: /* movmskps */
3382 rm = (modrm & 7) | REX_B(s);
3383 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3384 offsetof(CPUX86State,xmm_regs[rm]));
3385 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3386 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3387 gen_op_mov_reg_T0(OT_LONG, reg);
3389 case 0x150: /* movmskpd */
3390 rm = (modrm & 7) | REX_B(s);
3391 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3392 offsetof(CPUX86State,xmm_regs[rm]));
3393 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3394 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3395 gen_op_mov_reg_T0(OT_LONG, reg);
3397 case 0x02a: /* cvtpi2ps */
3398 case 0x12a: /* cvtpi2pd */
3399 tcg_gen_helper_0_0(helper_enter_mmx);
3401 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3402 op2_offset = offsetof(CPUX86State,mmx_t0);
3403 gen_ldq_env_A0(s->mem_index, op2_offset);
3406 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3408 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3409 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3410 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3413 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3417 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
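/* cvtsi2ss/cvtsi2sd take a general register or memory source; with
   REX.W (dflag == 2) the 64-bit cvtsq2ss/cvtsq2sd entries of
   sse_op_table3 are used instead. */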
3421 case 0x22a: /* cvtsi2ss */
3422 case 0x32a: /* cvtsi2sd */
3423 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3424 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3425 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3426 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3427 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3428 if (ot == OT_LONG) {
3429 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3430 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3432 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3435 case 0x02c: /* cvttps2pi */
3436 case 0x12c: /* cvttpd2pi */
3437 case 0x02d: /* cvtps2pi */
3438 case 0x12d: /* cvtpd2pi */
3439 tcg_gen_helper_0_0(helper_enter_mmx);
3441 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3442 op2_offset = offsetof(CPUX86State,xmm_t0);
3443 gen_ldo_env_A0(s->mem_index, op2_offset);
3445 rm = (modrm & 7) | REX_B(s);
3446 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3448 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3449 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3450 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3453 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3456 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3459 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3462 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3466 case 0x22c: /* cvttss2si */
3467 case 0x32c: /* cvttsd2si */
3468 case 0x22d: /* cvtss2si */
3469 case 0x32d: /* cvtsd2si */
3470 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3472 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3474 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3476 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3477 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3479 op2_offset = offsetof(CPUX86State,xmm_t0);
3481 rm = (modrm & 7) | REX_B(s);
3482 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3484 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3486 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3487 if (ot == OT_LONG) {
3488 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3489 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3491 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3493 gen_op_mov_reg_T0(ot, reg);
3495 case 0xc4: /* pinsrw */
3498 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3499 val = ldub_code(s->pc++);
3502 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3503 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3506 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3507 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3510 case 0xc5: /* pextrw */
3514 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3515 val = ldub_code(s->pc++);
3518 rm = (modrm & 7) | REX_B(s);
3519 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3520 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3524 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3525 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3527 reg = ((modrm >> 3) & 7) | rex_r;
3528 gen_op_mov_reg_T0(ot, reg);
3530 case 0x1d6: /* movq ea, xmm */
3532 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3533 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3535 rm = (modrm & 7) | REX_B(s);
3536 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3537 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3538 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3541 case 0x2d6: /* movq2dq */
3542 tcg_gen_helper_0_0(helper_enter_mmx);
3544 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3545 offsetof(CPUX86State,fpregs[rm].mmx));
3546 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3548 case 0x3d6: /* movdq2q */
3549 tcg_gen_helper_0_0(helper_enter_mmx);
3550 rm = (modrm & 7) | REX_B(s);
3551 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3552 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3554 case 0xd7: /* pmovmskb */
3559 rm = (modrm & 7) | REX_B(s);
3560 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3561 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3564 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3565 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3567 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3568 reg = ((modrm >> 3) & 7) | rex_r;
3569 gen_op_mov_reg_T0(OT_LONG, reg);
3572 if (s->prefix & PREFIX_REPNZ)
3576 modrm = ldub_code(s->pc++);
3578 reg = ((modrm >> 3) & 7) | rex_r;
3579 mod = (modrm >> 6) & 3;
3581 sse_op2 = sse_op_table6[b].op[b1];
3584 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
3588 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3590 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3592 op2_offset = offsetof(CPUX86State,xmm_t0);
3593 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3595 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
3596 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
3597 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
3598 gen_ldq_env_A0(s->mem_index, op2_offset +
3599 offsetof(XMMReg, XMM_Q(0)));
3601 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
3602 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
3603 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
3604 (s->mem_index >> 2) - 1);
3605 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
3606 offsetof(XMMReg, XMM_L(0)));
3608 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
3609 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
3610 (s->mem_index >> 2) - 1);
3611 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
3612 offsetof(XMMReg, XMM_W(0)));
3614 case 0x2a: /* movntdqa */
3615 gen_ldo_env_A0(s->mem_index, op1_offset);
3618 gen_ldo_env_A0(s->mem_index, op2_offset);
3622 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3624 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3626 op2_offset = offsetof(CPUX86State,mmx_t0);
3627 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3628 gen_ldq_env_A0(s->mem_index, op2_offset);
3631 if (sse_op2 == SSE_SPECIAL)
3634 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3635 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3636 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3639 s->cc_op = CC_OP_EFLAGS;
3641 case 0x338: /* crc32 */
3644 modrm = ldub_code(s->pc++);
3645 reg = ((modrm >> 3) & 7) | rex_r;
3647 if (b != 0xf0 && b != 0xf1)
3649 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
3654 else if (b == 0xf1 && s->dflag != 2)
3655 if (s->prefix & PREFIX_DATA)
3662 gen_op_mov_TN_reg(OT_LONG, 0, reg);
3663 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3664 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3665 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
3666 cpu_T[0], tcg_const_i32(8 << ot));
3668 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3669 gen_op_mov_reg_T0(ot, reg);
3674 modrm = ldub_code(s->pc++);
3676 reg = ((modrm >> 3) & 7) | rex_r;
3677 mod = (modrm >> 6) & 3;
3679 sse_op2 = sse_op_table7[b].op[b1];
3682 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
3685 if (sse_op2 == SSE_SPECIAL) {
3686 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3687 rm = (modrm & 7) | REX_B(s);
3689 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3690 reg = ((modrm >> 3) & 7) | rex_r;
3691 val = ldub_code(s->pc++);
3693 case 0x14: /* pextrb */
3694 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3695 xmm_regs[reg].XMM_B(val & 15)));
3697 gen_op_mov_reg_T0(ot, rm);
3699 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
3700 (s->mem_index >> 2) - 1);
3702 case 0x15: /* pextrw */
3703 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3704 xmm_regs[reg].XMM_W(val & 7)));
3706 gen_op_mov_reg_T0(ot, rm);
3708 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
3709 (s->mem_index >> 2) - 1);
3712 if (ot == OT_LONG) { /* pextrd */
3713 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3714 offsetof(CPUX86State,
3715 xmm_regs[reg].XMM_L(val & 3)));
3717 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
3719 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
3720 (s->mem_index >> 2) - 1);
3721 } else { /* pextrq */
3722 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3723 offsetof(CPUX86State,
3724 xmm_regs[reg].XMM_Q(val & 1)));
3726 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
3728 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
3729 (s->mem_index >> 2) - 1);
3732 case 0x17: /* extractps */
3733 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3734 xmm_regs[reg].XMM_L(val & 3)));
3736 gen_op_mov_reg_T0(ot, rm);
3738 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
3739 (s->mem_index >> 2) - 1);
3741 case 0x20: /* pinsrb */
3743 gen_op_mov_TN_reg(OT_LONG, 0, rm);
3745 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
3746 (s->mem_index >> 2) - 1);
3747 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3748 xmm_regs[reg].XMM_B(val & 15)));
3750 case 0x21: /* insertps */
3752 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3753 offsetof(CPUX86State,xmm_regs[rm]
3754 .XMM_L((val >> 6) & 3)));
3756 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
3757 (s->mem_index >> 2) - 1);
3758 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3759 offsetof(CPUX86State,xmm_regs[reg]
3760 .XMM_L((val >> 4) & 3)));
3762 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3763 cpu_env, offsetof(CPUX86State,
3764 xmm_regs[reg].XMM_L(0)));
3766 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3767 cpu_env, offsetof(CPUX86State,
3768 xmm_regs[reg].XMM_L(1)));
3770 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3771 cpu_env, offsetof(CPUX86State,
3772 xmm_regs[reg].XMM_L(2)));
3774 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3775 cpu_env, offsetof(CPUX86State,
3776 xmm_regs[reg].XMM_L(3)));
3779 if (ot == OT_LONG) { /* pinsrd */
3781 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
3783 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
3784 (s->mem_index >> 2) - 1);
3785 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3786 offsetof(CPUX86State,
3787 xmm_regs[reg].XMM_L(val & 3)));
3788 } else { /* pinsrq */
3790 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
3792 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
3793 (s->mem_index >> 2) - 1);
3794 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3795 offsetof(CPUX86State,
3796 xmm_regs[reg].XMM_Q(val & 1)));
3804 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3806 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3808 op2_offset = offsetof(CPUX86State,xmm_t0);
3809 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3810 gen_ldo_env_A0(s->mem_index, op2_offset);
3813 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3815 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3817 op2_offset = offsetof(CPUX86State,mmx_t0);
3818 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3819 gen_ldq_env_A0(s->mem_index, op2_offset);
3822 val = ldub_code(s->pc++);
3824 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
3825 s->cc_op = CC_OP_EFLAGS;
3828 /* The helper must use entire 64-bit gp registers */
3832 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3833 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3834 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3840 /* generic MMX or SSE operation */
3842 case 0x70: /* pshufx insn */
3843 case 0xc6: /* pshufx insn */
3844 case 0xc2: /* compare insns */
3851 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3853 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3854 op2_offset = offsetof(CPUX86State,xmm_t0);
3855 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3857 /* specific case for SSE single instructions */
3860 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3861 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3864 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3867 gen_ldo_env_A0(s->mem_index, op2_offset);
3870 rm = (modrm & 7) | REX_B(s);
3871 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3874 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3876 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3877 op2_offset = offsetof(CPUX86State,mmx_t0);
3878 gen_ldq_env_A0(s->mem_index, op2_offset);
3881 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3885 case 0x0f: /* 3DNow! data insns */
3886 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3888 val = ldub_code(s->pc++);
3889 sse_op2 = sse_op_table5[val];
3892 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3893 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3894 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3896 case 0x70: /* pshufx insn */
3897 case 0xc6: /* pshufx insn */
3898 val = ldub_code(s->pc++);
3899 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3900 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3901 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3905 val = ldub_code(s->pc++);
3908 sse_op2 = sse_op_table4[val][b1];
3909 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3910 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3911 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3914 /* maskmovq/maskmovdqu: the store goes through DS:(E)DI, so we must prepare A0 */
3917 #ifdef TARGET_X86_64
3918 if (s->aflag == 2) {
3919 gen_op_movq_A0_reg(R_EDI);
3923 gen_op_movl_A0_reg(R_EDI);
3925 gen_op_andl_A0_ffff();
3927 gen_add_A0_ds_seg(s);
3929 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3930 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3931 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3934 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3935 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3936 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3939 if (b == 0x2e || b == 0x2f) {
3940 s->cc_op = CC_OP_EFLAGS;
3945 /* convert one instruction. s->is_jmp is set if the translation must
3946 be stopped. Return the next pc value */
3947 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3949 int b, prefixes, aflag, dflag;
3951 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3952 target_ulong next_eip, tval;
3955 if (unlikely(loglevel & CPU_LOG_TB_OP))
3956 tcg_gen_debug_insn_start(pc_start);
3964 #ifdef TARGET_X86_64
3969 s->rip_offset = 0; /* for relative ip address */
3971 b = ldub_code(s->pc);
3973 /* check prefixes */
3974 #ifdef TARGET_X86_64
3978 prefixes |= PREFIX_REPZ;
3981 prefixes |= PREFIX_REPNZ;
3984 prefixes |= PREFIX_LOCK;
4005 prefixes |= PREFIX_DATA;
4008 prefixes |= PREFIX_ADR;
4012 rex_w = (b >> 3) & 1;
4013 rex_r = (b & 0x4) << 1;
4014 s->rex_x = (b & 0x2) << 2;
4015 REX_B(s) = (b & 0x1) << 3;
4016 x86_64_hregs = 1; /* select uniform byte register addressing */
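/* With any REX prefix present, byte-register encodings 4-7 address
   spl/bpl/sil/dil instead of ah/ch/dh/bh; x86_64_hregs records this
   for the register move helpers. */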
4020 /* 0x66 is ignored if rex.w is set */
4023 if (prefixes & PREFIX_DATA)
4026 if (!(prefixes & PREFIX_ADR))
4033 prefixes |= PREFIX_REPZ;
4036 prefixes |= PREFIX_REPNZ;
4039 prefixes |= PREFIX_LOCK;
4060 prefixes |= PREFIX_DATA;
4063 prefixes |= PREFIX_ADR;
4066 if (prefixes & PREFIX_DATA)
4068 if (prefixes & PREFIX_ADR)
4072 s->prefix = prefixes;
4076 /* lock generation */
4077 if (prefixes & PREFIX_LOCK)
4078 tcg_gen_helper_0_0(helper_lock);
4080 /* now check op code */
4084 /**************************/
4085 /* extended op code */
4086 b = ldub_code(s->pc++) | 0x100;
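/* two-byte opcodes from the 0F map are remapped to 0x100-0x1ff so a
   single switch can dispatch both maps (see the 0x1xx case labels
   below). */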
4089 /**************************/
4107 ot = dflag + OT_WORD;
4110 case 0: /* OP Ev, Gv */
4111 modrm = ldub_code(s->pc++);
4112 reg = ((modrm >> 3) & 7) | rex_r;
4113 mod = (modrm >> 6) & 3;
4114 rm = (modrm & 7) | REX_B(s);
4116 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4118 } else if (op == OP_XORL && rm == reg) {
4120 /* xor reg, reg optimisation */
4122 s->cc_op = CC_OP_LOGICB + ot;
4123 gen_op_mov_reg_T0(ot, reg);
4124 gen_op_update1_cc();
4129 gen_op_mov_TN_reg(ot, 1, reg);
4130 gen_op(s, op, ot, opreg);
4132 case 1: /* OP Gv, Ev */
4133 modrm = ldub_code(s->pc++);
4134 mod = (modrm >> 6) & 3;
4135 reg = ((modrm >> 3) & 7) | rex_r;
4136 rm = (modrm & 7) | REX_B(s);
4138 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4139 gen_op_ld_T1_A0(ot + s->mem_index);
4140 } else if (op == OP_XORL && rm == reg) {
4143 gen_op_mov_TN_reg(ot, 1, rm);
4145 gen_op(s, op, ot, reg);
4147 case 2: /* OP A, Iv */
4148 val = insn_get(s, ot);
4149 gen_op_movl_T1_im(val);
4150 gen_op(s, op, ot, OR_EAX);
4159 case 0x80: /* GRP1 */
4168 ot = dflag + OT_WORD;
4170 modrm = ldub_code(s->pc++);
4171 mod = (modrm >> 6) & 3;
4172 rm = (modrm & 7) | REX_B(s);
4173 op = (modrm >> 3) & 7;
4179 s->rip_offset = insn_const_size(ot);
4180 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4191 val = insn_get(s, ot);
4194 val = (int8_t)insn_get(s, OT_BYTE);
4197 gen_op_movl_T1_im(val);
4198 gen_op(s, op, ot, opreg);
4202 /**************************/
4203 /* inc, dec, and other misc arith */
4204 case 0x40 ... 0x47: /* inc Gv */
4205 ot = dflag ? OT_LONG : OT_WORD;
4206 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4208 case 0x48 ... 0x4f: /* dec Gv */
4209 ot = dflag ? OT_LONG : OT_WORD;
4210 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4212 case 0xf6: /* GRP3 */
4217 ot = dflag + OT_WORD;
4219 modrm = ldub_code(s->pc++);
4220 mod = (modrm >> 6) & 3;
4221 rm = (modrm & 7) | REX_B(s);
4222 op = (modrm >> 3) & 7;
4225 s->rip_offset = insn_const_size(ot);
4226 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4227 gen_op_ld_T0_A0(ot + s->mem_index);
4229 gen_op_mov_TN_reg(ot, 0, rm);
4234 val = insn_get(s, ot);
4235 gen_op_movl_T1_im(val);
4236 gen_op_testl_T0_T1_cc();
4237 s->cc_op = CC_OP_LOGICB + ot;
4240 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4242 gen_op_st_T0_A0(ot + s->mem_index);
4244 gen_op_mov_reg_T0(ot, rm);
4248 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4250 gen_op_st_T0_A0(ot + s->mem_index);
4252 gen_op_mov_reg_T0(ot, rm);
4254 gen_op_update_neg_cc();
4255 s->cc_op = CC_OP_SUBB + ot;
4260 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4261 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4262 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4263 /* XXX: use 32 bit mul which could be faster */
4264 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4265 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4266 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4267 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4268 s->cc_op = CC_OP_MULB;
4271 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4272 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4273 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4274 /* XXX: use 32 bit mul which could be faster */
4275 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4276 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4277 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4278 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4279 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4280 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4281 s->cc_op = CC_OP_MULW;
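/* 32 x 32 -> 64 bit multiply: on an x86_64 target, target_ulong is
   wide enough to compute the full product with plain tl ops;
   otherwise both operands are widened into i64 temporaries. */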
4285 #ifdef TARGET_X86_64
4286 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4287 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4288 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4289 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4290 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4291 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4292 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4293 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4294 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4298 t0 = tcg_temp_new(TCG_TYPE_I64);
4299 t1 = tcg_temp_new(TCG_TYPE_I64);
4300 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4301 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4302 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4303 tcg_gen_mul_i64(t0, t0, t1);
4304 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4305 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4306 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4307 tcg_gen_shri_i64(t0, t0, 32);
4308 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4309 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4310 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4313 s->cc_op = CC_OP_MULL;
4315 #ifdef TARGET_X86_64
4317 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
4318 s->cc_op = CC_OP_MULQ;
4326 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4327 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4328 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4329 /* XXX: use 32 bit mul which could be faster */
4330 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4331 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4332 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4333 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4334 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4335 s->cc_op = CC_OP_MULB;
4338 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4339 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4340 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4341 /* XXX: use 32 bit mul which could be faster */
4342 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4343 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4344 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4345 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4346 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4347 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4348 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4349 s->cc_op = CC_OP_MULW;
4353 #ifdef TARGET_X86_64
4354 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4355 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4356 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4357 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4358 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4359 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4360 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4361 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4362 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4363 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4367 t0 = tcg_temp_new(TCG_TYPE_I64);
4368 t1 = tcg_temp_new(TCG_TYPE_I64);
4369 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4370 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4371 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4372 tcg_gen_mul_i64(t0, t0, t1);
4373 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4374 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4375 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4376 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4377 tcg_gen_shri_i64(t0, t0, 32);
4378 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4379 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4380 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4383 s->cc_op = CC_OP_MULL;
4385 #ifdef TARGET_X86_64
4387 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
4388 s->cc_op = CC_OP_MULQ;
4396 gen_jmp_im(pc_start - s->cs_base);
4397 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
4400 gen_jmp_im(pc_start - s->cs_base);
4401 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
4405 gen_jmp_im(pc_start - s->cs_base);
4406 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
4408 #ifdef TARGET_X86_64
4410 gen_jmp_im(pc_start - s->cs_base);
4411 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
4419 gen_jmp_im(pc_start - s->cs_base);
4420 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
4423 gen_jmp_im(pc_start - s->cs_base);
4424 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
4428 gen_jmp_im(pc_start - s->cs_base);
4429 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
4431 #ifdef TARGET_X86_64
4433 gen_jmp_im(pc_start - s->cs_base);
4434 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4444 case 0xfe: /* GRP4 */
4445 case 0xff: /* GRP5 */
4449 ot = dflag + OT_WORD;
4451 modrm = ldub_code(s->pc++);
4452 mod = (modrm >> 6) & 3;
4453 rm = (modrm & 7) | REX_B(s);
4454 op = (modrm >> 3) & 7;
4455 if (op >= 2 && b == 0xfe) {
4459 if (op == 2 || op == 4) {
4460 /* operand size for jumps is 64 bit */
4462 } else if (op == 3 || op == 5) {
4463 /* for calls, the operand is 16 or 32 bit, even in long mode */
4465 ot = dflag ? OT_LONG : OT_WORD;
4466 } else if (op == 6) {
4467 /* default push size is 64 bit */
4468 ot = dflag ? OT_QUAD : OT_WORD;
4472 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4473 if (op >= 2 && op != 3 && op != 5)
4474 gen_op_ld_T0_A0(ot + s->mem_index);
4476 gen_op_mov_TN_reg(ot, 0, rm);
4480 case 0: /* inc Ev */
4485 gen_inc(s, ot, opreg, 1);
4487 case 1: /* dec Ev */
4492 gen_inc(s, ot, opreg, -1);
4494 case 2: /* call Ev */
4495 /* XXX: optimize if memory (no 'and' is necessary) */
4497 gen_op_andl_T0_ffff();
4498 next_eip = s->pc - s->cs_base;
4499 gen_movtl_T1_im(next_eip);
4504 case 3: /* lcall Ev */
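/* far pointer in memory: load the offset first, then the 16-bit
   selector stored just after it. */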
4505 gen_op_ld_T1_A0(ot + s->mem_index);
4506 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4507 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4509 if (s->pe && !s->vm86) {
4510 if (s->cc_op != CC_OP_DYNAMIC)
4511 gen_op_set_cc_op(s->cc_op);
4512 gen_jmp_im(pc_start - s->cs_base);
4513 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4514 tcg_gen_helper_0_4(helper_lcall_protected,
4515 cpu_tmp2_i32, cpu_T[1],
4516 tcg_const_i32(dflag),
4517 tcg_const_i32(s->pc - pc_start));
4519 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4520 tcg_gen_helper_0_4(helper_lcall_real,
4521 cpu_tmp2_i32, cpu_T[1],
4522 tcg_const_i32(dflag),
4523 tcg_const_i32(s->pc - s->cs_base));
4527 case 4: /* jmp Ev */
4529 gen_op_andl_T0_ffff();
4533 case 5: /* ljmp Ev */
4534 gen_op_ld_T1_A0(ot + s->mem_index);
4535 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4536 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4538 if (s->pe && !s->vm86) {
4539 if (s->cc_op != CC_OP_DYNAMIC)
4540 gen_op_set_cc_op(s->cc_op);
4541 gen_jmp_im(pc_start - s->cs_base);
4542 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4543 tcg_gen_helper_0_3(helper_ljmp_protected,
4546 tcg_const_i32(s->pc - pc_start));
4548 gen_op_movl_seg_T0_vm(R_CS);
4549 gen_op_movl_T0_T1();
4554 case 6: /* push Ev */
4562 case 0x84: /* test Ev, Gv */
4567 ot = dflag + OT_WORD;
4569 modrm = ldub_code(s->pc++);
4570 mod = (modrm >> 6) & 3;
4571 rm = (modrm & 7) | REX_B(s);
4572 reg = ((modrm >> 3) & 7) | rex_r;
4574 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4575 gen_op_mov_TN_reg(ot, 1, reg);
4576 gen_op_testl_T0_T1_cc();
4577 s->cc_op = CC_OP_LOGICB + ot;
4580 case 0xa8: /* test eAX, Iv */
4585 ot = dflag + OT_WORD;
4586 val = insn_get(s, ot);
4588 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4589 gen_op_movl_T1_im(val);
4590 gen_op_testl_T0_T1_cc();
4591 s->cc_op = CC_OP_LOGICB + ot;
4594 case 0x98: /* CWDE/CBW */
4595 #ifdef TARGET_X86_64
4597 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4598 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4599 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4603 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4604 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4605 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4607 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4608 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4609 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4612 case 0x99: /* CDQ/CWD */
4613 #ifdef TARGET_X86_64
4615 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4616 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4617 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4621 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4622 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4623 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4624 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4626 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4627 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4628 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4629 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4632 case 0x1af: /* imul Gv, Ev */
4633 case 0x69: /* imul Gv, Ev, I */
4635 ot = dflag + OT_WORD;
4636 modrm = ldub_code(s->pc++);
4637 reg = ((modrm >> 3) & 7) | rex_r;
4639 s->rip_offset = insn_const_size(ot);
4642 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4644 val = insn_get(s, ot);
4645 gen_op_movl_T1_im(val);
4646 } else if (b == 0x6b) {
4647 val = (int8_t)insn_get(s, OT_BYTE);
4648 gen_op_movl_T1_im(val);
4650 gen_op_mov_TN_reg(ot, 1, reg);
4653 #ifdef TARGET_X86_64
4654 if (ot == OT_QUAD) {
4655 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4658 if (ot == OT_LONG) {
4659 #ifdef TARGET_X86_64
4660 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4661 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4662 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4663 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4664 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4665 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4669 t0 = tcg_temp_new(TCG_TYPE_I64);
4670 t1 = tcg_temp_new(TCG_TYPE_I64);
4671 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4672 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4673 tcg_gen_mul_i64(t0, t0, t1);
4674 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4675 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4676 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4677 tcg_gen_shri_i64(t0, t0, 32);
4678 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4679 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4683 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4684 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4685 /* XXX: use 32 bit mul which could be faster */
4686 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4687 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4688 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4689 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4691 gen_op_mov_reg_T0(ot, reg);
4692 s->cc_op = CC_OP_MULB + ot;
4695 case 0x1c1: /* xadd Ev, Gv */
4699 ot = dflag + OT_WORD;
4700 modrm = ldub_code(s->pc++);
4701 reg = ((modrm >> 3) & 7) | rex_r;
4702 mod = (modrm >> 6) & 3;
4704 rm = (modrm & 7) | REX_B(s);
4705 gen_op_mov_TN_reg(ot, 0, reg);
4706 gen_op_mov_TN_reg(ot, 1, rm);
4707 gen_op_addl_T0_T1();
4708 gen_op_mov_reg_T1(ot, reg);
4709 gen_op_mov_reg_T0(ot, rm);
4711 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4712 gen_op_mov_TN_reg(ot, 0, reg);
4713 gen_op_ld_T1_A0(ot + s->mem_index);
4714 gen_op_addl_T0_T1();
4715 gen_op_st_T0_A0(ot + s->mem_index);
4716 gen_op_mov_reg_T1(ot, reg);
4718 gen_op_update2_cc();
4719 s->cc_op = CC_OP_ADDB + ot;
4722 case 0x1b1: /* cmpxchg Ev, Gv */
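/* cmpxchg: compute EAX - dest; if zero, store the source operand to
   dest, otherwise load dest into EAX. Either way the flags are set
   as for the subtraction (cc_src/cc_dst below). */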
4725 TCGv t0, t1, t2, a0;
4730 ot = dflag + OT_WORD;
4731 modrm = ldub_code(s->pc++);
4732 reg = ((modrm >> 3) & 7) | rex_r;
4733 mod = (modrm >> 6) & 3;
4734 t0 = tcg_temp_local_new(TCG_TYPE_TL);
4735 t1 = tcg_temp_local_new(TCG_TYPE_TL);
4736 t2 = tcg_temp_local_new(TCG_TYPE_TL);
4737 a0 = tcg_temp_local_new(TCG_TYPE_TL);
4738 gen_op_mov_v_reg(ot, t1, reg);
4740 rm = (modrm & 7) | REX_B(s);
4741 gen_op_mov_v_reg(ot, t0, rm);
4743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4744 tcg_gen_mov_tl(a0, cpu_A0);
4745 gen_op_ld_v(ot + s->mem_index, t0, a0);
4746 rm = 0; /* avoid warning */
4748 label1 = gen_new_label();
4749 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
4750 tcg_gen_sub_tl(t2, t2, t0);
4752 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4754 label2 = gen_new_label();
4755 gen_op_mov_reg_v(ot, R_EAX, t0);
4757 gen_set_label(label1);
4758 gen_op_mov_reg_v(ot, rm, t1);
4759 gen_set_label(label2);
4761 tcg_gen_mov_tl(t1, t0);
4762 gen_op_mov_reg_v(ot, R_EAX, t0);
4763 gen_set_label(label1);
4765 gen_op_st_v(ot + s->mem_index, t1, a0);
4767 tcg_gen_mov_tl(cpu_cc_src, t0);
4768 tcg_gen_mov_tl(cpu_cc_dst, t2);
4769 s->cc_op = CC_OP_SUBB + ot;
4776 case 0x1c7: /* cmpxchg8b */
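/* cmpxchg8b m64 (cmpxchg16b m128 with REX.W) compares EDX:EAX with
   the memory operand and stores ECX:EBX on a match; the operand must
   be in memory and the modrm reg field must be /1, hence the check
   below. Both forms are done entirely in a helper. */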
4777 modrm = ldub_code(s->pc++);
4778 mod = (modrm >> 6) & 3;
4779 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4781 #ifdef TARGET_X86_64
4783 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4785 gen_jmp_im(pc_start - s->cs_base);
4786 if (s->cc_op != CC_OP_DYNAMIC)
4787 gen_op_set_cc_op(s->cc_op);
4788 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4789 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
4793 if (!(s->cpuid_features & CPUID_CX8))
4795 gen_jmp_im(pc_start - s->cs_base);
4796 if (s->cc_op != CC_OP_DYNAMIC)
4797 gen_op_set_cc_op(s->cc_op);
4798 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4799 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4801 s->cc_op = CC_OP_EFLAGS;
4804 /**************************/
4806 case 0x50 ... 0x57: /* push */
4807 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4810 case 0x58 ... 0x5f: /* pop */
4812 ot = dflag ? OT_QUAD : OT_WORD;
4814 ot = dflag + OT_WORD;
4817 /* NOTE: order is important for pop %sp */
4819 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4821 case 0x60: /* pusha */
4826 case 0x61: /* popa */
4831 case 0x68: /* push Iv */
4834 ot = dflag ? OT_QUAD : OT_WORD;
4836 ot = dflag + OT_WORD;
4839 val = insn_get(s, ot);
4841 val = (int8_t)insn_get(s, OT_BYTE);
4842 gen_op_movl_T0_im(val);
4845 case 0x8f: /* pop Ev */
4847 ot = dflag ? OT_QUAD : OT_WORD;
4849 ot = dflag + OT_WORD;
4851 modrm = ldub_code(s->pc++);
4852 mod = (modrm >> 6) & 3;
4855 /* NOTE: order is important for pop %sp */
4857 rm = (modrm & 7) | REX_B(s);
4858 gen_op_mov_reg_T0(ot, rm);
4860 /* NOTE: order is important too for MMU exceptions */
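/* the architectural store address of pop Ev is computed with the
   value of ESP after the pop, so popl_esp_hack makes gen_lea_modrm
   bias ESP-based addressing by the operand size while the store is
   generated. */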
4861 s->popl_esp_hack = 1 << ot;
4862 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4863 s->popl_esp_hack = 0;
4867 case 0xc8: /* enter */
4870 val = lduw_code(s->pc);
4872 level = ldub_code(s->pc++);
4873 gen_enter(s, val, level);
4876 case 0xc9: /* leave */
4877 /* XXX: exception not precise (ESP is updated before potential exception) */
4879 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4880 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4881 } else if (s->ss32) {
4882 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4883 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4885 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4886 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4890 ot = dflag ? OT_QUAD : OT_WORD;
4892 ot = dflag + OT_WORD;
4894 gen_op_mov_reg_T0(ot, R_EBP);
4897 case 0x06: /* push es */
4898 case 0x0e: /* push cs */
4899 case 0x16: /* push ss */
4900 case 0x1e: /* push ds */
4903 gen_op_movl_T0_seg(b >> 3);
4906 case 0x1a0: /* push fs */
4907 case 0x1a8: /* push gs */
4908 gen_op_movl_T0_seg((b >> 3) & 7);
4911 case 0x07: /* pop es */
4912 case 0x17: /* pop ss */
4913 case 0x1f: /* pop ds */
4918 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4921 /* if reg == SS, inhibit interrupts/trace. */
4922 /* If several instructions disable interrupts, only the first does it */
4924 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4925 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4929 gen_jmp_im(s->pc - s->cs_base);
4933 case 0x1a1: /* pop fs */
4934 case 0x1a9: /* pop gs */
4936 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4939 gen_jmp_im(s->pc - s->cs_base);
4944 /**************************/
4947 case 0x89: /* mov Gv, Ev */
4951 ot = dflag + OT_WORD;
4952 modrm = ldub_code(s->pc++);
4953 reg = ((modrm >> 3) & 7) | rex_r;
4955 /* generate a generic store */
4956 gen_ldst_modrm(s, modrm, ot, reg, 1);
4959 case 0xc7: /* mov Ev, Iv */
4963 ot = dflag + OT_WORD;
4964 modrm = ldub_code(s->pc++);
4965 mod = (modrm >> 6) & 3;
4967 s->rip_offset = insn_const_size(ot);
4968 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4970 val = insn_get(s, ot);
4971 gen_op_movl_T0_im(val);
4973 gen_op_st_T0_A0(ot + s->mem_index);
4975 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4978 case 0x8b: /* mov Ev, Gv */
4982 ot = OT_WORD + dflag;
4983 modrm = ldub_code(s->pc++);
4984 reg = ((modrm >> 3) & 7) | rex_r;
4986 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4987 gen_op_mov_reg_T0(ot, reg);
4989 case 0x8e: /* mov seg, Gv */
4990 modrm = ldub_code(s->pc++);
4991 reg = (modrm >> 3) & 7;
4992 if (reg >= 6 || reg == R_CS)
4994 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4995 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4997 /* if reg == SS, inhibit interrupts/trace */
4998 /* If several instructions disable interrupts, only the first does it */
5000 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5001 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5005 gen_jmp_im(s->pc - s->cs_base);
5009 case 0x8c: /* mov Gv, seg */
5010 modrm = ldub_code(s->pc++);
5011 reg = (modrm >> 3) & 7;
5012 mod = (modrm >> 6) & 3;
5015 gen_op_movl_T0_seg(reg);
5017 ot = OT_WORD + dflag;
5020 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5023 case 0x1b6: /* movzbS Gv, Eb */
5024 case 0x1b7: /* movzwS Gv, Eb */
5025 case 0x1be: /* movsbS Gv, Eb */
5026 case 0x1bf: /* movswS Gv, Eb */
5029 /* d_ot is the size of the destination */
5030 d_ot = dflag + OT_WORD;
5031 /* ot is the size of the source */
5032 ot = (b & 1) + OT_BYTE;
5033 modrm = ldub_code(s->pc++);
5034 reg = ((modrm >> 3) & 7) | rex_r;
5035 mod = (modrm >> 6) & 3;
5036 rm = (modrm & 7) | REX_B(s);
5039 gen_op_mov_TN_reg(ot, 0, rm);
5040 switch(ot | (b & 8)) {
5042 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5045 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5048 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5052 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5055 gen_op_mov_reg_T0(d_ot, reg);
5057 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5059 gen_op_lds_T0_A0(ot + s->mem_index);
5061 gen_op_ldu_T0_A0(ot + s->mem_index);
5063 gen_op_mov_reg_T0(d_ot, reg);
5068 case 0x8d: /* lea */
5069 ot = dflag + OT_WORD;
5070 modrm = ldub_code(s->pc++);
5071 mod = (modrm >> 6) & 3;
5074 reg = ((modrm >> 3) & 7) | rex_r;
5075 /* we must ensure that no segment is added */
5079 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5081 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5084 case 0xa0: /* mov EAX, Ov */
5086 case 0xa2: /* mov Ov, EAX */
5089 target_ulong offset_addr;
5094 ot = dflag + OT_WORD;
5095 #ifdef TARGET_X86_64
5096 if (s->aflag == 2) {
5097 offset_addr = ldq_code(s->pc);
5099 gen_op_movq_A0_im(offset_addr);
5104 offset_addr = insn_get(s, OT_LONG);
5106 offset_addr = insn_get(s, OT_WORD);
5108 gen_op_movl_A0_im(offset_addr);
5110 gen_add_A0_ds_seg(s);
5112 gen_op_ld_T0_A0(ot + s->mem_index);
5113 gen_op_mov_reg_T0(ot, R_EAX);
5115 gen_op_mov_TN_reg(ot, 0, R_EAX);
5116 gen_op_st_T0_A0(ot + s->mem_index);
5120 case 0xd7: /* xlat */
5121 #ifdef TARGET_X86_64
5122 if (s->aflag == 2) {
5123 gen_op_movq_A0_reg(R_EBX);
5124 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5125 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5126 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5130 gen_op_movl_A0_reg(R_EBX);
5131 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5132 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5133 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5135 gen_op_andl_A0_ffff();
5137 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5139 gen_add_A0_ds_seg(s);
5140 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5141 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5143 case 0xb0 ... 0xb7: /* mov R, Ib */
5144 val = insn_get(s, OT_BYTE);
5145 gen_op_movl_T0_im(val);
5146 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5148 case 0xb8 ... 0xbf: /* mov R, Iv */
5149 #ifdef TARGET_X86_64
5153 tmp = ldq_code(s->pc);
5155 reg = (b & 7) | REX_B(s);
5156 gen_movtl_T0_im(tmp);
5157 gen_op_mov_reg_T0(OT_QUAD, reg);
5161 ot = dflag ? OT_LONG : OT_WORD;
5162 val = insn_get(s, ot);
5163 reg = (b & 7) | REX_B(s);
5164 gen_op_movl_T0_im(val);
5165 gen_op_mov_reg_T0(ot, reg);
5169 case 0x91 ... 0x97: /* xchg R, EAX */
5170 ot = dflag + OT_WORD;
5171 reg = (b & 7) | REX_B(s);
5175 case 0x87: /* xchg Ev, Gv */
5179 ot = dflag + OT_WORD;
5180 modrm = ldub_code(s->pc++);
5181 reg = ((modrm >> 3) & 7) | rex_r;
5182 mod = (modrm >> 6) & 3;
5184 rm = (modrm & 7) | REX_B(s);
5186 gen_op_mov_TN_reg(ot, 0, reg);
5187 gen_op_mov_TN_reg(ot, 1, rm);
5188 gen_op_mov_reg_T0(ot, rm);
5189 gen_op_mov_reg_T1(ot, reg);
5191 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5192 gen_op_mov_TN_reg(ot, 0, reg);
5193 /* for xchg, lock is implicit */
5194 if (!(prefixes & PREFIX_LOCK))
5195 tcg_gen_helper_0_0(helper_lock);
5196 gen_op_ld_T1_A0(ot + s->mem_index);
5197 gen_op_st_T0_A0(ot + s->mem_index);
5198 if (!(prefixes & PREFIX_LOCK))
5199 tcg_gen_helper_0_0(helper_unlock);
5200 gen_op_mov_reg_T1(ot, reg);
5203 case 0xc4: /* les Gv */
5208 case 0xc5: /* lds Gv */
5213 case 0x1b2: /* lss Gv */
5216 case 0x1b4: /* lfs Gv */
5219 case 0x1b5: /* lgs Gv */
5222 ot = dflag ? OT_LONG : OT_WORD;
5223 modrm = ldub_code(s->pc++);
5224 reg = ((modrm >> 3) & 7) | rex_r;
5225 mod = (modrm >> 6) & 3;
5228 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5229 gen_op_ld_T1_A0(ot + s->mem_index);
5230 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5231 /* load the segment first to handle exceptions properly */
5232 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5233 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5234 /* then put the data */
5235 gen_op_mov_reg_T1(ot, reg);
5237 gen_jmp_im(s->pc - s->cs_base);
5242 /************************/
5253 ot = dflag + OT_WORD;
5255 modrm = ldub_code(s->pc++);
5256 mod = (modrm >> 6) & 3;
5257 op = (modrm >> 3) & 7;
5263 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5266 opreg = (modrm & 7) | REX_B(s);
5271 gen_shift(s, op, ot, opreg, OR_ECX);
5274 shift = ldub_code(s->pc++);
5276 gen_shifti(s, op, ot, opreg, shift);
5291 case 0x1a4: /* shld imm */
5295 case 0x1a5: /* shld cl */
5299 case 0x1ac: /* shrd imm */
5303 case 0x1ad: /* shrd cl */
5307 ot = dflag + OT_WORD;
5308 modrm = ldub_code(s->pc++);
5309 mod = (modrm >> 6) & 3;
5310 rm = (modrm & 7) | REX_B(s);
5311 reg = ((modrm >> 3) & 7) | rex_r;
5313 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5318 gen_op_mov_TN_reg(ot, 1, reg);
5321 val = ldub_code(s->pc++);
5322 tcg_gen_movi_tl(cpu_T3, val);
5324 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
5326 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5329 /************************/
5332 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5333 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
5334 /* XXX: what to do if illegal op ? */
5335 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5338 modrm = ldub_code(s->pc++);
5339 mod = (modrm >> 6) & 3;
5341 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
5344 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5346 case 0x00 ... 0x07: /* fxxxs */
5347 case 0x10 ... 0x17: /* fixxxl */
5348 case 0x20 ... 0x27: /* fxxxl */
5349 case 0x30 ... 0x37: /* fixxx */
5356 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5357 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5358 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
5361 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5362 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5363 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5366 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5367 (s->mem_index >> 2) - 1);
5368 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
5372 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5373 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5374 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5378 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
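/* op1 indexes fp_arith_ST0_FT0[] in the usual x87 /r order: fadd, fmul, fcom, fcomp, fsub, fsubr, fdiv, fdivr */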
5380 /* fcomp needs pop */
5381 tcg_gen_helper_0_0(helper_fpop);
5385 case 0x08: /* flds */
5386 case 0x0a: /* fsts */
5387 case 0x0b: /* fstps */
5388 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5389 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5390 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5395 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5396 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5397 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
5400 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5401 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5402 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5405 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5406 (s->mem_index >> 2) - 1);
5407 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
5411 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5412 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5413 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5418 /* XXX: the corresponding CPUID bit must be tested ! */
5421 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
5422 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5423 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5426 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
5427 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5428 (s->mem_index >> 2) - 1);
5432 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
5433 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5434 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5437 tcg_gen_helper_0_0(helper_fpop);
5442 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
5443 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5444 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5447 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
5448 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5449 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5452 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
5453 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5454 (s->mem_index >> 2) - 1);
5458 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
5459 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5460 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5464 tcg_gen_helper_0_0(helper_fpop);
5468 case 0x0c: /* fldenv mem */
5469 if (s->cc_op != CC_OP_DYNAMIC)
5470 gen_op_set_cc_op(s->cc_op);
5471 gen_jmp_im(pc_start - s->cs_base);
5472 tcg_gen_helper_0_2(helper_fldenv,
5473 cpu_A0, tcg_const_i32(s->dflag));
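/* dflag selects between the 16-bit and 32-bit FPU environment layouts */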
5475 case 0x0d: /* fldcw mem */
5476 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5477 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5478 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5480 case 0x0e: /* fnstenv mem */
5481 if (s->cc_op != CC_OP_DYNAMIC)
5482 gen_op_set_cc_op(s->cc_op);
5483 gen_jmp_im(pc_start - s->cs_base);
5484 tcg_gen_helper_0_2(helper_fstenv,
5485 cpu_A0, tcg_const_i32(s->dflag));
5487 case 0x0f: /* fnstcw mem */
5488 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5489 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5490 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5492 case 0x1d: /* fldt mem */
5493 if (s->cc_op != CC_OP_DYNAMIC)
5494 gen_op_set_cc_op(s->cc_op);
5495 gen_jmp_im(pc_start - s->cs_base);
5496 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5498 case 0x1f: /* fstpt mem */
5499 if (s->cc_op != CC_OP_DYNAMIC)
5500 gen_op_set_cc_op(s->cc_op);
5501 gen_jmp_im(pc_start - s->cs_base);
5502 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5503 tcg_gen_helper_0_0(helper_fpop);
5505 case 0x2c: /* frstor mem */
5506 if (s->cc_op != CC_OP_DYNAMIC)
5507 gen_op_set_cc_op(s->cc_op);
5508 gen_jmp_im(pc_start - s->cs_base);
5509 tcg_gen_helper_0_2(helper_frstor,
5510 cpu_A0, tcg_const_i32(s->dflag));
5512 case 0x2e: /* fnsave mem */
5513 if (s->cc_op != CC_OP_DYNAMIC)
5514 gen_op_set_cc_op(s->cc_op);
5515 gen_jmp_im(pc_start - s->cs_base);
5516 tcg_gen_helper_0_2(helper_fsave,
5517 cpu_A0, tcg_const_i32(s->dflag));
5519 case 0x2f: /* fnstsw mem */
5520 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5521 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5522 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5524 case 0x3c: /* fbld */
5525 if (s->cc_op != CC_OP_DYNAMIC)
5526 gen_op_set_cc_op(s->cc_op);
5527 gen_jmp_im(pc_start - s->cs_base);
5528 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5530 case 0x3e: /* fbstp */
5531 if (s->cc_op != CC_OP_DYNAMIC)
5532 gen_op_set_cc_op(s->cc_op);
5533 gen_jmp_im(pc_start - s->cs_base);
5534 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5535 tcg_gen_helper_0_0(helper_fpop);
5537 case 0x3d: /* fildll */
5538 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5539 (s->mem_index >> 2) - 1);
5540 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5542 case 0x3f: /* fistpll */
5543 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5544 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5545 (s->mem_index >> 2) - 1);
5546 tcg_gen_helper_0_0(helper_fpop);
5552 /* register float ops */
5556 case 0x08: /* fld sti */
5557 tcg_gen_helper_0_0(helper_fpush);
5558 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5560 case 0x09: /* fxchg sti */
5561 case 0x29: /* fxchg4 sti, undocumented op */
5562 case 0x39: /* fxchg7 sti, undocumented op */
5563 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5565 case 0x0a: /* grp d9/2 */
5568 /* check exceptions (FreeBSD FPU probe) */
5569 if (s->cc_op != CC_OP_DYNAMIC)
5570 gen_op_set_cc_op(s->cc_op);
5571 gen_jmp_im(pc_start - s->cs_base);
5572 tcg_gen_helper_0_0(helper_fwait);
5578 case 0x0c: /* grp d9/4 */
5581 tcg_gen_helper_0_0(helper_fchs_ST0);
5584 tcg_gen_helper_0_0(helper_fabs_ST0);
5587 tcg_gen_helper_0_0(helper_fldz_FT0);
5588 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5591 tcg_gen_helper_0_0(helper_fxam_ST0);
5597 case 0x0d: /* grp d9/5 */
5601 tcg_gen_helper_0_0(helper_fpush);
5602 tcg_gen_helper_0_0(helper_fld1_ST0);
5605 tcg_gen_helper_0_0(helper_fpush);
5606 tcg_gen_helper_0_0(helper_fldl2t_ST0);
5609 tcg_gen_helper_0_0(helper_fpush);
5610 tcg_gen_helper_0_0(helper_fldl2e_ST0);
5613 tcg_gen_helper_0_0(helper_fpush);
5614 tcg_gen_helper_0_0(helper_fldpi_ST0);
5617 tcg_gen_helper_0_0(helper_fpush);
5618 tcg_gen_helper_0_0(helper_fldlg2_ST0);
5621 tcg_gen_helper_0_0(helper_fpush);
5622 tcg_gen_helper_0_0(helper_fldln2_ST0);
5625 tcg_gen_helper_0_0(helper_fpush);
5626 tcg_gen_helper_0_0(helper_fldz_ST0);
5633 case 0x0e: /* grp d9/6 */
5636 tcg_gen_helper_0_0(helper_f2xm1);
5639 tcg_gen_helper_0_0(helper_fyl2x);
5642 tcg_gen_helper_0_0(helper_fptan);
5644 case 3: /* fpatan */
5645 tcg_gen_helper_0_0(helper_fpatan);
5647 case 4: /* fxtract */
5648 tcg_gen_helper_0_0(helper_fxtract);
5650 case 5: /* fprem1 */
5651 tcg_gen_helper_0_0(helper_fprem1);
5653 case 6: /* fdecstp */
5654 tcg_gen_helper_0_0(helper_fdecstp);
5657 case 7: /* fincstp */
5658 tcg_gen_helper_0_0(helper_fincstp);
5662 case 0x0f: /* grp d9/7 */
5665 tcg_gen_helper_0_0(helper_fprem);
5667 case 1: /* fyl2xp1 */
5668 tcg_gen_helper_0_0(helper_fyl2xp1);
5671 tcg_gen_helper_0_0(helper_fsqrt);
5673 case 3: /* fsincos */
5674 tcg_gen_helper_0_0(helper_fsincos);
5676 case 5: /* fscale */
5677 tcg_gen_helper_0_0(helper_fscale);
5679 case 4: /* frndint */
5680 tcg_gen_helper_0_0(helper_frndint);
5683 tcg_gen_helper_0_0(helper_fsin);
5687 tcg_gen_helper_0_0(helper_fcos);
5691 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5692 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5693 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5699 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5701 tcg_gen_helper_0_0(helper_fpop);
5703 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5704 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5708 case 0x02: /* fcom */
5709 case 0x22: /* fcom2, undocumented op */
5710 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5711 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5713 case 0x03: /* fcomp */
5714 case 0x23: /* fcomp3, undocumented op */
5715 case 0x32: /* fcomp5, undocumented op */
5716 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5717 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5718 tcg_gen_helper_0_0(helper_fpop);
5720 case 0x15: /* da/5 */
5722 case 1: /* fucompp */
5723 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5724 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5725 tcg_gen_helper_0_0(helper_fpop);
5726 tcg_gen_helper_0_0(helper_fpop);
5734 case 0: /* feni (287 only, just do nop here) */
5736 case 1: /* fdisi (287 only, just do nop here) */
5739 tcg_gen_helper_0_0(helper_fclex);
5741 case 3: /* fninit */
5742 tcg_gen_helper_0_0(helper_fninit);
5744 case 4: /* fsetpm (287 only, just do nop here) */
5750 case 0x1d: /* fucomi */
5751 if (s->cc_op != CC_OP_DYNAMIC)
5752 gen_op_set_cc_op(s->cc_op);
5753 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5754 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5755 s->cc_op = CC_OP_EFLAGS;
5757 case 0x1e: /* fcomi */
5758 if (s->cc_op != CC_OP_DYNAMIC)
5759 gen_op_set_cc_op(s->cc_op);
5760 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5761 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5762 s->cc_op = CC_OP_EFLAGS;
5764 case 0x28: /* ffree sti */
5765 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5767 case 0x2a: /* fst sti */
5768 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5770 case 0x2b: /* fstp sti */
5771 case 0x0b: /* fstp1 sti, undocumented op */
5772 case 0x3a: /* fstp8 sti, undocumented op */
5773 case 0x3b: /* fstp9 sti, undocumented op */
5774 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5775 tcg_gen_helper_0_0(helper_fpop);
5777 case 0x2c: /* fucom st(i) */
5778 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5779 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5781 case 0x2d: /* fucomp st(i) */
5782 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5783 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5784 tcg_gen_helper_0_0(helper_fpop);
5786 case 0x33: /* de/3 */
5788 case 1: /* fcompp */
5789 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5790 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5791 tcg_gen_helper_0_0(helper_fpop);
5792 tcg_gen_helper_0_0(helper_fpop);
5798 case 0x38: /* ffreep sti, undocumented op */
5799 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5800 tcg_gen_helper_0_0(helper_fpop);
5802 case 0x3c: /* df/4 */
5805 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5806 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5807 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5813 case 0x3d: /* fucomip */
5814 if (s->cc_op != CC_OP_DYNAMIC)
5815 gen_op_set_cc_op(s->cc_op);
5816 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5817 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5818 tcg_gen_helper_0_0(helper_fpop);
5819 s->cc_op = CC_OP_EFLAGS;
5821 case 0x3e: /* fcomip */
5822 if (s->cc_op != CC_OP_DYNAMIC)
5823 gen_op_set_cc_op(s->cc_op);
5824 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5825 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5826 tcg_gen_helper_0_0(helper_fpop);
5827 s->cc_op = CC_OP_EFLAGS;
5829 case 0x10 ... 0x13: /* fcmovxx */
5833 static const uint8_t fcmov_cc[8] = {
5839 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
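/* fcmov_cc[] gives the base jcc condition and bit 3 of op selects the negated variant, so the branch below skips the fmov when the fcmov condition does not hold */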
5840 l1 = gen_new_label();
5841 gen_jcc1(s, s->cc_op, op1, l1);
5842 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5851 /************************/
5854 case 0xa4: /* movsS */
5859 ot = dflag + OT_WORD;
5861 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5862 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5868 case 0xaa: /* stosS */
5873 ot = dflag + OT_WORD;
5875 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5876 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5881 case 0xac: /* lodsS */
5886 ot = dflag + OT_WORD;
5887 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5888 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5893 case 0xae: /* scasS */
5898 ot = dflag + OT_WORD;
5899 if (prefixes & PREFIX_REPNZ) {
5900 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5901 } else if (prefixes & PREFIX_REPZ) {
5902 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5905 s->cc_op = CC_OP_SUBB + ot;
5909 case 0xa6: /* cmpsS */
5914 ot = dflag + OT_WORD;
5915 if (prefixes & PREFIX_REPNZ) {
5916 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5917 } else if (prefixes & PREFIX_REPZ) {
5918 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5921 s->cc_op = CC_OP_SUBB + ot;
5924 case 0x6c: /* insS */
5929 ot = dflag ? OT_LONG : OT_WORD;
5930 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5931 gen_op_andl_T0_ffff();
5932 gen_check_io(s, ot, pc_start - s->cs_base,
5933 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
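/* the extra bits build the SVM IOIO exit info: SVM_IOIO_TYPE_MASK marks an IN, svm_is_rep() adds the REP bit, and 4 is presumably the string-operation bit */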
5934 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5935 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5939 gen_jmp(s, s->pc - s->cs_base);
5943 case 0x6e: /* outsS */
5948 ot = dflag ? OT_LONG : OT_WORD;
5949 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5950 gen_op_andl_T0_ffff();
5951 gen_check_io(s, ot, pc_start - s->cs_base,
5952 svm_is_rep(prefixes) | 4);
5953 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5954 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5958 gen_jmp(s, s->pc - s->cs_base);
5963 /************************/
5971 ot = dflag ? OT_LONG : OT_WORD;
5972 val = ldub_code(s->pc++);
5973 gen_op_movl_T0_im(val);
5974 gen_check_io(s, ot, pc_start - s->cs_base,
5975 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5978 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5979 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5980 gen_op_mov_reg_T1(ot, R_EAX);
5983 gen_jmp(s, s->pc - s->cs_base);
5991 ot = dflag ? OT_LONG : OT_WORD;
5992 val = ldub_code(s->pc++);
5993 gen_op_movl_T0_im(val);
5994 gen_check_io(s, ot, pc_start - s->cs_base,
5995 svm_is_rep(prefixes));
5996 gen_op_mov_TN_reg(ot, 1, R_EAX);
6000 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6001 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6002 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6003 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6006 gen_jmp(s, s->pc - s->cs_base);
6014 ot = dflag ? OT_LONG : OT_WORD;
6015 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6016 gen_op_andl_T0_ffff();
6017 gen_check_io(s, ot, pc_start - s->cs_base,
6018 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6021 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6022 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6023 gen_op_mov_reg_T1(ot, R_EAX);
6026 gen_jmp(s, s->pc - s->cs_base);
6034 ot = dflag ? OT_LONG : OT_WORD;
6035 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6036 gen_op_andl_T0_ffff();
6037 gen_check_io(s, ot, pc_start - s->cs_base,
6038 svm_is_rep(prefixes));
6039 gen_op_mov_TN_reg(ot, 1, R_EAX);
6043 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6044 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6045 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6046 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6049 gen_jmp(s, s->pc - s->cs_base);
6053 /************************/
6055 case 0xc2: /* ret im */
6056 val = ldsw_code(s->pc);
6059 if (CODE64(s) && s->dflag)
6061 gen_stack_update(s, val + (2 << s->dflag));
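/* the stack adjustment covers the popped return address (2 << dflag bytes) plus the immediate operand */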
6063 gen_op_andl_T0_ffff();
6067 case 0xc3: /* ret */
6071 gen_op_andl_T0_ffff();
6075 case 0xca: /* lret im */
6076 val = ldsw_code(s->pc);
6079 if (s->pe && !s->vm86) {
6080 if (s->cc_op != CC_OP_DYNAMIC)
6081 gen_op_set_cc_op(s->cc_op);
6082 gen_jmp_im(pc_start - s->cs_base);
6083 tcg_gen_helper_0_2(helper_lret_protected,
6084 tcg_const_i32(s->dflag),
6085 tcg_const_i32(val));
6089 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6091 gen_op_andl_T0_ffff();
6092 /* NOTE: keeping EIP updated is not a problem in case of exception */
6096 gen_op_addl_A0_im(2 << s->dflag);
6097 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6098 gen_op_movl_seg_T0_vm(R_CS);
6099 /* add stack offset */
6100 gen_stack_update(s, val + (4 << s->dflag));
6104 case 0xcb: /* lret */
6107 case 0xcf: /* iret */
6108 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6111 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6112 s->cc_op = CC_OP_EFLAGS;
6113 } else if (s->vm86) {
6115 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6117 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6118 s->cc_op = CC_OP_EFLAGS;
6121 if (s->cc_op != CC_OP_DYNAMIC)
6122 gen_op_set_cc_op(s->cc_op);
6123 gen_jmp_im(pc_start - s->cs_base);
6124 tcg_gen_helper_0_2(helper_iret_protected,
6125 tcg_const_i32(s->dflag),
6126 tcg_const_i32(s->pc - s->cs_base));
6127 s->cc_op = CC_OP_EFLAGS;
6131 case 0xe8: /* call im */
6134 tval = (int32_t)insn_get(s, OT_LONG);
6136 tval = (int16_t)insn_get(s, OT_WORD);
6137 next_eip = s->pc - s->cs_base;
6141 gen_movtl_T0_im(next_eip);
6146 case 0x9a: /* lcall im */
6148 unsigned int selector, offset;
6152 ot = dflag ? OT_LONG : OT_WORD;
6153 offset = insn_get(s, ot);
6154 selector = insn_get(s, OT_WORD);
6156 gen_op_movl_T0_im(selector);
6157 gen_op_movl_T1_imu(offset);
6160 case 0xe9: /* jmp im */
6162 tval = (int32_t)insn_get(s, OT_LONG);
6164 tval = (int16_t)insn_get(s, OT_WORD);
6165 tval += s->pc - s->cs_base;
6170 case 0xea: /* ljmp im */
6172 unsigned int selector, offset;
6176 ot = dflag ? OT_LONG : OT_WORD;
6177 offset = insn_get(s, ot);
6178 selector = insn_get(s, OT_WORD);
6180 gen_op_movl_T0_im(selector);
6181 gen_op_movl_T1_imu(offset);
6184 case 0xeb: /* jmp Jb */
6185 tval = (int8_t)insn_get(s, OT_BYTE);
6186 tval += s->pc - s->cs_base;
6191 case 0x70 ... 0x7f: /* jcc Jb */
6192 tval = (int8_t)insn_get(s, OT_BYTE);
6194 case 0x180 ... 0x18f: /* jcc Jv */
6196 tval = (int32_t)insn_get(s, OT_LONG);
6198 tval = (int16_t)insn_get(s, OT_WORD);
6201 next_eip = s->pc - s->cs_base;
6205 gen_jcc(s, b, tval, next_eip);
6208 case 0x190 ... 0x19f: /* setcc Gv */
6209 modrm = ldub_code(s->pc++);
6211 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6213 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6218 ot = dflag + OT_WORD;
6219 modrm = ldub_code(s->pc++);
6220 reg = ((modrm >> 3) & 7) | rex_r;
6221 mod = (modrm >> 6) & 3;
6222 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6224 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6225 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6227 rm = (modrm & 7) | REX_B(s);
6228 gen_op_mov_v_reg(ot, t0, rm);
6230 #ifdef TARGET_X86_64
6231 if (ot == OT_LONG) {
6232 /* XXX: specific Intel behaviour ? */
6233 l1 = gen_new_label();
6234 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6235 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6237 tcg_gen_movi_tl(cpu_tmp0, 0);
6238 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6242 l1 = gen_new_label();
6243 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6244 gen_op_mov_reg_v(ot, reg, t0);
6251 /************************/
6253 case 0x9c: /* pushf */
6254 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6255 if (s->vm86 && s->iopl != 3) {
6256 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6258 if (s->cc_op != CC_OP_DYNAMIC)
6259 gen_op_set_cc_op(s->cc_op);
6260 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6264 case 0x9d: /* popf */
6265 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6266 if (s->vm86 && s->iopl != 3) {
6267 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
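/* which flag bits may be written depends on privilege: at CPL 0 even IOPL is writable, at CPL <= IOPL the IF flag is writable, otherwise neither may change */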
6272 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6273 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6275 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6276 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6279 if (s->cpl <= s->iopl) {
6281 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6282 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6284 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6285 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6289 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6290 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6292 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6293 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6298 s->cc_op = CC_OP_EFLAGS;
6299 /* abort translation because TF flag may change */
6300 gen_jmp_im(s->pc - s->cs_base);
6304 case 0x9e: /* sahf */
6305 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6307 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
6308 if (s->cc_op != CC_OP_DYNAMIC)
6309 gen_op_set_cc_op(s->cc_op);
6310 gen_compute_eflags(cpu_cc_src);
6311 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6312 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6313 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
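/* SAHF loads SF/ZF/AF/PF/CF from AH; OF, saved in cpu_cc_src above, is preserved */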
6314 s->cc_op = CC_OP_EFLAGS;
6316 case 0x9f: /* lahf */
6317 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6319 if (s->cc_op != CC_OP_DYNAMIC)
6320 gen_op_set_cc_op(s->cc_op);
6321 gen_compute_eflags(cpu_T[0]);
6322 /* Note: gen_compute_eflags() only gives the condition codes */
6323 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
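/* bit 1 of EFLAGS is the reserved always-one bit */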
6324 gen_op_mov_reg_T0(OT_BYTE, R_AH);
6326 case 0xf5: /* cmc */
6327 if (s->cc_op != CC_OP_DYNAMIC)
6328 gen_op_set_cc_op(s->cc_op);
6329 gen_compute_eflags(cpu_cc_src);
6330 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6331 s->cc_op = CC_OP_EFLAGS;
6333 case 0xf8: /* clc */
6334 if (s->cc_op != CC_OP_DYNAMIC)
6335 gen_op_set_cc_op(s->cc_op);
6336 gen_compute_eflags(cpu_cc_src);
6337 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6338 s->cc_op = CC_OP_EFLAGS;
6340 case 0xf9: /* stc */
6341 if (s->cc_op != CC_OP_DYNAMIC)
6342 gen_op_set_cc_op(s->cc_op);
6343 gen_compute_eflags(cpu_cc_src);
6344 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6345 s->cc_op = CC_OP_EFLAGS;
6347 case 0xfc: /* cld */
6348 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
6349 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6351 case 0xfd: /* std */
6352 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
6353 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6356 /************************/
6357 /* bit operations */
6358 case 0x1ba: /* bt/bts/btr/btc Gv, im */
6359 ot = dflag + OT_WORD;
6360 modrm = ldub_code(s->pc++);
6361 op = (modrm >> 3) & 7;
6362 mod = (modrm >> 6) & 3;
6363 rm = (modrm & 7) | REX_B(s);
6366 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6367 gen_op_ld_T0_A0(ot + s->mem_index);
6369 gen_op_mov_TN_reg(ot, 0, rm);
6372 val = ldub_code(s->pc++);
6373 gen_op_movl_T1_im(val);
6378 case 0x1a3: /* bt Gv, Ev */
6381 case 0x1ab: /* bts */
6384 case 0x1b3: /* btr */
6387 case 0x1bb: /* btc */
6390 ot = dflag + OT_WORD;
6391 modrm = ldub_code(s->pc++);
6392 reg = ((modrm >> 3) & 7) | rex_r;
6393 mod = (modrm >> 6) & 3;
6394 rm = (modrm & 7) | REX_B(s);
6395 gen_op_mov_TN_reg(OT_LONG, 1, reg);
6397 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6398 /* specific case: we need to add a displacement */
6399 gen_exts(ot, cpu_T[1]);
6400 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
6401 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
6402 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
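/* for a memory operand the bit index may reach beyond the operand itself, so (bit_index / width) * operand_size bytes are added to A0 first */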
6403 gen_op_ld_T0_A0(ot + s->mem_index);
6405 gen_op_mov_TN_reg(ot, 0, rm);
6408 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
6411 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
6412 tcg_gen_movi_tl(cpu_cc_dst, 0);
6415 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6416 tcg_gen_movi_tl(cpu_tmp0, 1);
6417 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6418 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6421 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6422 tcg_gen_movi_tl(cpu_tmp0, 1);
6423 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6424 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
6425 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6429 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6430 tcg_gen_movi_tl(cpu_tmp0, 1);
6431 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6432 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6435 s->cc_op = CC_OP_SARB + ot;
6438 gen_op_st_T0_A0(ot + s->mem_index);
6440 gen_op_mov_reg_T0(ot, rm);
6441 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
6442 tcg_gen_movi_tl(cpu_cc_dst, 0);
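/* with cc_op = CC_OP_SARB + ot, CF is read from bit 0 of cc_src, i.e. the bit that was tested */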
6445 case 0x1bc: /* bsf */
6446 case 0x1bd: /* bsr */
6451 ot = dflag + OT_WORD;
6452 modrm = ldub_code(s->pc++);
6453 reg = ((modrm >> 3) & 7) | rex_r;
6454 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6455 gen_extu(ot, cpu_T[0]);
6456 label1 = gen_new_label();
6457 tcg_gen_movi_tl(cpu_cc_dst, 0);
6458 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6459 tcg_gen_mov_tl(t0, cpu_T[0]);
6460 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
6462 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
6464 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
6466 gen_op_mov_reg_T0(ot, reg);
6467 tcg_gen_movi_tl(cpu_cc_dst, 1);
6468 gen_set_label(label1);
6469 tcg_gen_discard_tl(cpu_cc_src);
6470 s->cc_op = CC_OP_LOGICB + ot;
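/* if the source is zero, cc_dst stays 0 so ZF is set and the destination register is left unchanged */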
6474 /************************/
6476 case 0x27: /* daa */
6479 if (s->cc_op != CC_OP_DYNAMIC)
6480 gen_op_set_cc_op(s->cc_op);
6481 tcg_gen_helper_0_0(helper_daa);
6482 s->cc_op = CC_OP_EFLAGS;
6484 case 0x2f: /* das */
6487 if (s->cc_op != CC_OP_DYNAMIC)
6488 gen_op_set_cc_op(s->cc_op);
6489 tcg_gen_helper_0_0(helper_das);
6490 s->cc_op = CC_OP_EFLAGS;
6492 case 0x37: /* aaa */
6495 if (s->cc_op != CC_OP_DYNAMIC)
6496 gen_op_set_cc_op(s->cc_op);
6497 tcg_gen_helper_0_0(helper_aaa);
6498 s->cc_op = CC_OP_EFLAGS;
6500 case 0x3f: /* aas */
6503 if (s->cc_op != CC_OP_DYNAMIC)
6504 gen_op_set_cc_op(s->cc_op);
6505 tcg_gen_helper_0_0(helper_aas);
6506 s->cc_op = CC_OP_EFLAGS;
6508 case 0xd4: /* aam */
6511 val = ldub_code(s->pc++);
6513 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6515 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
6516 s->cc_op = CC_OP_LOGICB;
6519 case 0xd5: /* aad */
6522 val = ldub_code(s->pc++);
6523 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
6524 s->cc_op = CC_OP_LOGICB;
6526 /************************/
6528 case 0x90: /* nop */
6529 /* XXX: xchg + rex handling */
6530 /* XXX: correct lock test for all insn */
6531 if (prefixes & PREFIX_LOCK)
6533 if (prefixes & PREFIX_REPZ) {
6534 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6537 case 0x9b: /* fwait */
6538 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6539 (HF_MP_MASK | HF_TS_MASK)) {
6540 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6542 if (s->cc_op != CC_OP_DYNAMIC)
6543 gen_op_set_cc_op(s->cc_op);
6544 gen_jmp_im(pc_start - s->cs_base);
6545 tcg_gen_helper_0_0(helper_fwait);
6548 case 0xcc: /* int3 */
6549 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6551 case 0xcd: /* int N */
6552 val = ldub_code(s->pc++);
6553 if (s->vm86 && s->iopl != 3) {
6554 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6556 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6559 case 0xce: /* into */
6562 if (s->cc_op != CC_OP_DYNAMIC)
6563 gen_op_set_cc_op(s->cc_op);
6564 gen_jmp_im(pc_start - s->cs_base);
6565 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6567 case 0xf1: /* icebp (undocumented, exits to external debugger) */
6568 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
6570 gen_debug(s, pc_start - s->cs_base);
6573 tb_flush(cpu_single_env);
6574 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6577 case 0xfa: /* cli */
6579 if (s->cpl <= s->iopl) {
6580 tcg_gen_helper_0_0(helper_cli);
6582 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6586 tcg_gen_helper_0_0(helper_cli);
6588 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6592 case 0xfb: /* sti */
6594 if (s->cpl <= s->iopl) {
6596 tcg_gen_helper_0_0(helper_sti);
6597 /* interrupts are not recognized until after the insn following sti */
6598 /* If several instructions disable interrupts, only the first one takes effect */
6600 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6601 tcg_gen_helper_0_0(helper_set_inhibit_irq);
6602 /* give a chance to handle pending irqs */
6603 gen_jmp_im(s->pc - s->cs_base);
6606 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6612 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6616 case 0x62: /* bound */
6619 ot = dflag ? OT_LONG : OT_WORD;
6620 modrm = ldub_code(s->pc++);
6621 reg = (modrm >> 3) & 7;
6622 mod = (modrm >> 6) & 3;
6625 gen_op_mov_TN_reg(ot, 0, reg);
6626 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6627 gen_jmp_im(pc_start - s->cs_base);
6628 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6630 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6632 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6634 case 0x1c8 ... 0x1cf: /* bswap reg */
6635 reg = (b & 7) | REX_B(s);
6636 #ifdef TARGET_X86_64
6638 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6639 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6640 gen_op_mov_reg_T0(OT_QUAD, reg);
6644 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6646 tmp0 = tcg_temp_new(TCG_TYPE_I32);
6647 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6648 tcg_gen_bswap_i32(tmp0, tmp0);
6649 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6650 gen_op_mov_reg_T0(OT_LONG, reg);
6654 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6655 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6656 gen_op_mov_reg_T0(OT_LONG, reg);
6660 case 0xd6: /* salc */
6663 if (s->cc_op != CC_OP_DYNAMIC)
6664 gen_op_set_cc_op(s->cc_op);
6665 gen_compute_eflags_c(cpu_T[0]);
6666 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6667 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
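/* SALC: AL = CF ? 0xff : 0x00; negating the 0/1 carry yields 0/-1 */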
6669 case 0xe0: /* loopnz */
6670 case 0xe1: /* loopz */
6671 case 0xe2: /* loop */
6672 case 0xe3: /* jecxz */
6676 tval = (int8_t)insn_get(s, OT_BYTE);
6677 next_eip = s->pc - s->cs_base;
6682 l1 = gen_new_label();
6683 l2 = gen_new_label();
6684 l3 = gen_new_label();
6687 case 0: /* loopnz */
6689 if (s->cc_op != CC_OP_DYNAMIC)
6690 gen_op_set_cc_op(s->cc_op);
6691 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6692 gen_op_jz_ecx(s->aflag, l3);
6693 gen_compute_eflags(cpu_tmp0);
6694 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6696 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
6698 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
6702 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6703 gen_op_jnz_ecx(s->aflag, l1);
6707 gen_op_jz_ecx(s->aflag, l1);
6712 gen_jmp_im(next_eip);
6721 case 0x130: /* wrmsr */
6722 case 0x132: /* rdmsr */
6724 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6726 if (s->cc_op != CC_OP_DYNAMIC)
6727 gen_op_set_cc_op(s->cc_op);
6728 gen_jmp_im(pc_start - s->cs_base);
6730 tcg_gen_helper_0_0(helper_rdmsr);
6732 tcg_gen_helper_0_0(helper_wrmsr);
6736 case 0x131: /* rdtsc */
6737 if (s->cc_op != CC_OP_DYNAMIC)
6738 gen_op_set_cc_op(s->cc_op);
6739 gen_jmp_im(pc_start - s->cs_base);
6742 tcg_gen_helper_0_0(helper_rdtsc);
6745 gen_jmp(s, s->pc - s->cs_base);
6748 case 0x133: /* rdpmc */
6749 if (s->cc_op != CC_OP_DYNAMIC)
6750 gen_op_set_cc_op(s->cc_op);
6751 gen_jmp_im(pc_start - s->cs_base);
6752 tcg_gen_helper_0_0(helper_rdpmc);
6754 case 0x134: /* sysenter */
6755 /* For Intel, SYSENTER is valid in 64-bit mode */
6756 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
6759 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6761 if (s->cc_op != CC_OP_DYNAMIC) {
6762 gen_op_set_cc_op(s->cc_op);
6763 s->cc_op = CC_OP_DYNAMIC;
6765 gen_jmp_im(pc_start - s->cs_base);
6766 tcg_gen_helper_0_0(helper_sysenter);
6770 case 0x135: /* sysexit */
6771 /* For Intel, SYSEXIT is valid in 64-bit mode */
6772 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
6775 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6777 if (s->cc_op != CC_OP_DYNAMIC) {
6778 gen_op_set_cc_op(s->cc_op);
6779 s->cc_op = CC_OP_DYNAMIC;
6781 gen_jmp_im(pc_start - s->cs_base);
6782 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
6786 #ifdef TARGET_X86_64
6787 case 0x105: /* syscall */
6788 /* XXX: is it usable in real mode ? */
6789 if (s->cc_op != CC_OP_DYNAMIC) {
6790 gen_op_set_cc_op(s->cc_op);
6791 s->cc_op = CC_OP_DYNAMIC;
6793 gen_jmp_im(pc_start - s->cs_base);
6794 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6797 case 0x107: /* sysret */
6799 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6801 if (s->cc_op != CC_OP_DYNAMIC) {
6802 gen_op_set_cc_op(s->cc_op);
6803 s->cc_op = CC_OP_DYNAMIC;
6805 gen_jmp_im(pc_start - s->cs_base);
6806 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6807 /* condition codes are modified only in long mode */
6809 s->cc_op = CC_OP_EFLAGS;
6814 case 0x1a2: /* cpuid */
6815 if (s->cc_op != CC_OP_DYNAMIC)
6816 gen_op_set_cc_op(s->cc_op);
6817 gen_jmp_im(pc_start - s->cs_base);
6818 tcg_gen_helper_0_0(helper_cpuid);
6820 case 0xf4: /* hlt */
6822 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6824 if (s->cc_op != CC_OP_DYNAMIC)
6825 gen_op_set_cc_op(s->cc_op);
6826 gen_jmp_im(pc_start - s->cs_base);
6827 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
6832 modrm = ldub_code(s->pc++);
6833 mod = (modrm >> 6) & 3;
6834 op = (modrm >> 3) & 7;
6837 if (!s->pe || s->vm86)
6839 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
6840 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6844 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6847 if (!s->pe || s->vm86)
6850 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6852 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
6853 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6854 gen_jmp_im(pc_start - s->cs_base);
6855 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6856 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6860 if (!s->pe || s->vm86)
6862 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
6863 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6867 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6870 if (!s->pe || s->vm86)
6873 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6875 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
6876 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6877 gen_jmp_im(pc_start - s->cs_base);
6878 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6879 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6884 if (!s->pe || s->vm86)
6886 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6887 if (s->cc_op != CC_OP_DYNAMIC)
6888 gen_op_set_cc_op(s->cc_op);
6890 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
6892 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
6893 s->cc_op = CC_OP_EFLAGS;
6900 modrm = ldub_code(s->pc++);
6901 mod = (modrm >> 6) & 3;
6902 op = (modrm >> 3) & 7;
6908 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
6909 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6910 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
6911 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6912 gen_add_A0_im(s, 2);
6913 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
6915 gen_op_andl_T0_im(0xffffff);
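/* with a 16-bit operand size only 24 bits of the base are stored (legacy 286-style behaviour) */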
6916 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6921 case 0: /* monitor */
6922 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6925 if (s->cc_op != CC_OP_DYNAMIC)
6926 gen_op_set_cc_op(s->cc_op);
6927 gen_jmp_im(pc_start - s->cs_base);
6928 #ifdef TARGET_X86_64
6929 if (s->aflag == 2) {
6930 gen_op_movq_A0_reg(R_EAX);
6934 gen_op_movl_A0_reg(R_EAX);
6936 gen_op_andl_A0_ffff();
6938 gen_add_A0_ds_seg(s);
6939 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6942 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6945 if (s->cc_op != CC_OP_DYNAMIC) {
6946 gen_op_set_cc_op(s->cc_op);
6947 s->cc_op = CC_OP_DYNAMIC;
6949 gen_jmp_im(pc_start - s->cs_base);
6950 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
6957 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
6958 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6959 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
6960 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6961 gen_add_A0_im(s, 2);
6962 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
6964 gen_op_andl_T0_im(0xffffff);
6965 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6971 if (s->cc_op != CC_OP_DYNAMIC)
6972 gen_op_set_cc_op(s->cc_op);
6973 gen_jmp_im(pc_start - s->cs_base);
6976 if (!(s->flags & HF_SVME_MASK) || !s->pe)
6979 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6982 tcg_gen_helper_0_2(helper_vmrun,
6983 tcg_const_i32(s->aflag),
6984 tcg_const_i32(s->pc - pc_start));
6989 case 1: /* VMMCALL */
6990 if (!(s->flags & HF_SVME_MASK))
6992 tcg_gen_helper_0_0(helper_vmmcall);
6994 case 2: /* VMLOAD */
6995 if (!(s->flags & HF_SVME_MASK) || !s->pe)
6998 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7001 tcg_gen_helper_0_1(helper_vmload,
7002 tcg_const_i32(s->aflag));
7005 case 3: /* VMSAVE */
7006 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7009 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7012 tcg_gen_helper_0_1(helper_vmsave,
7013 tcg_const_i32(s->aflag));
7017 if ((!(s->flags & HF_SVME_MASK) &&
7018 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7022 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7025 tcg_gen_helper_0_0(helper_stgi);
7029 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7032 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7035 tcg_gen_helper_0_0(helper_clgi);
7038 case 6: /* SKINIT */
7039 if ((!(s->flags & HF_SVME_MASK) &&
7040 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7043 tcg_gen_helper_0_0(helper_skinit);
7045 case 7: /* INVLPGA */
7046 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7049 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7052 tcg_gen_helper_0_1(helper_invlpga,
7053 tcg_const_i32(s->aflag));
7059 } else if (s->cpl != 0) {
7060 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7062 gen_svm_check_intercept(s, pc_start,
7063 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7064 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7065 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7066 gen_add_A0_im(s, 2);
7067 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7069 gen_op_andl_T0_im(0xffffff);
7071 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7072 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7074 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7075 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7080 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7081 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7082 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7086 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7088 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7089 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7090 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7091 gen_jmp_im(s->pc - s->cs_base);
7095 case 7: /* invlpg */
7097 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7100 #ifdef TARGET_X86_64
7101 if (CODE64(s) && rm == 0) {
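/* swapgs: exchange GS.base with the kernel GS base value */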
7103 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7104 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7105 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7106 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7113 if (s->cc_op != CC_OP_DYNAMIC)
7114 gen_op_set_cc_op(s->cc_op);
7115 gen_jmp_im(pc_start - s->cs_base);
7116 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7117 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7118 gen_jmp_im(s->pc - s->cs_base);
7127 case 0x108: /* invd */
7128 case 0x109: /* wbinvd */
7130 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7132 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7136 case 0x63: /* arpl or movslS (x86_64) */
7137 #ifdef TARGET_X86_64
7140 /* d_ot is the size of the destination */
7141 d_ot = dflag + OT_WORD;
7143 modrm = ldub_code(s->pc++);
7144 reg = ((modrm >> 3) & 7) | rex_r;
7145 mod = (modrm >> 6) & 3;
7146 rm = (modrm & 7) | REX_B(s);
7149 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7151 if (d_ot == OT_QUAD)
7152 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7153 gen_op_mov_reg_T0(d_ot, reg);
7155 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7156 if (d_ot == OT_QUAD) {
7157 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7159 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7161 gen_op_mov_reg_T0(d_ot, reg);
7169 if (!s->pe || s->vm86)
7171 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7172 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7173 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7175 modrm = ldub_code(s->pc++);
7176 reg = (modrm >> 3) & 7;
7177 mod = (modrm >> 6) & 3;
7180 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7181 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7183 gen_op_mov_v_reg(ot, t0, rm);
7185 gen_op_mov_v_reg(ot, t1, reg);
7186 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7187 tcg_gen_andi_tl(t1, t1, 3);
7188 tcg_gen_movi_tl(t2, 0);
7189 label1 = gen_new_label();
7190 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7191 tcg_gen_andi_tl(t0, t0, ~3);
7192 tcg_gen_or_tl(t0, t0, t1);
7193 tcg_gen_movi_tl(t2, CC_Z);
7194 gen_set_label(label1);
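/* ARPL: if the destination RPL was below the source RPL it is raised to match and ZF is set (t2 = CC_Z); otherwise ZF is cleared */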
7196 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7198 gen_op_mov_reg_v(ot, rm, t0);
7200 if (s->cc_op != CC_OP_DYNAMIC)
7201 gen_op_set_cc_op(s->cc_op);
7202 gen_compute_eflags(cpu_cc_src);
7203 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7204 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7205 s->cc_op = CC_OP_EFLAGS;
7211 case 0x102: /* lar */
7212 case 0x103: /* lsl */
7216 if (!s->pe || s->vm86)
7218 ot = dflag ? OT_LONG : OT_WORD;
7219 modrm = ldub_code(s->pc++);
7220 reg = ((modrm >> 3) & 7) | rex_r;
7221 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7222 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7223 if (s->cc_op != CC_OP_DYNAMIC)
7224 gen_op_set_cc_op(s->cc_op);
7226 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
7228 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
7229 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7230 label1 = gen_new_label();
7231 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7232 gen_op_mov_reg_v(ot, reg, t0);
7233 gen_set_label(label1);
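/* the helpers report success through ZF in cc_src; the result is committed only when ZF is set */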
7234 s->cc_op = CC_OP_EFLAGS;
7239 modrm = ldub_code(s->pc++);
7240 mod = (modrm >> 6) & 3;
7241 op = (modrm >> 3) & 7;
7243 case 0: /* prefetchnta */
7244 case 1: /* prefetcht0 */
7245 case 2: /* prefetcht1 */
7246 case 3: /* prefetcht2 */
7249 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7250 /* nothing more to do */
7252 default: /* nop (multi byte) */
7253 gen_nop_modrm(s, modrm);
7257 case 0x119 ... 0x11f: /* nop (multi byte) */
7258 modrm = ldub_code(s->pc++);
7259 gen_nop_modrm(s, modrm);
7261 case 0x120: /* mov reg, crN */
7262 case 0x122: /* mov crN, reg */
7264 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7266 modrm = ldub_code(s->pc++);
7267 if ((modrm & 0xc0) != 0xc0)
7269 rm = (modrm & 7) | REX_B(s);
7270 reg = ((modrm >> 3) & 7) | rex_r;
7281 if (s->cc_op != CC_OP_DYNAMIC)
7282 gen_op_set_cc_op(s->cc_op);
7283 gen_jmp_im(pc_start - s->cs_base);
7285 gen_op_mov_TN_reg(ot, 0, rm);
7286 tcg_gen_helper_0_2(helper_write_crN,
7287 tcg_const_i32(reg), cpu_T[0]);
7288 gen_jmp_im(s->pc - s->cs_base);
7291 tcg_gen_helper_1_1(helper_read_crN,
7292 cpu_T[0], tcg_const_i32(reg));
7293 gen_op_mov_reg_T0(ot, rm);
7301 case 0x121: /* mov reg, drN */
7302 case 0x123: /* mov drN, reg */
7304 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7306 modrm = ldub_code(s->pc++);
7307 if ((modrm & 0xc0) != 0xc0)
7309 rm = (modrm & 7) | REX_B(s);
7310 reg = ((modrm >> 3) & 7) | rex_r;
7315 /* XXX: do it dynamically with CR4.DE bit */
7316 if (reg == 4 || reg == 5 || reg >= 8)
7319 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
7320 gen_op_mov_TN_reg(ot, 0, rm);
7321 tcg_gen_helper_0_2(helper_movl_drN_T0,
7322 tcg_const_i32(reg), cpu_T[0]);
7323 gen_jmp_im(s->pc - s->cs_base);
7326 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
7327 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
7328 gen_op_mov_reg_T0(ot, rm);
7332 case 0x106: /* clts */
7334 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7336 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7337 tcg_gen_helper_0_0(helper_clts);
7338 /* abort block because static cpu state changed */
7339 gen_jmp_im(s->pc - s->cs_base);
7343 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
7344 case 0x1c3: /* MOVNTI reg, mem */
7345 if (!(s->cpuid_features & CPUID_SSE2))
7347 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
7348 modrm = ldub_code(s->pc++);
7349 mod = (modrm >> 6) & 3;
7352 reg = ((modrm >> 3) & 7) | rex_r;
7353 /* generate a generic store */
7354 gen_ldst_modrm(s, modrm, ot, reg, 1);
7357 modrm = ldub_code(s->pc++);
7358 mod = (modrm >> 6) & 3;
7359 op = (modrm >> 3) & 7;
7361 case 0: /* fxsave */
7362 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7363 (s->flags & HF_EM_MASK))
7365 if (s->flags & HF_TS_MASK) {
7366 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7369 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7370 if (s->cc_op != CC_OP_DYNAMIC)
7371 gen_op_set_cc_op(s->cc_op);
7372 gen_jmp_im(pc_start - s->cs_base);
7373 tcg_gen_helper_0_2(helper_fxsave,
7374 cpu_A0, tcg_const_i32((s->dflag == 2)));
7376 case 1: /* fxrstor */
7377 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7378 (s->flags & HF_EM_MASK))
7380 if (s->flags & HF_TS_MASK) {
7381 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7384 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7385 if (s->cc_op != CC_OP_DYNAMIC)
7386 gen_op_set_cc_op(s->cc_op);
7387 gen_jmp_im(pc_start - s->cs_base);
7388 tcg_gen_helper_0_2(helper_fxrstor,
7389 cpu_A0, tcg_const_i32((s->dflag == 2)));
7391 case 2: /* ldmxcsr */
7392 case 3: /* stmxcsr */
7393 if (s->flags & HF_TS_MASK) {
7394 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7397 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
7400 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7402 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7403 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7405 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7406 gen_op_st_T0_A0(OT_LONG + s->mem_index);
7409 case 5: /* lfence */
7410 case 6: /* mfence */
7411 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
7414 case 7: /* sfence / clflush */
7415 if ((modrm & 0xc7) == 0xc0) {
7417 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
7418 if (!(s->cpuid_features & CPUID_SSE))
7422 if (!(s->cpuid_features & CPUID_CLFLUSH))
7424 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7431 case 0x10d: /* 3DNow! prefetch(w) */
7432 modrm = ldub_code(s->pc++);
7433 mod = (modrm >> 6) & 3;
7436 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7437 /* ignore for now */
7439 case 0x1aa: /* rsm */
7440 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
7441 if (!(s->flags & HF_SMM_MASK))
7443 if (s->cc_op != CC_OP_DYNAMIC) {
7444 gen_op_set_cc_op(s->cc_op);
7445 s->cc_op = CC_OP_DYNAMIC;
7447 gen_jmp_im(s->pc - s->cs_base);
7448 tcg_gen_helper_0_0(helper_rsm);
7451 case 0x1b8: /* SSE4.2 popcnt */
7452 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
7455 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
7458 modrm = ldub_code(s->pc++);
7459 reg = ((modrm >> 3) & 7);
7461 if (s->prefix & PREFIX_DATA)
7463 else if (s->dflag != 2)
7468 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7469 tcg_gen_helper_1_2(helper_popcnt,
7470 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
7471 gen_op_mov_reg_T0(ot, reg);
7473 s->cc_op = CC_OP_EFLAGS;
7475 case 0x10e ... 0x10f:
7476 /* 3DNow! instructions, ignore prefixes */
7477 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
7478 case 0x110 ... 0x117:
7479 case 0x128 ... 0x12f:
7480 case 0x138 ... 0x13a:
7481 case 0x150 ... 0x177:
7482 case 0x17c ... 0x17f:
7484 case 0x1c4 ... 0x1c6:
7485 case 0x1d0 ... 0x1fe:
7486 gen_sse(s, b, pc_start, rex_r);
7491 /* lock generation */
7492 if (s->prefix & PREFIX_LOCK)
7493 tcg_gen_helper_0_0(helper_unlock);
7496 if (s->prefix & PREFIX_LOCK)
7497 tcg_gen_helper_0_0(helper_unlock);
7498 /* XXX: ensure that no lock was generated */
7499 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
7503 void optimize_flags_init(void)
7505 #if TCG_TARGET_REG_BITS == 32
7506 assert(sizeof(CCTable) == (1 << 3));
7508 assert(sizeof(CCTable) == (1 << 4));
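/* presumably the generated code indexes cc_table by shifting cc_op, so CCTable must keep a power-of-two size */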
7510 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
7511 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
7512 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
7513 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
7514 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
7515 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
7516 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
7517 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
7518 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
7520 /* register helpers */
7522 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
7526 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
7527 basic block 'tb'. If search_pc is TRUE, also generate PC
7528 information for each intermediate instruction. */
7529 static inline void gen_intermediate_code_internal(CPUState *env,
7530 TranslationBlock *tb,
7533 DisasContext dc1, *dc = &dc1;
7534 target_ulong pc_ptr;
7535 uint16_t *gen_opc_end;
7538 target_ulong pc_start;
7539 target_ulong cs_base;
7543 /* generate intermediate code */
7545 cs_base = tb->cs_base;
7547 cflags = tb->cflags;
7549 dc->pe = (flags >> HF_PE_SHIFT) & 1;
7550 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
7551 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
7552 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
7554 dc->vm86 = (flags >> VM_SHIFT) & 1;
7555 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
7556 dc->iopl = (flags >> IOPL_SHIFT) & 3;
7557 dc->tf = (flags >> TF_SHIFT) & 1;
7558 dc->singlestep_enabled = env->singlestep_enabled;
7559 dc->cc_op = CC_OP_DYNAMIC;
7560 dc->cs_base = cs_base;
7562 dc->popl_esp_hack = 0;
7563 /* select memory access functions */
7565 if (flags & HF_SOFTMMU_MASK) {
7567 dc->mem_index = 2 * 4;
7569 dc->mem_index = 1 * 4;
7571 dc->cpuid_features = env->cpuid_features;
7572 dc->cpuid_ext_features = env->cpuid_ext_features;
7573 dc->cpuid_ext2_features = env->cpuid_ext2_features;
7574 dc->cpuid_ext3_features = env->cpuid_ext3_features;
7575 #ifdef TARGET_X86_64
7576 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
7577 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
7580 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
7581 (flags & HF_INHIBIT_IRQ_MASK)
7582 #ifndef CONFIG_SOFTMMU
7583 || (flags & HF_SOFTMMU_MASK)
7587 /* check addseg logic */
7588 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
7589 printf("ERROR addseg\n");
7592 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
7593 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
7594 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
7595 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
7597 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
7598 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
7599 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
7600 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
7601 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
7602 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
7603 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
7604 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
7605 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
7607 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7609 dc->is_jmp = DISAS_NEXT;
7613 max_insns = tb->cflags & CF_COUNT_MASK;
7615 max_insns = CF_COUNT_MASK;
7619 if (env->nb_breakpoints > 0) {
7620 for(j = 0; j < env->nb_breakpoints; j++) {
7621 if (env->breakpoints[j] == pc_ptr) {
7622 gen_debug(dc, pc_ptr - dc->cs_base);
7628 j = gen_opc_ptr - gen_opc_buf;
7632 gen_opc_instr_start[lj++] = 0;
7634 gen_opc_pc[lj] = pc_ptr;
7635 gen_opc_cc_op[lj] = dc->cc_op;
7636 gen_opc_instr_start[lj] = 1;
7637 gen_opc_icount[lj] = num_insns;
7639 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
7642 pc_ptr = disas_insn(dc, pc_ptr);
7644 /* stop translation if indicated */
7647 /* if single step mode, we generate only one instruction and
7648 generate an exception */
7649 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7650 the flag and abort the translation to give the irqs a
7651 chance to happen */
7652 if (dc->tf || dc->singlestep_enabled ||
7653 (flags & HF_INHIBIT_IRQ_MASK)) {
7654 gen_jmp_im(pc_ptr - dc->cs_base);
7658 /* also stop generation if the translation grows too long or nears a page boundary */
7659 if (gen_opc_ptr >= gen_opc_end ||
7660 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
7661 num_insns >= max_insns) {
7662 gen_jmp_im(pc_ptr - dc->cs_base);
7667 if (tb->cflags & CF_LAST_IO)
7669 gen_icount_end(tb, num_insns);
7670 *gen_opc_ptr = INDEX_op_end;
7671 /* fill in the last values */
7673 j = gen_opc_ptr - gen_opc_buf;
7676 gen_opc_instr_start[lj++] = 0;
7680 if (loglevel & CPU_LOG_TB_CPU) {
7681 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
7683 if (loglevel & CPU_LOG_TB_IN_ASM) {
7685 fprintf(logfile, "----------------\n");
7686 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7687 #ifdef TARGET_X86_64
7692 disas_flags = !dc->code32;
7693 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7694 fprintf(logfile, "\n");
7699 tb->size = pc_ptr - pc_start;
7700 tb->icount = num_insns;
7704 void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7706 gen_intermediate_code_internal(env, tb, 0);
7709 void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7711 gen_intermediate_code_internal(env, tb, 1);
7714 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7715 unsigned long searched_pc, int pc_pos, void *puc)
7719 if (loglevel & CPU_LOG_TB_OP) {
7721 fprintf(logfile, "RESTORE:\n");
7722 for(i = 0;i <= pc_pos; i++) {
7723 if (gen_opc_instr_start[i]) {
7724 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7727 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7728 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7729 (uint32_t)tb->cs_base);
7732 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7733 cc_op = gen_opc_cc_op[pc_pos];
7734 if (cc_op != CC_OP_DYNAMIC)