4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
34 #define PREFIX_REPZ 0x01
35 #define PREFIX_REPNZ 0x02
36 #define PREFIX_LOCK 0x04
37 #define PREFIX_DATA 0x08
38 #define PREFIX_ADR 0x10
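/* These prefix bits accumulate into a mask while decoding: e.g. a
   "66 F3"-prefixed instruction ends up with
   prefixes == (PREFIX_DATA | PREFIX_REPZ). */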
41 #define X86_64_ONLY(x) x
42 #define X86_64_DEF(x...) x
43 #define CODE64(s) ((s)->code64)
44 #define REX_X(s) ((s)->rex_x)
45 #define REX_B(s) ((s)->rex_b)
46 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
48 #define BUGGY_64(x) NULL
51 #define X86_64_ONLY(x) NULL
52 #define X86_64_DEF(x...)
58 //#define MACRO_TEST 1
60 /* global register indexes */
61 static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
63 /* local register indexes (only used inside old micro ops) */
64 static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
65 static TCGv cpu_tmp5, cpu_tmp6;
68 static int x86_64_hregs;
71 typedef struct DisasContext {
72 /* current insn context */
73 int override; /* -1 if no override */
76 target_ulong pc; /* pc = eip + cs_base */
77 int is_jmp; /* 1 means jump (stop translation), 2 means
78 static CPU state change (stop translation) */
79 /* current block context */
80 target_ulong cs_base; /* base of CS segment */
81 int pe; /* protected mode */
82 int code32; /* 32 bit code segment */
84 int lma; /* long mode active */
85 int code64; /* 64 bit code segment */
88 int ss32; /* 32 bit stack segment */
89 int cc_op; /* current CC operation */
90 int addseg; /* non-zero if any of DS/ES/SS has a non-zero base */
91 int f_st; /* currently unused */
92 int vm86; /* vm86 mode */
95 int tf; /* TF cpu flag */
96 int singlestep_enabled; /* "hardware" single step enabled */
97 int jmp_opt; /* use direct block chaining for direct jumps */
98 int mem_index; /* select memory access functions */
99 uint64_t flags; /* all execution flags */
100 struct TranslationBlock *tb;
101 int popl_esp_hack; /* for correct popl with esp base handling */
102 int rip_offset; /* only used in x86_64, but left for simplicity */
104 int cpuid_ext_features;
105 int cpuid_ext2_features;
106 int cpuid_ext3_features;
109 static void gen_eob(DisasContext *s);
110 static void gen_jmp(DisasContext *s, target_ulong eip);
111 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
113 /* i386 arith/logic operations */
133 OP_SHL1, /* undocumented */
157 /* I386 int registers */
158 OR_EAX, /* MUST be even numbered */
167 OR_TMP0 = 16, /* temporary operand register */
169 OR_A0, /* temporary register used when doing address evaluation */
172 static inline void gen_op_movl_T0_0(void)
174 tcg_gen_movi_tl(cpu_T[0], 0);
177 static inline void gen_op_movl_T0_im(int32_t val)
179 tcg_gen_movi_tl(cpu_T[0], val);
182 static inline void gen_op_movl_T0_imu(uint32_t val)
184 tcg_gen_movi_tl(cpu_T[0], val);
187 static inline void gen_op_movl_T1_im(int32_t val)
189 tcg_gen_movi_tl(cpu_T[1], val);
192 static inline void gen_op_movl_T1_imu(uint32_t val)
194 tcg_gen_movi_tl(cpu_T[1], val);
197 static inline void gen_op_movl_A0_im(uint32_t val)
199 tcg_gen_movi_tl(cpu_A0, val);
203 static inline void gen_op_movq_A0_im(int64_t val)
205 tcg_gen_movi_tl(cpu_A0, val);
209 static inline void gen_movtl_T0_im(target_ulong val)
211 tcg_gen_movi_tl(cpu_T[0], val);
214 static inline void gen_movtl_T1_im(target_ulong val)
216 tcg_gen_movi_tl(cpu_T[1], val);
219 static inline void gen_op_andl_T0_ffff(void)
221 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
224 static inline void gen_op_andl_T0_im(uint32_t val)
226 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
229 static inline void gen_op_movl_T0_T1(void)
231 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
234 static inline void gen_op_andl_A0_ffff(void)
236 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
241 #define NB_OP_SIZES 4
243 #else /* !TARGET_X86_64 */
245 #define NB_OP_SIZES 3
247 #endif /* !TARGET_X86_64 */
249 #if defined(WORDS_BIGENDIAN)
250 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
251 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
252 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
253 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
254 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
256 #define REG_B_OFFSET 0
257 #define REG_H_OFFSET 1
258 #define REG_W_OFFSET 0
259 #define REG_L_OFFSET 0
260 #define REG_LH_OFFSET 4
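/* Example on a little-endian host: REG_H_OFFSET == 1 means a byte store
   for AH (register codes 4..7 without REX) lands at byte 1 of
   regs[R_EAX], i.e. bits 8..15 of EAX, exactly the x86 AH view. The
   WORDS_BIGENDIAN variants count the same offsets from the most
   significant end of target_ulong instead. */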
263 static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
267 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
268 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
270 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
274 tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
278 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
279 /* high part of register set to zero */
280 tcg_gen_movi_tl(cpu_tmp0, 0);
281 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
285 tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
290 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
296 static inline void gen_op_mov_reg_T0(int ot, int reg)
298 gen_op_mov_reg_TN(ot, 0, reg);
301 static inline void gen_op_mov_reg_T1(int ot, int reg)
303 gen_op_mov_reg_TN(ot, 1, reg);
306 static inline void gen_op_mov_reg_A0(int size, int reg)
310 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
314 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
315 /* high part of register set to zero */
316 tcg_gen_movi_tl(cpu_tmp0, 0);
317 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
321 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
326 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
332 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
336 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
339 tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
344 tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
349 static inline void gen_op_movl_A0_reg(int reg)
351 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
354 static inline void gen_op_addl_A0_im(int32_t val)
356 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
358 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
363 static inline void gen_op_addq_A0_im(int64_t val)
365 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
369 static void gen_add_A0_im(DisasContext *s, int val)
373 gen_op_addq_A0_im(val);
376 gen_op_addl_A0_im(val);
379 static inline void gen_op_addl_T0_T1(void)
381 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
384 static inline void gen_op_jmp_T0(void)
386 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
389 static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
393 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
394 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
395 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
398 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
399 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
401 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
403 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
407 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
408 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
409 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
415 static inline void gen_op_add_reg_T0(int size, int reg)
419 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
420 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
421 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
424 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
425 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
427 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
429 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
433 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
434 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
435 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
441 static inline void gen_op_set_cc_op(int32_t val)
443 tcg_gen_movi_i32(cpu_cc_op, val);
446 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
448 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
450 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
451 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
453 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
457 static inline void gen_op_movl_A0_seg(int reg)
459 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
462 static inline void gen_op_addl_A0_seg(int reg)
464 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
465 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
467 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
472 static inline void gen_op_movq_A0_seg(int reg)
474 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
477 static inline void gen_op_addq_A0_seg(int reg)
479 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
480 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
483 static inline void gen_op_movq_A0_reg(int reg)
485 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
488 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
490 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
492 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
493 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
497 static inline void gen_op_lds_T0_A0(int idx)
499 int mem_index = (idx >> 2) - 1;
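    /* Callers pass 'ot + s->mem_index' (see gen_movs below): the low two
       bits of idx carry the operand size and, assuming s->mem_index is
       kept a multiple of 4 by the translator setup, (idx >> 2) - 1
       recovers the softmmu access index. */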
502 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
505 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
509 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
514 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
515 static inline void gen_op_ld_T0_A0(int idx)
517 int mem_index = (idx >> 2) - 1;
520 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
523 tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
526 tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
530 tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
535 static inline void gen_op_ldu_T0_A0(int idx)
537 gen_op_ld_T0_A0(idx);
540 static inline void gen_op_ld_T1_A0(int idx)
542 int mem_index = (idx >> 2) - 1;
545 tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
548 tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
551 tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
555 tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
560 static inline void gen_op_st_T0_A0(int idx)
562 int mem_index = (idx >> 2) - 1;
565 tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
568 tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
571 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
575 tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
580 static inline void gen_op_st_T1_A0(int idx)
582 int mem_index = (idx >> 2) - 1;
585 tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
588 tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
591 tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
595 tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
600 static inline void gen_jmp_im(target_ulong pc)
602 tcg_gen_movi_tl(cpu_tmp0, pc);
603 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
606 static inline void gen_string_movl_A0_ESI(DisasContext *s)
610 override = s->override;
614 gen_op_movq_A0_seg(override);
615 gen_op_addq_A0_reg_sN(0, R_ESI);
617 gen_op_movq_A0_reg(R_ESI);
623 if (s->addseg && override < 0)
626 gen_op_movl_A0_seg(override);
627 gen_op_addl_A0_reg_sN(0, R_ESI);
629 gen_op_movl_A0_reg(R_ESI);
632 /* 16 bit address, always override */
635 gen_op_movl_A0_reg(R_ESI);
636 gen_op_andl_A0_ffff();
637 gen_op_addl_A0_seg(override);
641 static inline void gen_string_movl_A0_EDI(DisasContext *s)
645 gen_op_movq_A0_reg(R_EDI);
650 gen_op_movl_A0_seg(R_ES);
651 gen_op_addl_A0_reg_sN(0, R_EDI);
653 gen_op_movl_A0_reg(R_EDI);
656 gen_op_movl_A0_reg(R_EDI);
657 gen_op_andl_A0_ffff();
658 gen_op_addl_A0_seg(R_ES);
662 static inline void gen_op_movl_T0_Dshift(int ot)
664 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
665 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
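    /* df is held as +1/-1 in CPUState, so T0 now holds +/-(1 << ot):
       e.g. a dword string op with DF set (df == -1, ot == OT_LONG)
       yields -4, the per-iteration ESI/EDI step applied by
       gen_op_add_reg_T0(). */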
668 static void gen_extu(int ot, TCGv reg)
672 tcg_gen_ext8u_tl(reg, reg);
675 tcg_gen_ext16u_tl(reg, reg);
678 tcg_gen_ext32u_tl(reg, reg);
685 static void gen_exts(int ot, TCGv reg)
689 tcg_gen_ext8s_tl(reg, reg);
692 tcg_gen_ext16s_tl(reg, reg);
695 tcg_gen_ext32s_tl(reg, reg);
702 static inline void gen_op_jnz_ecx(int size, int label1)
704 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
705 gen_extu(size + 1, cpu_tmp0);
706 tcg_gen_brcond_tl(TCG_COND_NE, cpu_tmp0, tcg_const_tl(0), label1);
709 static inline void gen_op_jz_ecx(int size, int label1)
711 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
712 gen_extu(size + 1, cpu_tmp0);
713 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
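    /* Note: 'size' here is s->aflag (0/1/2 for 16/32/64-bit addressing),
       so size + 1 is the matching OT_WORD/OT_LONG/OT_QUAD and gen_extu
       masks ECX to the current address size before the branch. */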
716 static void *helper_in_func[3] = {
722 static void *helper_out_func[3] = {
728 static void *gen_check_io_func[3] = {
734 static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
738 target_ulong next_eip;
741 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
742 if (s->cc_op != CC_OP_DYNAMIC)
743 gen_op_set_cc_op(s->cc_op);
746 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
747 tcg_gen_helper_0_1(gen_check_io_func[ot],
750 if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
752 if (s->cc_op != CC_OP_DYNAMIC)
753 gen_op_set_cc_op(s->cc_op);
757 svm_flags |= (1 << (4 + ot));
758 next_eip = s->pc - s->cs_base;
759 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
760 tcg_gen_helper_0_3(helper_svm_check_io,
762 tcg_const_i32(svm_flags),
763 tcg_const_i32(next_eip - cur_eip));
767 static inline void gen_movs(DisasContext *s, int ot)
769 gen_string_movl_A0_ESI(s);
770 gen_op_ld_T0_A0(ot + s->mem_index);
771 gen_string_movl_A0_EDI(s);
772 gen_op_st_T0_A0(ot + s->mem_index);
773 gen_op_movl_T0_Dshift(ot);
774 gen_op_add_reg_T0(s->aflag, R_ESI);
775 gen_op_add_reg_T0(s->aflag, R_EDI);
778 static inline void gen_update_cc_op(DisasContext *s)
780 if (s->cc_op != CC_OP_DYNAMIC) {
781 gen_op_set_cc_op(s->cc_op);
782 s->cc_op = CC_OP_DYNAMIC;
786 static void gen_op_update1_cc(void)
788 tcg_gen_discard_tl(cpu_cc_src);
789 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
792 static void gen_op_update2_cc(void)
794 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
795 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
798 static inline void gen_op_cmpl_T0_T1_cc(void)
800 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
801 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
804 static inline void gen_op_testl_T0_T1_cc(void)
806 tcg_gen_discard_tl(cpu_cc_src);
807 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
810 static void gen_op_update_neg_cc(void)
812 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
813 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
816 /* compute eflags.C to reg */
817 static void gen_compute_eflags_c(TCGv reg)
819 #if TCG_TARGET_REG_BITS == 32
820 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
821 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
822 (long)cc_table + offsetof(CCTable, compute_c));
823 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
824 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
825 1, &cpu_tmp2_i32, 0, NULL);
827 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
828 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
829 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
830 (long)cc_table + offsetof(CCTable, compute_c));
831 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
832 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
833 1, &cpu_tmp2_i32, 0, NULL);
835 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
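/* The shift counts above (3 on 32-bit hosts, 4 on 64-bit hosts) match
   sizeof(CCTable) -- two function pointers -- so cpu_cc_op indexes
   cc_table and the compute_c/compute_all helper for the current cc_op
   is loaded and called indirectly. */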
838 /* compute all eflags into 'reg' */
839 static void gen_compute_eflags(TCGv reg)
841 #if TCG_TARGET_REG_BITS == 32
842 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
843 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
844 (long)cc_table + offsetof(CCTable, compute_all));
845 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
846 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
847 1, &cpu_tmp2_i32, 0, NULL);
849 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
850 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
851 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
852 (long)cc_table + offsetof(CCTable, compute_all));
853 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
854 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
855 1, &cpu_tmp2_i32, 0, NULL);
857 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
860 static inline void gen_setcc_slow_T0(int op)
864 gen_compute_eflags(cpu_T[0]);
865 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
866 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
869 gen_compute_eflags_c(cpu_T[0]);
872 gen_compute_eflags(cpu_T[0]);
873 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
874 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
877 gen_compute_eflags(cpu_tmp0);
878 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
879 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
880 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
883 gen_compute_eflags(cpu_T[0]);
884 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
885 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
888 gen_compute_eflags(cpu_T[0]);
889 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
890 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
893 gen_compute_eflags(cpu_tmp0);
894 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
895 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
896 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
897 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
901 gen_compute_eflags(cpu_tmp0);
902 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
903 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
904 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
905 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
906 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
907 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
912 /* return true if setcc_slow is not needed (WARNING: must be kept in
913 sync with gen_jcc1) */
914 static int is_fast_jcc_case(DisasContext *s, int b)
917 jcc_op = (b >> 1) & 7;
919 /* we optimize the cmp/jcc case */
924 if (jcc_op == JCC_O || jcc_op == JCC_P)
928 /* some jumps are easy to compute */
953 if (jcc_op != JCC_Z && jcc_op != JCC_S)
963 /* generate a conditional jump to label 'l1' according to jump opcode
964 value 'b'. In the fast case, T0 is guaranteed not to be used. */
965 static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
967 int inv, jcc_op, size, cond;
971 jcc_op = (b >> 1) & 7;
974 /* we optimize the cmp/jcc case */
980 size = cc_op - CC_OP_SUBB;
986 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
990 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
995 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
1003 tcg_gen_brcond_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0,
1004 tcg_const_tl(0), l1);
1010 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1011 tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1012 tcg_const_tl(0), l1);
1015 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1016 tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1017 tcg_const_tl(0), l1);
1019 #ifdef TARGET_X86_64
1021 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1022 tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1023 tcg_const_tl(0), l1);
1027 tcg_gen_brcond_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1028 tcg_const_tl(0), l1);
1034 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1037 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1039 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1043 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1044 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1048 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1049 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1051 #ifdef TARGET_X86_64
1054 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1055 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1062 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1066 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1069 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1071 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1075 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1076 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1080 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1081 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1083 #ifdef TARGET_X86_64
1086 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1087 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1094 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1102 /* some jumps are easy to compute */
1144 size = (cc_op - CC_OP_ADDB) & 3;
1147 size = (cc_op - CC_OP_ADDB) & 3;
1155 gen_setcc_slow_T0(jcc_op);
1156 tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1157 cpu_T[0], tcg_const_tl(0), l1);
1162 /* XXX: does not work with gdbstub "ice" single step - not a
1163 serious problem */
1164 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1168 l1 = gen_new_label();
1169 l2 = gen_new_label();
1170 gen_op_jnz_ecx(s->aflag, l1);
1172 gen_jmp_tb(s, next_eip, 1);
1177 static inline void gen_stos(DisasContext *s, int ot)
1179 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1180 gen_string_movl_A0_EDI(s);
1181 gen_op_st_T0_A0(ot + s->mem_index);
1182 gen_op_movl_T0_Dshift(ot);
1183 gen_op_add_reg_T0(s->aflag, R_EDI);
1186 static inline void gen_lods(DisasContext *s, int ot)
1188 gen_string_movl_A0_ESI(s);
1189 gen_op_ld_T0_A0(ot + s->mem_index);
1190 gen_op_mov_reg_T0(ot, R_EAX);
1191 gen_op_movl_T0_Dshift(ot);
1192 gen_op_add_reg_T0(s->aflag, R_ESI);
1195 static inline void gen_scas(DisasContext *s, int ot)
1197 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1198 gen_string_movl_A0_EDI(s);
1199 gen_op_ld_T1_A0(ot + s->mem_index);
1200 gen_op_cmpl_T0_T1_cc();
1201 gen_op_movl_T0_Dshift(ot);
1202 gen_op_add_reg_T0(s->aflag, R_EDI);
1205 static inline void gen_cmps(DisasContext *s, int ot)
1207 gen_string_movl_A0_ESI(s);
1208 gen_op_ld_T0_A0(ot + s->mem_index);
1209 gen_string_movl_A0_EDI(s);
1210 gen_op_ld_T1_A0(ot + s->mem_index);
1211 gen_op_cmpl_T0_T1_cc();
1212 gen_op_movl_T0_Dshift(ot);
1213 gen_op_add_reg_T0(s->aflag, R_ESI);
1214 gen_op_add_reg_T0(s->aflag, R_EDI);
1217 static inline void gen_ins(DisasContext *s, int ot)
1219 gen_string_movl_A0_EDI(s);
1220 /* Note: we must do this dummy write first to be restartable in
1221 case of page fault. */
1223 gen_op_st_T0_A0(ot + s->mem_index);
1224 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1225 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1226 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1227 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1228 gen_op_st_T0_A0(ot + s->mem_index);
1229 gen_op_movl_T0_Dshift(ot);
1230 gen_op_add_reg_T0(s->aflag, R_EDI);
1233 static inline void gen_outs(DisasContext *s, int ot)
1235 gen_string_movl_A0_ESI(s);
1236 gen_op_ld_T0_A0(ot + s->mem_index);
1238 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1239 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1240 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1241 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1242 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1244 gen_op_movl_T0_Dshift(ot);
1245 gen_op_add_reg_T0(s->aflag, R_ESI);
1248 /* same method as Valgrind: we generate jumps to current or next
1249 instruction */
1250 #define GEN_REPZ(op) \
1251 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1252 target_ulong cur_eip, target_ulong next_eip) \
1255 gen_update_cc_op(s); \
1256 l2 = gen_jz_ecx_string(s, next_eip); \
1257 gen_ ## op(s, ot); \
1258 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1259 /* a loop would cause two single step exceptions if ECX = 1 \
1260 before rep string_insn */ \
1262 gen_op_jz_ecx(s->aflag, l2); \
1263 gen_jmp(s, cur_eip); \
1266 #define GEN_REPZ2(op) \
1267 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1268 target_ulong cur_eip, \
1269 target_ulong next_eip, \
1273 gen_update_cc_op(s); \
1274 l2 = gen_jz_ecx_string(s, next_eip); \
1275 gen_ ## op(s, ot); \
1276 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1277 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1278 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1280 gen_op_jz_ecx(s->aflag, l2); \
1281 gen_jmp(s, cur_eip); \
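/* A sketch (not the literal TCG output) of what a GEN_REPZ body emits:
 *
 *     if (ECX == 0) goto next_insn;      // gen_jz_ecx_string
 *     <one string op iteration>; ECX--;
 *     if (ECX == 0) goto next_insn;
 *     goto current_insn;                 // loop by re-entering the TB
 *
 * Each iteration thus ends the translation block, which keeps single
 * stepping and interrupt delivery precise, as noted above. */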
1292 static void *helper_fp_arith_ST0_FT0[8] = {
1293 helper_fadd_ST0_FT0,
1294 helper_fmul_ST0_FT0,
1295 helper_fcom_ST0_FT0,
1296 helper_fcom_ST0_FT0,
1297 helper_fsub_ST0_FT0,
1298 helper_fsubr_ST0_FT0,
1299 helper_fdiv_ST0_FT0,
1300 helper_fdivr_ST0_FT0,
1303 /* NOTE the exception in "r" op ordering */
1304 static void *helper_fp_arith_STN_ST0[8] = {
1305 helper_fadd_STN_ST0,
1306 helper_fmul_STN_ST0,
1309 helper_fsubr_STN_ST0,
1310 helper_fsub_STN_ST0,
1311 helper_fdivr_STN_ST0,
1312 helper_fdiv_STN_ST0,
1315 /* if d == OR_TMP0, it means memory operand (address in A0) */
1316 static void gen_op(DisasContext *s1, int op, int ot, int d)
1319 gen_op_mov_TN_reg(ot, 0, d);
1321 gen_op_ld_T0_A0(ot + s1->mem_index);
1325 if (s1->cc_op != CC_OP_DYNAMIC)
1326 gen_op_set_cc_op(s1->cc_op);
1327 gen_compute_eflags_c(cpu_tmp4);
1328 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1329 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1331 gen_op_mov_reg_T0(ot, d);
1333 gen_op_st_T0_A0(ot + s1->mem_index);
1334 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1335 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1336 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1337 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1338 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1339 s1->cc_op = CC_OP_DYNAMIC;
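        /* The computed carry (0 or 1) shifted left by 2 selects between
           the CC_OP_ADDB and CC_OP_ADCB groups, which sit 4 entries
           apart in the cc_op enumeration; the SBB case below plays the
           same trick with CC_OP_SUBB/CC_OP_SBBB. */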
1342 if (s1->cc_op != CC_OP_DYNAMIC)
1343 gen_op_set_cc_op(s1->cc_op);
1344 gen_compute_eflags_c(cpu_tmp4);
1345 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1346 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1348 gen_op_mov_reg_T0(ot, d);
1350 gen_op_st_T0_A0(ot + s1->mem_index);
1351 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1352 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1353 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1354 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1355 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1356 s1->cc_op = CC_OP_DYNAMIC;
1359 gen_op_addl_T0_T1();
1361 gen_op_mov_reg_T0(ot, d);
1363 gen_op_st_T0_A0(ot + s1->mem_index);
1364 gen_op_update2_cc();
1365 s1->cc_op = CC_OP_ADDB + ot;
1368 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1370 gen_op_mov_reg_T0(ot, d);
1372 gen_op_st_T0_A0(ot + s1->mem_index);
1373 gen_op_update2_cc();
1374 s1->cc_op = CC_OP_SUBB + ot;
1378 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1380 gen_op_mov_reg_T0(ot, d);
1382 gen_op_st_T0_A0(ot + s1->mem_index);
1383 gen_op_update1_cc();
1384 s1->cc_op = CC_OP_LOGICB + ot;
1387 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1389 gen_op_mov_reg_T0(ot, d);
1391 gen_op_st_T0_A0(ot + s1->mem_index);
1392 gen_op_update1_cc();
1393 s1->cc_op = CC_OP_LOGICB + ot;
1396 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1398 gen_op_mov_reg_T0(ot, d);
1400 gen_op_st_T0_A0(ot + s1->mem_index);
1401 gen_op_update1_cc();
1402 s1->cc_op = CC_OP_LOGICB + ot;
1405 gen_op_cmpl_T0_T1_cc();
1406 s1->cc_op = CC_OP_SUBB + ot;
1411 /* if d == OR_TMP0, it means memory operand (address in A0) */
1412 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1415 gen_op_mov_TN_reg(ot, 0, d);
1417 gen_op_ld_T0_A0(ot + s1->mem_index);
1418 if (s1->cc_op != CC_OP_DYNAMIC)
1419 gen_op_set_cc_op(s1->cc_op);
1421 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1422 s1->cc_op = CC_OP_INCB + ot;
1424 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1425 s1->cc_op = CC_OP_DECB + ot;
1428 gen_op_mov_reg_T0(ot, d);
1430 gen_op_st_T0_A0(ot + s1->mem_index);
1431 gen_compute_eflags_c(cpu_cc_src);
1432 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1435 static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1436 int is_right, int is_arith)
1448 gen_op_ld_T0_A0(ot + s->mem_index);
1450 gen_op_mov_TN_reg(ot, 0, op1);
1452 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1454 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1458 gen_exts(ot, cpu_T[0]);
1459 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1460 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1462 gen_extu(ot, cpu_T[0]);
1463 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1464 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1467 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1468 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1473 gen_op_st_T0_A0(ot + s->mem_index);
1475 gen_op_mov_reg_T0(ot, op1);
1477 /* update eflags if non zero shift */
1478 if (s->cc_op != CC_OP_DYNAMIC)
1479 gen_op_set_cc_op(s->cc_op);
1481 shift_label = gen_new_label();
1482 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);
1484 tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
1485 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1487 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1489 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1491 gen_set_label(shift_label);
1492 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1495 static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1496 int is_right, int is_arith)
1507 gen_op_ld_T0_A0(ot + s->mem_index);
1509 gen_op_mov_TN_reg(ot, 0, op1);
1515 gen_exts(ot, cpu_T[0]);
1516 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], op2 - 1);
1517 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1519 gen_extu(ot, cpu_T[0]);
1520 tcg_gen_shri_tl(cpu_tmp0, cpu_T[0], op2 - 1);
1521 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1524 tcg_gen_shli_tl(cpu_tmp0, cpu_T[0], op2 - 1);
1525 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1531 gen_op_st_T0_A0(ot + s->mem_index);
1533 gen_op_mov_reg_T0(ot, op1);
1535 /* update eflags if non zero shift */
1537 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp0);
1538 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1540 s->cc_op = CC_OP_SARB + ot;
1542 s->cc_op = CC_OP_SHLB + ot;
1546 static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1549 tcg_gen_shli_tl(ret, arg1, arg2);
1551 tcg_gen_shri_tl(ret, arg1, -arg2);
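    /* i.e. a shift with a signed count: positive shifts left, negative
       shifts right. It is used below to line the rotated-out bit up
       with CC_O (bit 11) regardless of the operand width. */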
1554 /* XXX: add faster immediate case */
1555 static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
1559 int label1, label2, data_bits;
1568 gen_op_ld_T0_A0(ot + s->mem_index);
1570 gen_op_mov_TN_reg(ot, 0, op1);
1572 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1574 /* Must test zero case to avoid using undefined behaviour in TCG
1575 shifts. */
1576 label1 = gen_new_label();
1577 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);
1580 tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
1582 tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);
1584 gen_extu(ot, cpu_T[0]);
1585 tcg_gen_mov_tl(cpu_T3, cpu_T[0]);
1587 data_bits = 8 << ot;
1588 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1589 fix TCG definition) */
1591 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
1592 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1593 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1595 tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
1596 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1597 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1599 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1601 gen_set_label(label1);
1604 gen_op_st_T0_A0(ot + s->mem_index);
1606 gen_op_mov_reg_T0(ot, op1);
1609 if (s->cc_op != CC_OP_DYNAMIC)
1610 gen_op_set_cc_op(s->cc_op);
1612 label2 = gen_new_label();
1613 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);
1615 gen_compute_eflags(cpu_cc_src);
1616 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1617 tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
1618 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1619 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1620 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1622 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
1624 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
1625 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
1627 tcg_gen_discard_tl(cpu_cc_dst);
1628 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1630 gen_set_label(label2);
1631 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1634 static void *helper_rotc[8] = {
1638 X86_64_ONLY(helper_rclq),
1642 X86_64_ONLY(helper_rcrq),
1645 /* XXX: add faster immediate = 1 case */
1646 static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
1651 if (s->cc_op != CC_OP_DYNAMIC)
1652 gen_op_set_cc_op(s->cc_op);
1656 gen_op_ld_T0_A0(ot + s->mem_index);
1658 gen_op_mov_TN_reg(ot, 0, op1);
1660 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
1661 cpu_T[0], cpu_T[0], cpu_T[1]);
1664 gen_op_st_T0_A0(ot + s->mem_index);
1666 gen_op_mov_reg_T0(ot, op1);
1669 label1 = gen_new_label();
1670 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);
1672 tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
1673 tcg_gen_discard_tl(cpu_cc_dst);
1674 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1676 gen_set_label(label1);
1677 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1680 /* XXX: add faster immediate case */
1681 static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
1684 int label1, label2, data_bits;
1694 gen_op_ld_T0_A0(ot + s->mem_index);
1696 gen_op_mov_TN_reg(ot, 0, op1);
1698 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
1699 /* Must test zero case to avoid using undefined behaviour in TCG
1700 shifts. */
1701 label1 = gen_new_label();
1702 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
1704 tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
1705 if (ot == OT_WORD) {
1706 /* Note: we implement the Intel behaviour for shift count > 16 */
1708 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
1709 tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
1710 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1711 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1713 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1715 /* only needed if count > 16, but a test would complicate the code */
1716 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
1717 tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);
1719 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
1721 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1723 /* XXX: not optimal */
1724 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
1725 tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
1726 tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
1727 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
1729 tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1730 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
1731 tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
1732 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
1734 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
1735 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
1736 tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1737 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1740 data_bits = 8 << ot;
1743 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1745 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1747 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
1748 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
1749 tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1750 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1754 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
1756 tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1758 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
1759 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
1760 tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1761 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1764 tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);
1766 gen_set_label(label1);
1769 gen_op_st_T0_A0(ot + s->mem_index);
1771 gen_op_mov_reg_T0(ot, op1);
1774 if (s->cc_op != CC_OP_DYNAMIC)
1775 gen_op_set_cc_op(s->cc_op);
1777 label2 = gen_new_label();
1778 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);
1780 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1781 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1783 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1785 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1787 gen_set_label(label2);
1788 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1791 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1794 gen_op_mov_TN_reg(ot, 1, s);
1797 gen_rot_rm_T1(s1, ot, d, 0);
1800 gen_rot_rm_T1(s1, ot, d, 1);
1804 gen_shift_rm_T1(s1, ot, d, 0, 0);
1807 gen_shift_rm_T1(s1, ot, d, 1, 0);
1810 gen_shift_rm_T1(s1, ot, d, 1, 1);
1813 gen_rotc_rm_T1(s1, ot, d, 0);
1816 gen_rotc_rm_T1(s1, ot, d, 1);
1821 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1826 gen_shift_rm_im(s1, ot, d, c, 0, 0);
1829 gen_shift_rm_im(s1, ot, d, c, 1, 0);
1832 gen_shift_rm_im(s1, ot, d, c, 1, 1);
1835 /* currently not optimized */
1836 gen_op_movl_T1_im(c);
1837 gen_shift(s1, op, ot, d, OR_TMP1);
1842 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1850 int mod, rm, code, override, must_add_seg;
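    /* Reminder of the encodings decoded here: ModRM is mod(7:6)
       reg(5:3) rm(2:0); when rm == 4 a SIB byte follows with scale(7:6)
       index(5:3) base(2:0), and REX.X/REX.B extend index/base to 16
       registers in long mode. */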
1852 override = s->override;
1853 must_add_seg = s->addseg;
1856 mod = (modrm >> 6) & 3;
1868 code = ldub_code(s->pc++);
1869 scale = (code >> 6) & 3;
1870 index = ((code >> 3) & 7) | REX_X(s);
1877 if ((base & 7) == 5) {
1879 disp = (int32_t)ldl_code(s->pc);
1881 if (CODE64(s) && !havesib) {
1882 disp += s->pc + s->rip_offset;
1889 disp = (int8_t)ldub_code(s->pc++);
1893 disp = ldl_code(s->pc);
1899 /* for correct popl handling with esp */
1900 if (base == 4 && s->popl_esp_hack)
1901 disp += s->popl_esp_hack;
1902 #ifdef TARGET_X86_64
1903 if (s->aflag == 2) {
1904 gen_op_movq_A0_reg(base);
1906 gen_op_addq_A0_im(disp);
1911 gen_op_movl_A0_reg(base);
1913 gen_op_addl_A0_im(disp);
1916 #ifdef TARGET_X86_64
1917 if (s->aflag == 2) {
1918 gen_op_movq_A0_im(disp);
1922 gen_op_movl_A0_im(disp);
1925 /* XXX: index == 4 is always invalid */
1926 if (havesib && (index != 4 || scale != 0)) {
1927 #ifdef TARGET_X86_64
1928 if (s->aflag == 2) {
1929 gen_op_addq_A0_reg_sN(scale, index);
1933 gen_op_addl_A0_reg_sN(scale, index);
1938 if (base == R_EBP || base == R_ESP)
1943 #ifdef TARGET_X86_64
1944 if (s->aflag == 2) {
1945 gen_op_addq_A0_seg(override);
1949 gen_op_addl_A0_seg(override);
1956 disp = lduw_code(s->pc);
1958 gen_op_movl_A0_im(disp);
1959 rm = 0; /* avoid SS override */
1966 disp = (int8_t)ldub_code(s->pc++);
1970 disp = lduw_code(s->pc);
1976 gen_op_movl_A0_reg(R_EBX);
1977 gen_op_addl_A0_reg_sN(0, R_ESI);
1980 gen_op_movl_A0_reg(R_EBX);
1981 gen_op_addl_A0_reg_sN(0, R_EDI);
1984 gen_op_movl_A0_reg(R_EBP);
1985 gen_op_addl_A0_reg_sN(0, R_ESI);
1988 gen_op_movl_A0_reg(R_EBP);
1989 gen_op_addl_A0_reg_sN(0, R_EDI);
1992 gen_op_movl_A0_reg(R_ESI);
1995 gen_op_movl_A0_reg(R_EDI);
1998 gen_op_movl_A0_reg(R_EBP);
2002 gen_op_movl_A0_reg(R_EBX);
2006 gen_op_addl_A0_im(disp);
2007 gen_op_andl_A0_ffff();
2011 if (rm == 2 || rm == 3 || rm == 6)
2016 gen_op_addl_A0_seg(override);
2026 static void gen_nop_modrm(DisasContext *s, int modrm)
2028 int mod, rm, base, code;
2030 mod = (modrm >> 6) & 3;
2040 code = ldub_code(s->pc++);
2076 /* used for LEA and MOV AX, mem */
2077 static void gen_add_A0_ds_seg(DisasContext *s)
2079 int override, must_add_seg;
2080 must_add_seg = s->addseg;
2082 if (s->override >= 0) {
2083 override = s->override;
2089 #ifdef TARGET_X86_64
2091 gen_op_addq_A0_seg(override);
2095 gen_op_addl_A0_seg(override);
2100 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2101 OR_TMP0 */
2102 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2104 int mod, rm, opreg, disp;
2106 mod = (modrm >> 6) & 3;
2107 rm = (modrm & 7) | REX_B(s);
2111 gen_op_mov_TN_reg(ot, 0, reg);
2112 gen_op_mov_reg_T0(ot, rm);
2114 gen_op_mov_TN_reg(ot, 0, rm);
2116 gen_op_mov_reg_T0(ot, reg);
2119 gen_lea_modrm(s, modrm, &opreg, &disp);
2122 gen_op_mov_TN_reg(ot, 0, reg);
2123 gen_op_st_T0_A0(ot + s->mem_index);
2125 gen_op_ld_T0_A0(ot + s->mem_index);
2127 gen_op_mov_reg_T0(ot, reg);
2132 static inline uint32_t insn_get(DisasContext *s, int ot)
2138 ret = ldub_code(s->pc);
2142 ret = lduw_code(s->pc);
2147 ret = ldl_code(s->pc);
2154 static inline int insn_const_size(unsigned int ot)
2162 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2164 TranslationBlock *tb;
2167 pc = s->cs_base + eip;
2169 /* NOTE: we handle the case where the TB spans two pages here */
2170 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2171 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2172 /* jump to same page: we can use a direct jump */
2173 tcg_gen_goto_tb(tb_num);
2175 tcg_gen_exit_tb((long)tb + tb_num);
2177 /* jump to another page: currently not optimized */
2183 static inline void gen_jcc(DisasContext *s, int b,
2184 target_ulong val, target_ulong next_eip)
2189 if (s->cc_op != CC_OP_DYNAMIC) {
2190 gen_op_set_cc_op(s->cc_op);
2191 s->cc_op = CC_OP_DYNAMIC;
2194 l1 = gen_new_label();
2195 gen_jcc1(s, cc_op, b, l1);
2197 gen_goto_tb(s, 0, next_eip);
2200 gen_goto_tb(s, 1, val);
2204 l1 = gen_new_label();
2205 l2 = gen_new_label();
2206 gen_jcc1(s, cc_op, b, l1);
2208 gen_jmp_im(next_eip);
2218 static void gen_setcc(DisasContext *s, int b)
2220 int inv, jcc_op, l1;
2222 if (is_fast_jcc_case(s, b)) {
2223 /* nominal case: we use a jump */
2224 tcg_gen_movi_tl(cpu_T[0], 0);
2225 l1 = gen_new_label();
2226 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2227 tcg_gen_movi_tl(cpu_T[0], 1);
2230 /* slow case: it is more efficient not to generate a jump,
2231 although it is questionable whether this optimization is
2232 worth it */
2234 jcc_op = (b >> 1) & 7;
2235 if (s->cc_op != CC_OP_DYNAMIC)
2236 gen_op_set_cc_op(s->cc_op);
2237 gen_setcc_slow_T0(jcc_op);
2239 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2244 static inline void gen_op_movl_T0_seg(int seg_reg)
2246 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2247 offsetof(CPUX86State,segs[seg_reg].selector));
2250 static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2252 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2253 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2254 offsetof(CPUX86State,segs[seg_reg].selector));
2255 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2256 tcg_gen_st_tl(cpu_T[0], cpu_env,
2257 offsetof(CPUX86State,segs[seg_reg].base));
2260 /* move T0 to seg_reg and compute if the CPU state may change. Never
2261 call this function with seg_reg == R_CS */
2262 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2264 if (s->pe && !s->vm86) {
2265 /* XXX: optimize by finding processor state dynamically */
2266 if (s->cc_op != CC_OP_DYNAMIC)
2267 gen_op_set_cc_op(s->cc_op);
2268 gen_jmp_im(cur_eip);
2269 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2270 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2271 /* abort translation because the addseg value may change or
2272 because ss32 may change. For R_SS, translation must always
2273 stop as a special handling must be done to disable hardware
2274 interrupts for the next instruction */
2275 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2278 gen_op_movl_seg_T0_vm(seg_reg);
2279 if (seg_reg == R_SS)
2284 static inline int svm_is_rep(int prefixes)
2286 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
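    /* The returned 8 corresponds to the REP bit of the SVM IOIO exit
       information, ORed into the flags passed to the intercept
       helpers. */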
2290 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2291 uint32_t type, uint64_t param)
2293 if(!(s->flags & (INTERCEPT_SVM_MASK)))
2294 /* no SVM activated */
2297 /* CRx and DRx reads/writes */
2298 case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2299 if (s->cc_op != CC_OP_DYNAMIC) {
2300 gen_op_set_cc_op(s->cc_op);
2302 gen_jmp_im(pc_start - s->cs_base);
2303 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2304 tcg_const_i32(type), tcg_const_i64(param));
2305 /* this is a special case as we do not know if the interception occurs
2306 so we assume there was none */
2309 if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2310 if (s->cc_op != CC_OP_DYNAMIC) {
2311 gen_op_set_cc_op(s->cc_op);
2313 gen_jmp_im(pc_start - s->cs_base);
2314 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2315 tcg_const_i32(type), tcg_const_i64(param));
2316 /* this is a special case as we do not know if the interception occurs
2317 so we assume there was none */
2322 if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2323 if (s->cc_op != CC_OP_DYNAMIC) {
2324 gen_op_set_cc_op(s->cc_op);
2326 gen_jmp_im(pc_start - s->cs_base);
2327 tcg_gen_helper_0_2(helper_vmexit,
2328 tcg_const_i32(type), tcg_const_i64(param));
2329 /* we can optimize this one so that TBs don't get
2330 longer than up to the vmexit */
2339 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2341 return gen_svm_check_intercept_param(s, pc_start, type, 0);
2344 static inline void gen_stack_update(DisasContext *s, int addend)
2346 #ifdef TARGET_X86_64
2348 gen_op_add_reg_im(2, R_ESP, addend);
2352 gen_op_add_reg_im(1, R_ESP, addend);
2354 gen_op_add_reg_im(0, R_ESP, addend);
2358 /* generate a push. It depends on ss32, addseg and dflag */
2359 static void gen_push_T0(DisasContext *s)
2361 #ifdef TARGET_X86_64
2363 gen_op_movq_A0_reg(R_ESP);
2365 gen_op_addq_A0_im(-8);
2366 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2368 gen_op_addq_A0_im(-2);
2369 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2371 gen_op_mov_reg_A0(2, R_ESP);
2375 gen_op_movl_A0_reg(R_ESP);
2377 gen_op_addl_A0_im(-2);
2379 gen_op_addl_A0_im(-4);
2382 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2383 gen_op_addl_A0_seg(R_SS);
2386 gen_op_andl_A0_ffff();
2387 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2388 gen_op_addl_A0_seg(R_SS);
2390 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2391 if (s->ss32 && !s->addseg)
2392 gen_op_mov_reg_A0(1, R_ESP);
2394 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2398 /* generate a push. It depends on ss32, addseg and dflag */
2399 /* slower version for T1, only used for call Ev */
2400 static void gen_push_T1(DisasContext *s)
2402 #ifdef TARGET_X86_64
2404 gen_op_movq_A0_reg(R_ESP);
2406 gen_op_addq_A0_im(-8);
2407 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2409 gen_op_addq_A0_im(-2);
2410 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2412 gen_op_mov_reg_A0(2, R_ESP);
2416 gen_op_movl_A0_reg(R_ESP);
2418 gen_op_addl_A0_im(-2);
2420 gen_op_addl_A0_im(-4);
2423 gen_op_addl_A0_seg(R_SS);
2426 gen_op_andl_A0_ffff();
2427 gen_op_addl_A0_seg(R_SS);
2429 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2431 if (s->ss32 && !s->addseg)
2432 gen_op_mov_reg_A0(1, R_ESP);
2434 gen_stack_update(s, (-2) << s->dflag);
2438 /* two step pop is necessary for precise exceptions */
2439 static void gen_pop_T0(DisasContext *s)
2441 #ifdef TARGET_X86_64
2443 gen_op_movq_A0_reg(R_ESP);
2444 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2448 gen_op_movl_A0_reg(R_ESP);
2451 gen_op_addl_A0_seg(R_SS);
2453 gen_op_andl_A0_ffff();
2454 gen_op_addl_A0_seg(R_SS);
2456 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
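    /* ESP is only advanced afterwards by gen_pop_update(): if the load
       faults, the guest still sees the pre-pop ESP, which is what the
       "two step pop" above buys for precise exceptions. */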
2460 static void gen_pop_update(DisasContext *s)
2462 #ifdef TARGET_X86_64
2463 if (CODE64(s) && s->dflag) {
2464 gen_stack_update(s, 8);
2468 gen_stack_update(s, 2 << s->dflag);
2472 static void gen_stack_A0(DisasContext *s)
2474 gen_op_movl_A0_reg(R_ESP);
2476 gen_op_andl_A0_ffff();
2477 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2479 gen_op_addl_A0_seg(R_SS);
2482 /* NOTE: wrap-around in 16 bit is not fully handled */
2483 static void gen_pusha(DisasContext *s)
2486 gen_op_movl_A0_reg(R_ESP);
2487 gen_op_addl_A0_im(-16 << s->dflag);
2489 gen_op_andl_A0_ffff();
2490 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2492 gen_op_addl_A0_seg(R_SS);
2493 for(i = 0;i < 8; i++) {
2494 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2495 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2496 gen_op_addl_A0_im(2 << s->dflag);
2498 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2501 /* NOTE: wrap-around in 16 bit is not fully handled */
2502 static void gen_popa(DisasContext *s)
2505 gen_op_movl_A0_reg(R_ESP);
2507 gen_op_andl_A0_ffff();
2508 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2509 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2511 gen_op_addl_A0_seg(R_SS);
2512 for(i = 0;i < 8; i++) {
2513 /* ESP is not reloaded */
2515 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2516 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2518 gen_op_addl_A0_im(2 << s->dflag);
2520 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2523 static void gen_enter(DisasContext *s, int esp_addend, int level)
2528 #ifdef TARGET_X86_64
2530 ot = s->dflag ? OT_QUAD : OT_WORD;
2533 gen_op_movl_A0_reg(R_ESP);
2534 gen_op_addq_A0_im(-opsize);
2535 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2538 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2539 gen_op_st_T0_A0(ot + s->mem_index);
2541 /* XXX: must save state */
2542 tcg_gen_helper_0_3(helper_enter64_level,
2543 tcg_const_i32(level),
2544 tcg_const_i32((ot == OT_QUAD)),
2547 gen_op_mov_reg_T1(ot, R_EBP);
2548 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2549 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2553 ot = s->dflag + OT_WORD;
2554 opsize = 2 << s->dflag;
2556 gen_op_movl_A0_reg(R_ESP);
2557 gen_op_addl_A0_im(-opsize);
2559 gen_op_andl_A0_ffff();
2560 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2562 gen_op_addl_A0_seg(R_SS);
2564 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2565 gen_op_st_T0_A0(ot + s->mem_index);
2567 /* XXX: must save state */
2568 tcg_gen_helper_0_3(helper_enter_level,
2569 tcg_const_i32(level),
2570 tcg_const_i32(s->dflag),
2573 gen_op_mov_reg_T1(ot, R_EBP);
2574 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2575 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2579 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2581 if (s->cc_op != CC_OP_DYNAMIC)
2582 gen_op_set_cc_op(s->cc_op);
2583 gen_jmp_im(cur_eip);
2584 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
2588 /* an interrupt is different from an exception because of the
2589 privilege checks */
2590 static void gen_interrupt(DisasContext *s, int intno,
2591 target_ulong cur_eip, target_ulong next_eip)
2593 if (s->cc_op != CC_OP_DYNAMIC)
2594 gen_op_set_cc_op(s->cc_op);
2595 gen_jmp_im(cur_eip);
2596 tcg_gen_helper_0_2(helper_raise_interrupt,
2597 tcg_const_i32(intno),
2598 tcg_const_i32(next_eip - cur_eip));
2602 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2604 if (s->cc_op != CC_OP_DYNAMIC)
2605 gen_op_set_cc_op(s->cc_op);
2606 gen_jmp_im(cur_eip);
2607 tcg_gen_helper_0_0(helper_debug);
2611 /* generate a generic end of block. Trace exception is also generated
2612 if needed */
2613 static void gen_eob(DisasContext *s)
2615 if (s->cc_op != CC_OP_DYNAMIC)
2616 gen_op_set_cc_op(s->cc_op);
2617 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2618 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2620 if (s->singlestep_enabled) {
2621 tcg_gen_helper_0_0(helper_debug);
2623 tcg_gen_helper_0_0(helper_single_step);
2630 /* generate a jump to eip. No segment change must happen before as a
2631 direct call to the next block may occur */
2632 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2635 if (s->cc_op != CC_OP_DYNAMIC) {
2636 gen_op_set_cc_op(s->cc_op);
2637 s->cc_op = CC_OP_DYNAMIC;
2639 gen_goto_tb(s, tb_num, eip);
2647 static void gen_jmp(DisasContext *s, target_ulong eip)
2649 gen_jmp_tb(s, eip, 0);
2652 static inline void gen_ldq_env_A0(int idx, int offset)
2654 int mem_index = (idx >> 2) - 1;
2655 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2656 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2659 static inline void gen_stq_env_A0(int idx, int offset)
2661 int mem_index = (idx >> 2) - 1;
2662 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2663 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2666 static inline void gen_ldo_env_A0(int idx, int offset)
2668 int mem_index = (idx >> 2) - 1;
2669 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2670 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2671 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2672 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2673 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2676 static inline void gen_sto_env_A0(int idx, int offset)
2678 int mem_index = (idx >> 2) - 1;
2679 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2680 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2681 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2682 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2683 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2686 static inline void gen_op_movo(int d_offset, int s_offset)
2688 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2689 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2690 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2691 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2694 static inline void gen_op_movq(int d_offset, int s_offset)
2696 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2697 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2700 static inline void gen_op_movl(int d_offset, int s_offset)
2702 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2703 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2706 static inline void gen_op_movq_env_0(int d_offset)
2708 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2709 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2712 #define SSE_SPECIAL ((void *)1)
2713 #define SSE_DUMMY ((void *)2)
2715 #define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2716 #define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2717 helper_ ## x ## ss, helper_ ## x ## sd, }
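/* Example expansions: MMX_OP2(paddb) yields
   { helper_paddb_mmx, helper_paddb_xmm } and SSE_FOP(add) yields
   { helper_addps, helper_addpd, helper_addss, helper_addsd }; the four
   slots of sse_op_table1 are selected by the mandatory prefix
   (none, 0x66, 0xF3, 0xF2). */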
2719 static void *sse_op_table1[256][4] = {
2720 /* 3DNow! extensions */
2721 [0x0e] = { SSE_DUMMY }, /* femms */
2722 [0x0f] = { SSE_DUMMY }, /* pf... */
2723 /* pure SSE operations */
2724 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2725 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2726 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2727 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2728 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2729 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2730 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2731 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2733 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2734 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2735 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2736 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2737 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2738 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2739 [0x2e] = { helper_ucomiss, helper_ucomisd },
2740 [0x2f] = { helper_comiss, helper_comisd },
2741 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2742 [0x51] = SSE_FOP(sqrt),
2743 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2744 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2745 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2746 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2747 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2748 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2749 [0x58] = SSE_FOP(add),
2750 [0x59] = SSE_FOP(mul),
2751 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2752 helper_cvtss2sd, helper_cvtsd2ss },
2753 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2754 [0x5c] = SSE_FOP(sub),
2755 [0x5d] = SSE_FOP(min),
2756 [0x5e] = SSE_FOP(div),
2757 [0x5f] = SSE_FOP(max),
2759 [0xc2] = SSE_FOP(cmpeq),
2760 [0xc6] = { helper_shufps, helper_shufpd },
2762 /* MMX ops and their SSE extensions */
2763 [0x60] = MMX_OP2(punpcklbw),
2764 [0x61] = MMX_OP2(punpcklwd),
2765 [0x62] = MMX_OP2(punpckldq),
2766 [0x63] = MMX_OP2(packsswb),
2767 [0x64] = MMX_OP2(pcmpgtb),
2768 [0x65] = MMX_OP2(pcmpgtw),
2769 [0x66] = MMX_OP2(pcmpgtl),
2770 [0x67] = MMX_OP2(packuswb),
2771 [0x68] = MMX_OP2(punpckhbw),
2772 [0x69] = MMX_OP2(punpckhwd),
2773 [0x6a] = MMX_OP2(punpckhdq),
2774 [0x6b] = MMX_OP2(packssdw),
2775 [0x6c] = { NULL, helper_punpcklqdq_xmm },
2776 [0x6d] = { NULL, helper_punpckhqdq_xmm },
2777 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2778 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2779 [0x70] = { helper_pshufw_mmx,
2782 helper_pshuflw_xmm },
2783 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2784 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2785 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2786 [0x74] = MMX_OP2(pcmpeqb),
2787 [0x75] = MMX_OP2(pcmpeqw),
2788 [0x76] = MMX_OP2(pcmpeql),
2789 [0x77] = { SSE_DUMMY }, /* emms */
2790 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2791 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2792 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
2793 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2794 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2795 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2796 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2797 [0xd1] = MMX_OP2(psrlw),
2798 [0xd2] = MMX_OP2(psrld),
2799 [0xd3] = MMX_OP2(psrlq),
2800 [0xd4] = MMX_OP2(paddq),
2801 [0xd5] = MMX_OP2(pmullw),
2802 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
2803 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2804 [0xd8] = MMX_OP2(psubusb),
2805 [0xd9] = MMX_OP2(psubusw),
2806 [0xda] = MMX_OP2(pminub),
2807 [0xdb] = MMX_OP2(pand),
2808 [0xdc] = MMX_OP2(paddusb),
2809 [0xdd] = MMX_OP2(paddusw),
2810 [0xde] = MMX_OP2(pmaxub),
2811 [0xdf] = MMX_OP2(pandn),
2812 [0xe0] = MMX_OP2(pavgb),
2813 [0xe1] = MMX_OP2(psraw),
2814 [0xe2] = MMX_OP2(psrad),
2815 [0xe3] = MMX_OP2(pavgw),
2816 [0xe4] = MMX_OP2(pmulhuw),
2817 [0xe5] = MMX_OP2(pmulhw),
2818 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2819 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2820 [0xe8] = MMX_OP2(psubsb),
2821 [0xe9] = MMX_OP2(psubsw),
2822 [0xea] = MMX_OP2(pminsw),
2823 [0xeb] = MMX_OP2(por),
2824 [0xec] = MMX_OP2(paddsb),
2825 [0xed] = MMX_OP2(paddsw),
2826 [0xee] = MMX_OP2(pmaxsw),
2827 [0xef] = MMX_OP2(pxor),
2828 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2829 [0xf1] = MMX_OP2(psllw),
2830 [0xf2] = MMX_OP2(pslld),
2831 [0xf3] = MMX_OP2(psllq),
2832 [0xf4] = MMX_OP2(pmuludq),
2833 [0xf5] = MMX_OP2(pmaddwd),
2834 [0xf6] = MMX_OP2(psadbw),
2835 [0xf7] = MMX_OP2(maskmov),
2836 [0xf8] = MMX_OP2(psubb),
2837 [0xf9] = MMX_OP2(psubw),
2838 [0xfa] = MMX_OP2(psubl),
2839 [0xfb] = MMX_OP2(psubq),
2840 [0xfc] = MMX_OP2(paddb),
2841 [0xfd] = MMX_OP2(paddw),
2842 [0xfe] = MMX_OP2(paddl),
2845 static void *sse_op_table2[3 * 8][2] = {
2846 [0 + 2] = MMX_OP2(psrlw),
2847 [0 + 4] = MMX_OP2(psraw),
2848 [0 + 6] = MMX_OP2(psllw),
2849 [8 + 2] = MMX_OP2(psrld),
2850 [8 + 4] = MMX_OP2(psrad),
2851 [8 + 6] = MMX_OP2(pslld),
2852 [16 + 2] = MMX_OP2(psrlq),
2853 [16 + 3] = { NULL, helper_psrldq_xmm },
2854 [16 + 6] = MMX_OP2(psllq),
2855 [16 + 7] = { NULL, helper_pslldq_xmm },
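/* A hedged illustration (hypothetical helper, not part of the translator):
   sse_op_table2 above is indexed as group * 8 + /r, where the group comes
   from the opcode (0x71 -> words, 0x72 -> dwords, 0x73 -> qwords) and /r is
   the ModRM "reg" field selecting srl/sra/sll; the second index picks the
   MMX (0) or 0x66-prefixed XMM (1) helper.  The row computation mirrors the
   lookup performed later in gen_sse(): */
static inline int sse_shift_table2_row(int b, int modrm)
{
    /* b is the opcode byte: 0x71, 0x72 or 0x73 */
    return ((b - 1) & 3) * 8 + ((modrm >> 3) & 7);
}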
2858 static void *sse_op_table3[4 * 3] = {
2861 X86_64_ONLY(helper_cvtsq2ss),
2862 X86_64_ONLY(helper_cvtsq2sd),
2866 X86_64_ONLY(helper_cvttss2sq),
2867 X86_64_ONLY(helper_cvttsd2sq),
2871 X86_64_ONLY(helper_cvtss2sq),
2872 X86_64_ONLY(helper_cvtsd2sq),
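/* A hedged illustration (hypothetical helper, not part of the translator):
   as the lookup expressions in gen_sse() below suggest, sse_op_table3 is
   three blocks of four entries.  Within a block, (b >> 8) - 2 selects the
   ss (REPZ) or sd (REPNZ) form and (s->dflag == 2) * 2 the 64-bit integer
   variant; block 0 is int -> float, blocks 1 and 2 the truncating and
   rounding float -> int conversions. */
static inline int sse_cvt_table3_index(int b, int dflag)
{
    int idx = (dflag == 2) * 2 + ((b >> 8) - 2);
    if ((b & 0xff) == 0x2a)
        return idx;                   /* cvtsi2ss / cvtsi2sd */
    return idx + 4 + (b & 1) * 4;     /* 0x2c: cvtt*2si, 0x2d: cvt*2si */
}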
2875 static void *sse_op_table4[8][4] = {
2886 static void *sse_op_table5[256] = {
2887 [0x0c] = helper_pi2fw,
2888 [0x0d] = helper_pi2fd,
2889 [0x1c] = helper_pf2iw,
2890 [0x1d] = helper_pf2id,
2891 [0x8a] = helper_pfnacc,
2892 [0x8e] = helper_pfpnacc,
2893 [0x90] = helper_pfcmpge,
2894 [0x94] = helper_pfmin,
2895 [0x96] = helper_pfrcp,
2896 [0x97] = helper_pfrsqrt,
2897 [0x9a] = helper_pfsub,
2898 [0x9e] = helper_pfadd,
2899 [0xa0] = helper_pfcmpgt,
2900 [0xa4] = helper_pfmax,
2901 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2902 [0xa7] = helper_movq, /* pfrsqit1 */
2903 [0xaa] = helper_pfsubr,
2904 [0xae] = helper_pfacc,
2905 [0xb0] = helper_pfcmpeq,
2906 [0xb4] = helper_pfmul,
2907 [0xb6] = helper_movq, /* pfrcpit2 */
2908 [0xb7] = helper_pmulhrw_mmx,
2909 [0xbb] = helper_pswapd,
2910 [0xbf] = helper_pavgb_mmx /* pavgusb */
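/* A hedged illustration (hypothetical helper, not part of the translator):
   3DNow! encodes its operation in an imm8 suffix (0x0f 0x0f modrm ... ib),
   so the 0x0f case in gen_sse() below fetches the suffix after decoding the
   operands and uses it as a direct index into sse_op_table5; a NULL entry
   means an illegal opcode. */
static inline void *lookup_3dnow_op(uint8_t suffix)
{
    return sse_op_table5[suffix];
}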
2913 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2915 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2916 int modrm, mod, rm, reg, reg_addr, offset_addr;
2920 if (s->prefix & PREFIX_DATA)
2922 else if (s->prefix & PREFIX_REPZ)
2924 else if (s->prefix & PREFIX_REPNZ)
2928 sse_op2 = sse_op_table1[b][b1];
2931 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2941 /* simple MMX/SSE operation */
2942 if (s->flags & HF_TS_MASK) {
2943 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2946 if (s->flags & HF_EM_MASK) {
2948 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2951 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2954 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2957 tcg_gen_helper_0_0(helper_emms);
2962 tcg_gen_helper_0_0(helper_emms);
2965 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2966 the static cpu state) */
2968 tcg_gen_helper_0_0(helper_enter_mmx);
2971 modrm = ldub_code(s->pc++);
2972 reg = ((modrm >> 3) & 7);
2975 mod = (modrm >> 6) & 3;
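/* Illustrative note: the ModRM byte fetched above splits as
   mod[7:6] reg[5:3] rm[2:0]; e.g. modrm = 0xd1 gives mod = 3
   (register operand), reg = 2, rm = 1, while mod != 3 selects a
   memory operand whose address gen_lea_modrm() computes into A0. */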
2976 if (sse_op2 == SSE_SPECIAL) {
2979 case 0x0e7: /* movntq */
2982 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2983 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2985 case 0x1e7: /* movntdq */
2986 case 0x02b: /* movntps */
2987 case 0x12b: /* movntpd */
2988 case 0x3f0: /* lddqu */
2991 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2992 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2994 case 0x6e: /* movd mm, ea */
2995 #ifdef TARGET_X86_64
2996 if (s->dflag == 2) {
2997 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2998 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3002 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3003 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3004 offsetof(CPUX86State,fpregs[reg].mmx));
3005 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3008 case 0x16e: /* movd xmm, ea */
3009 #ifdef TARGET_X86_64
3010 if (s->dflag == 2) {
3011 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3012 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3013 offsetof(CPUX86State,xmm_regs[reg]));
3014 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3018 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3019 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3020 offsetof(CPUX86State,xmm_regs[reg]));
3021 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3022 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3025 case 0x6f: /* movq mm, ea */
3027 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3028 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3031 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3032 offsetof(CPUX86State,fpregs[rm].mmx));
3033 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3034 offsetof(CPUX86State,fpregs[reg].mmx));
3037 case 0x010: /* movups */
3038 case 0x110: /* movupd */
3039 case 0x028: /* movaps */
3040 case 0x128: /* movapd */
3041 case 0x16f: /* movdqa xmm, ea */
3042 case 0x26f: /* movdqu xmm, ea */
3044 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3045 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3047 rm = (modrm & 7) | REX_B(s);
3048 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3049 offsetof(CPUX86State,xmm_regs[rm]));
3052 case 0x210: /* movss xmm, ea */
3054 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3055 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3056 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3058 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3059 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3060 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3062 rm = (modrm & 7) | REX_B(s);
3063 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3064 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3067 case 0x310: /* movsd xmm, ea */
3069 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3070 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3072 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3073 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3075 rm = (modrm & 7) | REX_B(s);
3076 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3077 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3080 case 0x012: /* movlps */
3081 case 0x112: /* movlpd */
3083 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3084 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3087 rm = (modrm & 7) | REX_B(s);
3088 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3089 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3092 case 0x212: /* movsldup */
3094 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3095 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3097 rm = (modrm & 7) | REX_B(s);
3098 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3099 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3100 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3101 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3103 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3104 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3105 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3106 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3108 case 0x312: /* movddup */
3110 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3111 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3113 rm = (modrm & 7) | REX_B(s);
3114 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3115 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3117 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3118 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3120 case 0x016: /* movhps */
3121 case 0x116: /* movhpd */
3123 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3124 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3127 rm = (modrm & 7) | REX_B(s);
3128 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3129 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3132 case 0x216: /* movshdup */
3134 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3135 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3137 rm = (modrm & 7) | REX_B(s);
3138 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3139 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3140 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3141 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3143 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3144 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3145 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3146 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3148 case 0x7e: /* movd ea, mm */
3149 #ifdef TARGET_X86_64
3150 if (s->dflag == 2) {
3151 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3152 offsetof(CPUX86State,fpregs[reg].mmx));
3153 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3157 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3158 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3159 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3162 case 0x17e: /* movd ea, xmm */
3163 #ifdef TARGET_X86_64
3164 if (s->dflag == 2) {
3165 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3166 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3167 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3171 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3172 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3173 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3176 case 0x27e: /* movq xmm, ea */
3178 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3179 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3181 rm = (modrm & 7) | REX_B(s);
3182 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3183 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3185 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3187 case 0x7f: /* movq ea, mm */
3189 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3190 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3193 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3194 offsetof(CPUX86State,fpregs[reg].mmx));
3197 case 0x011: /* movups */
3198 case 0x111: /* movupd */
3199 case 0x029: /* movaps */
3200 case 0x129: /* movapd */
3201 case 0x17f: /* movdqa ea, xmm */
3202 case 0x27f: /* movdqu ea, xmm */
3204 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3205 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3207 rm = (modrm & 7) | REX_B(s);
3208 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3209 offsetof(CPUX86State,xmm_regs[reg]));
3212 case 0x211: /* movss ea, xmm */
3214 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3215 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3216 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3218 rm = (modrm & 7) | REX_B(s);
3219 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3220 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3223 case 0x311: /* movsd ea, xmm */
3225 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3226 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3228 rm = (modrm & 7) | REX_B(s);
3229 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3230 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3233 case 0x013: /* movlps */
3234 case 0x113: /* movlpd */
3236 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3237 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3242 case 0x017: /* movhps */
3243 case 0x117: /* movhpd */
3245 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3246 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3251 case 0x71: /* shift mm, im */
3254 case 0x171: /* shift xmm, im */
3257 val = ldub_code(s->pc++);
3259 gen_op_movl_T0_im(val);
3260 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3262 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3263 op1_offset = offsetof(CPUX86State,xmm_t0);
3265 gen_op_movl_T0_im(val);
3266 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3268 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3269 op1_offset = offsetof(CPUX86State,mmx_t0);
3271 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3275 rm = (modrm & 7) | REX_B(s);
3276 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3279 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3281 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3282 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3283 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3285 case 0x050: /* movmskps */
3286 rm = (modrm & 7) | REX_B(s);
3287 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3288 offsetof(CPUX86State,xmm_regs[rm]));
3289 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3290 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3291 gen_op_mov_reg_T0(OT_LONG, reg);
3293 case 0x150: /* movmskpd */
3294 rm = (modrm & 7) | REX_B(s);
3295 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3296 offsetof(CPUX86State,xmm_regs[rm]));
3297 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3298 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3299 gen_op_mov_reg_T0(OT_LONG, reg);
3301 case 0x02a: /* cvtpi2ps */
3302 case 0x12a: /* cvtpi2pd */
3303 tcg_gen_helper_0_0(helper_enter_mmx);
3305 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3306 op2_offset = offsetof(CPUX86State,mmx_t0);
3307 gen_ldq_env_A0(s->mem_index, op2_offset);
3310 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3312 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3313 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3314 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3317 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3321 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3325 case 0x22a: /* cvtsi2ss */
3326 case 0x32a: /* cvtsi2sd */
3327 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3328 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3329 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3330 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3331 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3332 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3333 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3335 case 0x02c: /* cvttps2pi */
3336 case 0x12c: /* cvttpd2pi */
3337 case 0x02d: /* cvtps2pi */
3338 case 0x12d: /* cvtpd2pi */
3339 tcg_gen_helper_0_0(helper_enter_mmx);
3341 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3342 op2_offset = offsetof(CPUX86State,xmm_t0);
3343 gen_ldo_env_A0(s->mem_index, op2_offset);
3345 rm = (modrm & 7) | REX_B(s);
3346 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3348 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3349 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3350 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3353 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3356 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3359 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3362 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3366 case 0x22c: /* cvttss2si */
3367 case 0x32c: /* cvttsd2si */
3368 case 0x22d: /* cvtss2si */
3369 case 0x32d: /* cvtsd2si */
3370 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3372 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3374 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3376 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3377 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3379 op2_offset = offsetof(CPUX86State,xmm_t0);
3381 rm = (modrm & 7) | REX_B(s);
3382 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3384 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3386 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3387 if (ot == OT_LONG) {
3388 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3389 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3391 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3393 gen_op_mov_reg_T0(ot, reg);
3395 case 0xc4: /* pinsrw */
3398 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3399 val = ldub_code(s->pc++);
3402 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3403 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3406 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3407 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3410 case 0xc5: /* pextrw */
3414 val = ldub_code(s->pc++);
3417 rm = (modrm & 7) | REX_B(s);
3418 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3419 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3423 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3424 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3426 reg = ((modrm >> 3) & 7) | rex_r;
3427 gen_op_mov_reg_T0(OT_LONG, reg);
3429 case 0x1d6: /* movq ea, xmm */
3431 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3432 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3434 rm = (modrm & 7) | REX_B(s);
3435 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3436 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3437 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3440 case 0x2d6: /* movq2dq */
3441 tcg_gen_helper_0_0(helper_enter_mmx);
3443 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3444 offsetof(CPUX86State,fpregs[rm].mmx));
3445 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3447 case 0x3d6: /* movdq2q */
3448 tcg_gen_helper_0_0(helper_enter_mmx);
3449 rm = (modrm & 7) | REX_B(s);
3450 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3451 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3453 case 0xd7: /* pmovmskb */
3458 rm = (modrm & 7) | REX_B(s);
3459 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3460 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3463 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3464 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3466 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3467 reg = ((modrm >> 3) & 7) | rex_r;
3468 gen_op_mov_reg_T0(OT_LONG, reg);
3474 /* generic MMX or SSE operation */
3476 case 0x70: /* pshufx insn */
3477 case 0xc6: /* pshufx insn */
3478 case 0xc2: /* compare insns */
3485 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3487 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3488 op2_offset = offsetof(CPUX86State,xmm_t0);
3489 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3491 /* specific case for SSE single instructions */
3494 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3495 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3498 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3501 gen_ldo_env_A0(s->mem_index, op2_offset);
3504 rm = (modrm & 7) | REX_B(s);
3505 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3508 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3510 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3511 op2_offset = offsetof(CPUX86State,mmx_t0);
3512 gen_ldq_env_A0(s->mem_index, op2_offset);
3515 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3519 case 0x0f: /* 3DNow! data insns */
3520 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3522 val = ldub_code(s->pc++);
3523 sse_op2 = sse_op_table5[val];
3526 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3527 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3528 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3530 case 0x70: /* pshufx insn */
3531 case 0xc6: /* pshufx insn */
3532 val = ldub_code(s->pc++);
3533 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3534 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3535 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3539 val = ldub_code(s->pc++);
3542 sse_op2 = sse_op_table4[val][b1];
3543 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3544 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3545 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3548 /* maskmov: we must prepare A0 */
3551 #ifdef TARGET_X86_64
3552 if (s->aflag == 2) {
3553 gen_op_movq_A0_reg(R_EDI);
3557 gen_op_movl_A0_reg(R_EDI);
3559 gen_op_andl_A0_ffff();
3561 gen_add_A0_ds_seg(s);
3563 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3564 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3565 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3568 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3569 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3570 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3573 if (b == 0x2e || b == 0x2f) {
3574 s->cc_op = CC_OP_EFLAGS;
3579 /* convert one instruction. s->is_jmp is set if the translation must
3580 be stopped. Return the next pc value */
3581 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3583 int b, prefixes, aflag, dflag;
3585 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3586 target_ulong next_eip, tval;
3589 if (unlikely(loglevel & CPU_LOG_TB_OP))
3590 tcg_gen_debug_insn_start(pc_start);
3598 #ifdef TARGET_X86_64
3603 s->rip_offset = 0; /* for relative ip address */
3605 b = ldub_code(s->pc);
3607 /* check prefixes */
3608 #ifdef TARGET_X86_64
3612 prefixes |= PREFIX_REPZ;
3615 prefixes |= PREFIX_REPNZ;
3618 prefixes |= PREFIX_LOCK;
3639 prefixes |= PREFIX_DATA;
3642 prefixes |= PREFIX_ADR;
3646 rex_w = (b >> 3) & 1;
3647 rex_r = (b & 0x4) << 1;
3648 s->rex_x = (b & 0x2) << 2;
3649 REX_B(s) = (b & 0x1) << 3;
3650 x86_64_hregs = 1; /* select uniform byte register addressing */
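/* Illustrative note: a REX prefix 0x4X carries W R X B in its low nibble;
   the shifts above pre-scale each bit so it can be OR'ed straight into a
   3-bit register number.  E.g. b = 0x49 (REX.W + REX.B) yields rex_w = 1,
   rex_r = 0, s->rex_x = 0 and REX_B(s) = 8, extending rm to r8..r15. */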
3654 /* 0x66 is ignored if rex.w is set */
3657 if (prefixes & PREFIX_DATA)
3660 if (!(prefixes & PREFIX_ADR))
3667 prefixes |= PREFIX_REPZ;
3670 prefixes |= PREFIX_REPNZ;
3673 prefixes |= PREFIX_LOCK;
3694 prefixes |= PREFIX_DATA;
3697 prefixes |= PREFIX_ADR;
3700 if (prefixes & PREFIX_DATA)
3702 if (prefixes & PREFIX_ADR)
3706 s->prefix = prefixes;
3710 /* lock generation */
3711 if (prefixes & PREFIX_LOCK)
3712 tcg_gen_helper_0_0(helper_lock);
3714 /* now check op code */
3718 /**************************/
3719 /* extended op code */
3720 b = ldub_code(s->pc++) | 0x100;
3723 /**************************/
3741 ot = dflag + OT_WORD;
3744 case 0: /* OP Ev, Gv */
3745 modrm = ldub_code(s->pc++);
3746 reg = ((modrm >> 3) & 7) | rex_r;
3747 mod = (modrm >> 6) & 3;
3748 rm = (modrm & 7) | REX_B(s);
3750 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3752 } else if (op == OP_XORL && rm == reg) {
3754 /* xor reg, reg optimisation */
3756 s->cc_op = CC_OP_LOGICB + ot;
3757 gen_op_mov_reg_T0(ot, reg);
3758 gen_op_update1_cc();
3763 gen_op_mov_TN_reg(ot, 1, reg);
3764 gen_op(s, op, ot, opreg);
3766 case 1: /* OP Gv, Ev */
3767 modrm = ldub_code(s->pc++);
3768 mod = (modrm >> 6) & 3;
3769 reg = ((modrm >> 3) & 7) | rex_r;
3770 rm = (modrm & 7) | REX_B(s);
3772 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3773 gen_op_ld_T1_A0(ot + s->mem_index);
3774 } else if (op == OP_XORL && rm == reg) {
3777 gen_op_mov_TN_reg(ot, 1, rm);
3779 gen_op(s, op, ot, reg);
3781 case 2: /* OP A, Iv */
3782 val = insn_get(s, ot);
3783 gen_op_movl_T1_im(val);
3784 gen_op(s, op, ot, OR_EAX);
3790 case 0x80: /* GRP1 */
3800 ot = dflag + OT_WORD;
3802 modrm = ldub_code(s->pc++);
3803 mod = (modrm >> 6) & 3;
3804 rm = (modrm & 7) | REX_B(s);
3805 op = (modrm >> 3) & 7;
3811 s->rip_offset = insn_const_size(ot);
3812 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3823 val = insn_get(s, ot);
3826 val = (int8_t)insn_get(s, OT_BYTE);
3829 gen_op_movl_T1_im(val);
3830 gen_op(s, op, ot, opreg);
3834 /**************************/
3835 /* inc, dec, and other misc arith */
3836 case 0x40 ... 0x47: /* inc Gv */
3837 ot = dflag ? OT_LONG : OT_WORD;
3838 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3840 case 0x48 ... 0x4f: /* dec Gv */
3841 ot = dflag ? OT_LONG : OT_WORD;
3842 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3844 case 0xf6: /* GRP3 */
3849 ot = dflag + OT_WORD;
3851 modrm = ldub_code(s->pc++);
3852 mod = (modrm >> 6) & 3;
3853 rm = (modrm & 7) | REX_B(s);
3854 op = (modrm >> 3) & 7;
3857 s->rip_offset = insn_const_size(ot);
3858 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3859 gen_op_ld_T0_A0(ot + s->mem_index);
3861 gen_op_mov_TN_reg(ot, 0, rm);
3866 val = insn_get(s, ot);
3867 gen_op_movl_T1_im(val);
3868 gen_op_testl_T0_T1_cc();
3869 s->cc_op = CC_OP_LOGICB + ot;
3872 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3874 gen_op_st_T0_A0(ot + s->mem_index);
3876 gen_op_mov_reg_T0(ot, rm);
3880 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3882 gen_op_st_T0_A0(ot + s->mem_index);
3884 gen_op_mov_reg_T0(ot, rm);
3886 gen_op_update_neg_cc();
3887 s->cc_op = CC_OP_SUBB + ot;
3892 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3893 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3894 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3895 /* XXX: use 32 bit mul which could be faster */
3896 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3897 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3898 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3899 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3900 s->cc_op = CC_OP_MULB;
3903 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3904 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3905 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3906 /* XXX: use 32 bit mul which could be faster */
3907 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3908 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3909 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3910 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3911 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3912 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3913 s->cc_op = CC_OP_MULW;
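/* Worked example (illustrative): AX = 0x1234 and a source of 0x0100 give
   the 32-bit product 0x00123400, so AX <- 0x3400 and DX <- 0x0012;
   CC_OP_MULW then sets CF = OF = 1 because the high half kept in
   cc_src is non-zero. */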
3917 #ifdef TARGET_X86_64
3918 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3919 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3920 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3921 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3922 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3923 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3924 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3925 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3926 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3930 t0 = tcg_temp_new(TCG_TYPE_I64);
3931 t1 = tcg_temp_new(TCG_TYPE_I64);
3932 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3933 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3934 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3935 tcg_gen_mul_i64(t0, t0, t1);
3936 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3937 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3938 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3939 tcg_gen_shri_i64(t0, t0, 32);
3940 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3941 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3942 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3945 s->cc_op = CC_OP_MULL;
3947 #ifdef TARGET_X86_64
3949 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3950 s->cc_op = CC_OP_MULQ;
3958 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3959 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3960 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3961 /* XXX: use 32 bit mul which could be faster */
3962 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3963 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3964 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3965 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
3966 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3967 s->cc_op = CC_OP_MULB;
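/* Worked example (illustrative): cc_src above is result - (int8_t)result,
   i.e. zero iff the 16-bit product fits in a signed byte.  AL = 100 and a
   source of 2 give AX = 200 = 0xc8; (int8_t)0xc8 is -56, so cc_src != 0
   and CC_OP_MULB reports CF = OF = 1. */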
3970 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3971 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
3972 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
3973 /* XXX: use 32 bit mul which could be faster */
3974 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3975 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3976 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3977 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
3978 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3979 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3980 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3981 s->cc_op = CC_OP_MULW;
3985 #ifdef TARGET_X86_64
3986 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3987 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
3988 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
3989 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3990 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3991 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3992 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
3993 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3994 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3995 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3999 t0 = tcg_temp_new(TCG_TYPE_I64);
4000 t1 = tcg_temp_new(TCG_TYPE_I64);
4001 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4002 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4003 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4004 tcg_gen_mul_i64(t0, t0, t1);
4005 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4006 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4007 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4008 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4009 tcg_gen_shri_i64(t0, t0, 32);
4010 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4011 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4012 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4015 s->cc_op = CC_OP_MULL;
4017 #ifdef TARGET_X86_64
4019 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
4020 s->cc_op = CC_OP_MULQ;
4028 gen_jmp_im(pc_start - s->cs_base);
4029 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
4032 gen_jmp_im(pc_start - s->cs_base);
4033 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
4037 gen_jmp_im(pc_start - s->cs_base);
4038 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
4040 #ifdef TARGET_X86_64
4042 gen_jmp_im(pc_start - s->cs_base);
4043 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
4051 gen_jmp_im(pc_start - s->cs_base);
4052 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
4055 gen_jmp_im(pc_start - s->cs_base);
4056 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
4060 gen_jmp_im(pc_start - s->cs_base);
4061 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
4063 #ifdef TARGET_X86_64
4065 gen_jmp_im(pc_start - s->cs_base);
4066 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4076 case 0xfe: /* GRP4 */
4077 case 0xff: /* GRP5 */
4081 ot = dflag + OT_WORD;
4083 modrm = ldub_code(s->pc++);
4084 mod = (modrm >> 6) & 3;
4085 rm = (modrm & 7) | REX_B(s);
4086 op = (modrm >> 3) & 7;
4087 if (op >= 2 && b == 0xfe) {
4091 if (op == 2 || op == 4) {
4092 /* operand size for jumps is 64 bit */
4094 } else if (op == 3 || op == 5) {
4095 /* for far calls and jumps, the operand is 16 or 32 bit, even in long mode */
4097 ot = dflag ? OT_LONG : OT_WORD;
4098 } else if (op == 6) {
4099 /* default push size is 64 bit */
4100 ot = dflag ? OT_QUAD : OT_WORD;
4104 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4105 if (op >= 2 && op != 3 && op != 5)
4106 gen_op_ld_T0_A0(ot + s->mem_index);
4108 gen_op_mov_TN_reg(ot, 0, rm);
4112 case 0: /* inc Ev */
4117 gen_inc(s, ot, opreg, 1);
4119 case 1: /* dec Ev */
4124 gen_inc(s, ot, opreg, -1);
4126 case 2: /* call Ev */
4127 /* XXX: optimize if memory (no 'and' is necessary) */
4129 gen_op_andl_T0_ffff();
4130 next_eip = s->pc - s->cs_base;
4131 gen_movtl_T1_im(next_eip);
4136 case 3: /* lcall Ev */
4137 gen_op_ld_T1_A0(ot + s->mem_index);
4138 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4139 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4141 if (s->pe && !s->vm86) {
4142 if (s->cc_op != CC_OP_DYNAMIC)
4143 gen_op_set_cc_op(s->cc_op);
4144 gen_jmp_im(pc_start - s->cs_base);
4145 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4146 tcg_gen_helper_0_4(helper_lcall_protected,
4147 cpu_tmp2_i32, cpu_T[1],
4148 tcg_const_i32(dflag),
4149 tcg_const_i32(s->pc - pc_start));
4151 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4152 tcg_gen_helper_0_4(helper_lcall_real,
4153 cpu_tmp2_i32, cpu_T[1],
4154 tcg_const_i32(dflag),
4155 tcg_const_i32(s->pc - s->cs_base));
4159 case 4: /* jmp Ev */
4161 gen_op_andl_T0_ffff();
4165 case 5: /* ljmp Ev */
4166 gen_op_ld_T1_A0(ot + s->mem_index);
4167 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4168 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4170 if (s->pe && !s->vm86) {
4171 if (s->cc_op != CC_OP_DYNAMIC)
4172 gen_op_set_cc_op(s->cc_op);
4173 gen_jmp_im(pc_start - s->cs_base);
4174 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4175 tcg_gen_helper_0_3(helper_ljmp_protected,
4178 tcg_const_i32(s->pc - pc_start));
4180 gen_op_movl_seg_T0_vm(R_CS);
4181 gen_op_movl_T0_T1();
4186 case 6: /* push Ev */
4194 case 0x84: /* test Ev, Gv */
4199 ot = dflag + OT_WORD;
4201 modrm = ldub_code(s->pc++);
4202 mod = (modrm >> 6) & 3;
4203 rm = (modrm & 7) | REX_B(s);
4204 reg = ((modrm >> 3) & 7) | rex_r;
4206 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4207 gen_op_mov_TN_reg(ot, 1, reg);
4208 gen_op_testl_T0_T1_cc();
4209 s->cc_op = CC_OP_LOGICB + ot;
4212 case 0xa8: /* test eAX, Iv */
4217 ot = dflag + OT_WORD;
4218 val = insn_get(s, ot);
4220 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4221 gen_op_movl_T1_im(val);
4222 gen_op_testl_T0_T1_cc();
4223 s->cc_op = CC_OP_LOGICB + ot;
4226 case 0x98: /* CWDE/CBW */
4227 #ifdef TARGET_X86_64
4229 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4230 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4231 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4235 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4236 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4237 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4239 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4240 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4241 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4244 case 0x99: /* CDQ/CWD */
4245 #ifdef TARGET_X86_64
4247 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4248 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4249 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4253 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4254 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4255 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4256 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4258 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4259 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4260 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4261 gen_op_mov_reg_T0(OT_WORD, R_EDX);
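/* Worked example (illustrative): CWD with AX = 0x8000 takes the 16-bit
   path above: sign extension followed by an arithmetic shift right by 15
   leaves DX = 0xffff, i.e. DX:AX = 0xffff8000. */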
4264 case 0x1af: /* imul Gv, Ev */
4265 case 0x69: /* imul Gv, Ev, I */
4267 ot = dflag + OT_WORD;
4268 modrm = ldub_code(s->pc++);
4269 reg = ((modrm >> 3) & 7) | rex_r;
4271 s->rip_offset = insn_const_size(ot);
4274 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4276 val = insn_get(s, ot);
4277 gen_op_movl_T1_im(val);
4278 } else if (b == 0x6b) {
4279 val = (int8_t)insn_get(s, OT_BYTE);
4280 gen_op_movl_T1_im(val);
4282 gen_op_mov_TN_reg(ot, 1, reg);
4285 #ifdef TARGET_X86_64
4286 if (ot == OT_QUAD) {
4287 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4290 if (ot == OT_LONG) {
4291 #ifdef TARGET_X86_64
4292 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4293 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4294 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4295 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4296 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4297 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4301 t0 = tcg_temp_new(TCG_TYPE_I64);
4302 t1 = tcg_temp_new(TCG_TYPE_I64);
4303 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4304 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4305 tcg_gen_mul_i64(t0, t0, t1);
4306 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4307 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4308 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4309 tcg_gen_shri_i64(t0, t0, 32);
4310 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4311 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4315 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4316 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4317 /* XXX: use 32 bit mul which could be faster */
4318 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4319 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4320 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4321 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4323 gen_op_mov_reg_T0(ot, reg);
4324 s->cc_op = CC_OP_MULB + ot;
4327 case 0x1c1: /* xadd Ev, Gv */
4331 ot = dflag + OT_WORD;
4332 modrm = ldub_code(s->pc++);
4333 reg = ((modrm >> 3) & 7) | rex_r;
4334 mod = (modrm >> 6) & 3;
4336 rm = (modrm & 7) | REX_B(s);
4337 gen_op_mov_TN_reg(ot, 0, reg);
4338 gen_op_mov_TN_reg(ot, 1, rm);
4339 gen_op_addl_T0_T1();
4340 gen_op_mov_reg_T1(ot, reg);
4341 gen_op_mov_reg_T0(ot, rm);
4343 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4344 gen_op_mov_TN_reg(ot, 0, reg);
4345 gen_op_ld_T1_A0(ot + s->mem_index);
4346 gen_op_addl_T0_T1();
4347 gen_op_st_T0_A0(ot + s->mem_index);
4348 gen_op_mov_reg_T1(ot, reg);
4350 gen_op_update2_cc();
4351 s->cc_op = CC_OP_ADDB + ot;
4354 case 0x1b1: /* cmpxchg Ev, Gv */
4361 ot = dflag + OT_WORD;
4362 modrm = ldub_code(s->pc++);
4363 reg = ((modrm >> 3) & 7) | rex_r;
4364 mod = (modrm >> 6) & 3;
4365 gen_op_mov_TN_reg(ot, 1, reg);
4367 rm = (modrm & 7) | REX_B(s);
4368 gen_op_mov_TN_reg(ot, 0, rm);
4370 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4371 gen_op_ld_T0_A0(ot + s->mem_index);
4372 rm = 0; /* avoid warning */
4374 label1 = gen_new_label();
4375 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4376 tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4377 gen_extu(ot, cpu_T3);
4378 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4380 label2 = gen_new_label();
4381 gen_op_mov_reg_T0(ot, R_EAX);
4383 gen_set_label(label1);
4384 gen_op_mov_reg_T1(ot, rm);
4385 gen_set_label(label2);
4387 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4388 gen_op_mov_reg_T0(ot, R_EAX);
4389 gen_set_label(label1);
4391 gen_op_st_T1_A0(ot + s->mem_index);
4393 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4394 tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4395 s->cc_op = CC_OP_SUBB + ot;
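/* Illustrative summary: the branches above implement
       if (accumulator == dest) { ZF = 1; dest = src; }
       else                     { ZF = 0; accumulator = dest; }
   (the memory form always stores, rewriting the old value on mismatch),
   and cc_src/cc_dst are loaded so CC_OP_SUBB + ot recomputes the flags
   of the implied CMP. */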
4398 case 0x1c7: /* cmpxchg8b */
4399 modrm = ldub_code(s->pc++);
4400 mod = (modrm >> 6) & 3;
4401 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4403 #ifdef TARGET_X86_64
4405 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4407 gen_jmp_im(pc_start - s->cs_base);
4408 if (s->cc_op != CC_OP_DYNAMIC)
4409 gen_op_set_cc_op(s->cc_op);
4410 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4411 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
4415 if (!(s->cpuid_features & CPUID_CX8))
4417 gen_jmp_im(pc_start - s->cs_base);
4418 if (s->cc_op != CC_OP_DYNAMIC)
4419 gen_op_set_cc_op(s->cc_op);
4420 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4421 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4423 s->cc_op = CC_OP_EFLAGS;
4426 /**************************/
4428 case 0x50 ... 0x57: /* push */
4429 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4432 case 0x58 ... 0x5f: /* pop */
4434 ot = dflag ? OT_QUAD : OT_WORD;
4436 ot = dflag + OT_WORD;
4439 /* NOTE: order is important for pop %sp */
4441 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4443 case 0x60: /* pusha */
4448 case 0x61: /* popa */
4453 case 0x68: /* push Iv */
4456 ot = dflag ? OT_QUAD : OT_WORD;
4458 ot = dflag + OT_WORD;
4461 val = insn_get(s, ot);
4463 val = (int8_t)insn_get(s, OT_BYTE);
4464 gen_op_movl_T0_im(val);
4467 case 0x8f: /* pop Ev */
4469 ot = dflag ? OT_QUAD : OT_WORD;
4471 ot = dflag + OT_WORD;
4473 modrm = ldub_code(s->pc++);
4474 mod = (modrm >> 6) & 3;
4477 /* NOTE: order is important for pop %sp */
4479 rm = (modrm & 7) | REX_B(s);
4480 gen_op_mov_reg_T0(ot, rm);
4482 /* NOTE: order is important too for MMU exceptions */
4483 s->popl_esp_hack = 1 << ot;
4484 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4485 s->popl_esp_hack = 0;
4489 case 0xc8: /* enter */
4492 val = lduw_code(s->pc);
4494 level = ldub_code(s->pc++);
4495 gen_enter(s, val, level);
4498 case 0xc9: /* leave */
4499 /* XXX: exception not precise (ESP is updated before potential exception) */
4501 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4502 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4503 } else if (s->ss32) {
4504 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4505 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4507 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4508 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4512 ot = dflag ? OT_QUAD : OT_WORD;
4514 ot = dflag + OT_WORD;
4516 gen_op_mov_reg_T0(ot, R_EBP);
4519 case 0x06: /* push es */
4520 case 0x0e: /* push cs */
4521 case 0x16: /* push ss */
4522 case 0x1e: /* push ds */
4525 gen_op_movl_T0_seg(b >> 3);
4528 case 0x1a0: /* push fs */
4529 case 0x1a8: /* push gs */
4530 gen_op_movl_T0_seg((b >> 3) & 7);
4533 case 0x07: /* pop es */
4534 case 0x17: /* pop ss */
4535 case 0x1f: /* pop ds */
4540 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4543 /* if reg == SS, inhibit interrupts/trace. */
4544 /* If several instructions disable interrupts, only the first does it. */
4546 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4547 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4551 gen_jmp_im(s->pc - s->cs_base);
4555 case 0x1a1: /* pop fs */
4556 case 0x1a9: /* pop gs */
4558 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4561 gen_jmp_im(s->pc - s->cs_base);
4566 /**************************/
4569 case 0x89: /* mov Gv, Ev */
4573 ot = dflag + OT_WORD;
4574 modrm = ldub_code(s->pc++);
4575 reg = ((modrm >> 3) & 7) | rex_r;
4577 /* generate a generic store */
4578 gen_ldst_modrm(s, modrm, ot, reg, 1);
4581 case 0xc7: /* mov Ev, Iv */
4585 ot = dflag + OT_WORD;
4586 modrm = ldub_code(s->pc++);
4587 mod = (modrm >> 6) & 3;
4589 s->rip_offset = insn_const_size(ot);
4590 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4592 val = insn_get(s, ot);
4593 gen_op_movl_T0_im(val);
4595 gen_op_st_T0_A0(ot + s->mem_index);
4597 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4600 case 0x8b: /* mov Ev, Gv */
4604 ot = OT_WORD + dflag;
4605 modrm = ldub_code(s->pc++);
4606 reg = ((modrm >> 3) & 7) | rex_r;
4608 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4609 gen_op_mov_reg_T0(ot, reg);
4611 case 0x8e: /* mov seg, Gv */
4612 modrm = ldub_code(s->pc++);
4613 reg = (modrm >> 3) & 7;
4614 if (reg >= 6 || reg == R_CS)
4616 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4617 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4619 /* if reg == SS, inhibit interrupts/trace */
4620 /* If several instructions disable interrupts, only the first does it. */
4622 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4623 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4627 gen_jmp_im(s->pc - s->cs_base);
4631 case 0x8c: /* mov Gv, seg */
4632 modrm = ldub_code(s->pc++);
4633 reg = (modrm >> 3) & 7;
4634 mod = (modrm >> 6) & 3;
4637 gen_op_movl_T0_seg(reg);
4639 ot = OT_WORD + dflag;
4642 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4645 case 0x1b6: /* movzbS Gv, Eb */
4646 case 0x1b7: /* movzwS Gv, Ew */
4647 case 0x1be: /* movsbS Gv, Eb */
4648 case 0x1bf: /* movswS Gv, Ew */
4651 /* d_ot is the size of destination */
4652 d_ot = dflag + OT_WORD;
4653 /* ot is the size of source */
4654 ot = (b & 1) + OT_BYTE;
4655 modrm = ldub_code(s->pc++);
4656 reg = ((modrm >> 3) & 7) | rex_r;
4657 mod = (modrm >> 6) & 3;
4658 rm = (modrm & 7) | REX_B(s);
4661 gen_op_mov_TN_reg(ot, 0, rm);
4662 switch(ot | (b & 8)) {
4664 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4667 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4670 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4674 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4677 gen_op_mov_reg_T0(d_ot, reg);
4679 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4681 gen_op_lds_T0_A0(ot + s->mem_index);
4683 gen_op_ldu_T0_A0(ot + s->mem_index);
4685 gen_op_mov_reg_T0(d_ot, reg);
4690 case 0x8d: /* lea */
4691 ot = dflag + OT_WORD;
4692 modrm = ldub_code(s->pc++);
4693 mod = (modrm >> 6) & 3;
4696 reg = ((modrm >> 3) & 7) | rex_r;
4697 /* we must ensure that no segment is added */
4701 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4703 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4706 case 0xa0: /* mov EAX, Ov */
4708 case 0xa2: /* mov Ov, EAX */
4711 target_ulong offset_addr;
4716 ot = dflag + OT_WORD;
4717 #ifdef TARGET_X86_64
4718 if (s->aflag == 2) {
4719 offset_addr = ldq_code(s->pc);
4721 gen_op_movq_A0_im(offset_addr);
4726 offset_addr = insn_get(s, OT_LONG);
4728 offset_addr = insn_get(s, OT_WORD);
4730 gen_op_movl_A0_im(offset_addr);
4732 gen_add_A0_ds_seg(s);
4734 gen_op_ld_T0_A0(ot + s->mem_index);
4735 gen_op_mov_reg_T0(ot, R_EAX);
4737 gen_op_mov_TN_reg(ot, 0, R_EAX);
4738 gen_op_st_T0_A0(ot + s->mem_index);
4742 case 0xd7: /* xlat */
4743 #ifdef TARGET_X86_64
4744 if (s->aflag == 2) {
4745 gen_op_movq_A0_reg(R_EBX);
4746 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4747 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4748 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4752 gen_op_movl_A0_reg(R_EBX);
4753 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4754 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4755 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4757 gen_op_andl_A0_ffff();
4759 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4761 gen_add_A0_ds_seg(s);
4762 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4763 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
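/* Illustrative summary: XLAT computes
       AL = *(uint8_t *)(ds_base + (E/R)BX + AL)
   with the address truncated to 16 or 32 bits outside 64-bit mode,
   exactly the A0 arithmetic above. */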
4765 case 0xb0 ... 0xb7: /* mov R, Ib */
4766 val = insn_get(s, OT_BYTE);
4767 gen_op_movl_T0_im(val);
4768 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4770 case 0xb8 ... 0xbf: /* mov R, Iv */
4771 #ifdef TARGET_X86_64
4775 tmp = ldq_code(s->pc);
4777 reg = (b & 7) | REX_B(s);
4778 gen_movtl_T0_im(tmp);
4779 gen_op_mov_reg_T0(OT_QUAD, reg);
4783 ot = dflag ? OT_LONG : OT_WORD;
4784 val = insn_get(s, ot);
4785 reg = (b & 7) | REX_B(s);
4786 gen_op_movl_T0_im(val);
4787 gen_op_mov_reg_T0(ot, reg);
4791 case 0x91 ... 0x97: /* xchg R, EAX */
4792 ot = dflag + OT_WORD;
4793 reg = (b & 7) | REX_B(s);
4797 case 0x87: /* xchg Ev, Gv */
4801 ot = dflag + OT_WORD;
4802 modrm = ldub_code(s->pc++);
4803 reg = ((modrm >> 3) & 7) | rex_r;
4804 mod = (modrm >> 6) & 3;
4806 rm = (modrm & 7) | REX_B(s);
4808 gen_op_mov_TN_reg(ot, 0, reg);
4809 gen_op_mov_TN_reg(ot, 1, rm);
4810 gen_op_mov_reg_T0(ot, rm);
4811 gen_op_mov_reg_T1(ot, reg);
4813 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4814 gen_op_mov_TN_reg(ot, 0, reg);
4815 /* for xchg, lock is implicit */
4816 if (!(prefixes & PREFIX_LOCK))
4817 tcg_gen_helper_0_0(helper_lock);
4818 gen_op_ld_T1_A0(ot + s->mem_index);
4819 gen_op_st_T0_A0(ot + s->mem_index);
4820 if (!(prefixes & PREFIX_LOCK))
4821 tcg_gen_helper_0_0(helper_unlock);
4822 gen_op_mov_reg_T1(ot, reg);
4825 case 0xc4: /* les Gv */
4830 case 0xc5: /* lds Gv */
4835 case 0x1b2: /* lss Gv */
4838 case 0x1b4: /* lfs Gv */
4841 case 0x1b5: /* lgs Gv */
4844 ot = dflag ? OT_LONG : OT_WORD;
4845 modrm = ldub_code(s->pc++);
4846 reg = ((modrm >> 3) & 7) | rex_r;
4847 mod = (modrm >> 6) & 3;
4850 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4851 gen_op_ld_T1_A0(ot + s->mem_index);
4852 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4853 /* load the segment first to handle exceptions properly */
4854 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4855 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4856 /* then put the data */
4857 gen_op_mov_reg_T1(ot, reg);
4859 gen_jmp_im(s->pc - s->cs_base);
4864 /************************/
4875 ot = dflag + OT_WORD;
4877 modrm = ldub_code(s->pc++);
4878 mod = (modrm >> 6) & 3;
4879 op = (modrm >> 3) & 7;
4885 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4888 opreg = (modrm & 7) | REX_B(s);
4893 gen_shift(s, op, ot, opreg, OR_ECX);
4896 shift = ldub_code(s->pc++);
4898 gen_shifti(s, op, ot, opreg, shift);
4913 case 0x1a4: /* shld imm */
4917 case 0x1a5: /* shld cl */
4921 case 0x1ac: /* shrd imm */
4925 case 0x1ad: /* shrd cl */
4929 ot = dflag + OT_WORD;
4930 modrm = ldub_code(s->pc++);
4931 mod = (modrm >> 6) & 3;
4932 rm = (modrm & 7) | REX_B(s);
4933 reg = ((modrm >> 3) & 7) | rex_r;
4935 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4940 gen_op_mov_TN_reg(ot, 1, reg);
4943 val = ldub_code(s->pc++);
4944 tcg_gen_movi_tl(cpu_T3, val);
4946 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4948 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
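/* Illustrative summary: with the count in T3, SHLD performs, for counts
   1..width-1,  dest = (dest << c) | (src >> (width - c)),  pulling the
   vacated low bits from the top of the source register in T1; SHRD is
   the mirror image.  gen_shiftd_rm_T1_T3() emits that double-width
   shift for T0/T1. */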
4951 /************************/
4954 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4955 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4956 /* XXX: what to do if illegal op ? */
4957 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4960 modrm = ldub_code(s->pc++);
4961 mod = (modrm >> 6) & 3;
4963 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4966 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4968 case 0x00 ... 0x07: /* fxxxs */
4969 case 0x10 ... 0x17: /* fixxxl */
4970 case 0x20 ... 0x27: /* fxxxl */
4971 case 0x30 ... 0x37: /* fixxx */
4978 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4979 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4980 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4983 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4984 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4985 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4988 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4989 (s->mem_index >> 2) - 1);
4990 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4994 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4995 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4996 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5000 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5002 /* fcomp needs pop */
5003 tcg_gen_helper_0_0(helper_fpop);
5007 case 0x08: /* flds */
5008 case 0x0a: /* fsts */
5009 case 0x0b: /* fstps */
5010 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5011 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5012 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5017 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5018 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5019 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
5022 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5023 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5024 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5027 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5028 (s->mem_index >> 2) - 1);
5029 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
5033 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5034 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5035 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5040 /* XXX: the corresponding CPUID bit must be tested ! */
5043 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
5044 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5045 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5048 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
5049 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5050 (s->mem_index >> 2) - 1);
5054 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
5055 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5056 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5059 tcg_gen_helper_0_0(helper_fpop);
5064 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
5065 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5066 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5069 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
5070 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5071 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5074 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
5075 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5076 (s->mem_index >> 2) - 1);
5080 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
5081 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5082 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5086 tcg_gen_helper_0_0(helper_fpop);
5090 case 0x0c: /* fldenv mem */
5091 if (s->cc_op != CC_OP_DYNAMIC)
5092 gen_op_set_cc_op(s->cc_op);
5093 gen_jmp_im(pc_start - s->cs_base);
5094 tcg_gen_helper_0_2(helper_fldenv,
5095 cpu_A0, tcg_const_i32(s->dflag));
5097 case 0x0d: /* fldcw mem */
5098 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5099 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5100 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5102 case 0x0e: /* fnstenv mem */
5103 if (s->cc_op != CC_OP_DYNAMIC)
5104 gen_op_set_cc_op(s->cc_op);
5105 gen_jmp_im(pc_start - s->cs_base);
5106 tcg_gen_helper_0_2(helper_fstenv,
5107 cpu_A0, tcg_const_i32(s->dflag));
5109 case 0x0f: /* fnstcw mem */
5110 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5111 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5112 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5114 case 0x1d: /* fldt mem */
5115 if (s->cc_op != CC_OP_DYNAMIC)
5116 gen_op_set_cc_op(s->cc_op);
5117 gen_jmp_im(pc_start - s->cs_base);
5118 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5120 case 0x1f: /* fstpt mem */
5121 if (s->cc_op != CC_OP_DYNAMIC)
5122 gen_op_set_cc_op(s->cc_op);
5123 gen_jmp_im(pc_start - s->cs_base);
5124 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5125 tcg_gen_helper_0_0(helper_fpop);
5127 case 0x2c: /* frstor mem */
5128 if (s->cc_op != CC_OP_DYNAMIC)
5129 gen_op_set_cc_op(s->cc_op);
5130 gen_jmp_im(pc_start - s->cs_base);
5131 tcg_gen_helper_0_2(helper_frstor,
5132 cpu_A0, tcg_const_i32(s->dflag));
5134 case 0x2e: /* fnsave mem */
5135 if (s->cc_op != CC_OP_DYNAMIC)
5136 gen_op_set_cc_op(s->cc_op);
5137 gen_jmp_im(pc_start - s->cs_base);
5138 tcg_gen_helper_0_2(helper_fsave,
5139 cpu_A0, tcg_const_i32(s->dflag));
5141 case 0x2f: /* fnstsw mem */
5142 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5143 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5144 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5146 case 0x3c: /* fbld */
5147 if (s->cc_op != CC_OP_DYNAMIC)
5148 gen_op_set_cc_op(s->cc_op);
5149 gen_jmp_im(pc_start - s->cs_base);
5150 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5152 case 0x3e: /* fbstp */
5153 if (s->cc_op != CC_OP_DYNAMIC)
5154 gen_op_set_cc_op(s->cc_op);
5155 gen_jmp_im(pc_start - s->cs_base);
5156 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5157 tcg_gen_helper_0_0(helper_fpop);
5159 case 0x3d: /* fildll */
5160 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5161 (s->mem_index >> 2) - 1);
5162 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5164 case 0x3f: /* fistpll */
5165 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5166 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5167 (s->mem_index >> 2) - 1);
5168 tcg_gen_helper_0_0(helper_fpop);
5174 /* register float ops */
5178 case 0x08: /* fld sti */
5179 tcg_gen_helper_0_0(helper_fpush);
5180 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5182 case 0x09: /* fxchg sti */
5183 case 0x29: /* fxchg4 sti, undocumented op */
5184 case 0x39: /* fxchg7 sti, undocumented op */
5185 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5187 case 0x0a: /* grp d9/2 */
5190 /* check exceptions (FreeBSD FPU probe) */
5191 if (s->cc_op != CC_OP_DYNAMIC)
5192 gen_op_set_cc_op(s->cc_op);
5193 gen_jmp_im(pc_start - s->cs_base);
5194 tcg_gen_helper_0_0(helper_fwait);
5200 case 0x0c: /* grp d9/4 */
5203 tcg_gen_helper_0_0(helper_fchs_ST0);
5206 tcg_gen_helper_0_0(helper_fabs_ST0);
5209 tcg_gen_helper_0_0(helper_fldz_FT0);
5210 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5213 tcg_gen_helper_0_0(helper_fxam_ST0);
5219 case 0x0d: /* grp d9/5 */
5223 tcg_gen_helper_0_0(helper_fpush);
5224 tcg_gen_helper_0_0(helper_fld1_ST0);
5227 tcg_gen_helper_0_0(helper_fpush);
5228 tcg_gen_helper_0_0(helper_fldl2t_ST0);
5231 tcg_gen_helper_0_0(helper_fpush);
5232 tcg_gen_helper_0_0(helper_fldl2e_ST0);
5235 tcg_gen_helper_0_0(helper_fpush);
5236 tcg_gen_helper_0_0(helper_fldpi_ST0);
5239 tcg_gen_helper_0_0(helper_fpush);
5240 tcg_gen_helper_0_0(helper_fldlg2_ST0);
5243 tcg_gen_helper_0_0(helper_fpush);
5244 tcg_gen_helper_0_0(helper_fldln2_ST0);
5247 tcg_gen_helper_0_0(helper_fpush);
5248 tcg_gen_helper_0_0(helper_fldz_ST0);
case 0x0e: /* grp d9/6 */
tcg_gen_helper_0_0(helper_f2xm1);
tcg_gen_helper_0_0(helper_fyl2x);
tcg_gen_helper_0_0(helper_fptan);
case 3: /* fpatan */
tcg_gen_helper_0_0(helper_fpatan);
case 4: /* fxtract */
tcg_gen_helper_0_0(helper_fxtract);
case 5: /* fprem1 */
tcg_gen_helper_0_0(helper_fprem1);
case 6: /* fdecstp */
tcg_gen_helper_0_0(helper_fdecstp);
case 7: /* fincstp */
tcg_gen_helper_0_0(helper_fincstp);
case 0x0f: /* grp d9/7 */
tcg_gen_helper_0_0(helper_fprem);
case 1: /* fyl2xp1 */
tcg_gen_helper_0_0(helper_fyl2xp1);
tcg_gen_helper_0_0(helper_fsqrt);
case 3: /* fsincos */
tcg_gen_helper_0_0(helper_fsincos);
case 5: /* fscale */
tcg_gen_helper_0_0(helper_fscale);
case 4: /* frndint */
tcg_gen_helper_0_0(helper_frndint);
tcg_gen_helper_0_0(helper_fsin);
tcg_gen_helper_0_0(helper_fcos);
case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fpop);
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
case 0x02: /* fcom */
case 0x22: /* fcom2, undocumented op */
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
case 0x03: /* fcomp */
case 0x23: /* fcomp3, undocumented op */
case 0x32: /* fcomp5, undocumented op */
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
tcg_gen_helper_0_0(helper_fpop);
case 0x15: /* da/5 */
case 1: /* fucompp */
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
tcg_gen_helper_0_0(helper_fpop);
tcg_gen_helper_0_0(helper_fpop);
case 0: /* feni (287 only, just do nop here) */
case 1: /* fdisi (287 only, just do nop here) */
tcg_gen_helper_0_0(helper_fclex);
case 3: /* fninit */
tcg_gen_helper_0_0(helper_fninit);
case 4: /* fsetpm (287 only, just do nop here) */
case 0x1d: /* fucomi */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
s->cc_op = CC_OP_EFLAGS;
case 0x1e: /* fcomi */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
s->cc_op = CC_OP_EFLAGS;
case 0x28: /* ffree sti */
tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
case 0x2a: /* fst sti */
tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
case 0x2b: /* fstp sti */
case 0x0b: /* fstp1 sti, undocumented op */
case 0x3a: /* fstp8 sti, undocumented op */
case 0x3b: /* fstp9 sti, undocumented op */
tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fpop);
case 0x2c: /* fucom st(i) */
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
case 0x2d: /* fucomp st(i) */
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
tcg_gen_helper_0_0(helper_fpop);
case 0x33: /* de/3 */
case 1: /* fcompp */
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
tcg_gen_helper_0_0(helper_fpop);
tcg_gen_helper_0_0(helper_fpop);
case 0x38: /* ffreep sti, undocumented op */
tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fpop);
case 0x3c: /* df/4 */
tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_mov_reg_T0(OT_WORD, R_EAX);
case 0x3d: /* fucomip */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
tcg_gen_helper_0_0(helper_fpop);
s->cc_op = CC_OP_EFLAGS;
case 0x3e: /* fcomip */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
tcg_gen_helper_0_0(helper_fpop);
s->cc_op = CC_OP_EFLAGS;
case 0x10 ... 0x13: /* fcmovxx */
static const uint8_t fcmov_cc[8] = {
op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
l1 = gen_new_label();
tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
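/* fcmov is implemented as a branch over the register move: the
   (elided) setcc computation leaves the tested condition in
   cpu_T[0], and the brcond skips the fmov when it is zero, so ST0
   is only overwritten when the condition holds. fcmov_cc maps the
   low two opcode bits to the B/Z/BE/P jcc codes; opcode bit 3
   selects the negated form via the low bit of op1. */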
/************************/
case 0xa4: /* movsS */
ot = dflag + OT_WORD;
if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
case 0xaa: /* stosS */
ot = dflag + OT_WORD;
if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
case 0xac: /* lodsS */
ot = dflag + OT_WORD;
if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
case 0xae: /* scasS */
ot = dflag + OT_WORD;
if (prefixes & PREFIX_REPNZ) {
gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
} else if (prefixes & PREFIX_REPZ) {
gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
s->cc_op = CC_OP_SUBB + ot;
case 0xa6: /* cmpsS */
ot = dflag + OT_WORD;
if (prefixes & PREFIX_REPNZ) {
gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
} else if (prefixes & PREFIX_REPZ) {
gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
s->cc_op = CC_OP_SUBB + ot;
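/* CC_OP_SUBB + ot records both the operation (subtract) and the
   operand size in cc_op, so the eflags can be recomputed lazily
   from cc_src/cc_dst only when a later instruction actually needs
   them. */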
case 0x6c: /* insS */
ot = dflag ? OT_LONG : OT_WORD;
gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
gen_op_andl_T0_ffff();
gen_check_io(s, ot, pc_start - s->cs_base,
SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
case 0x6e: /* outsS */
ot = dflag ? OT_LONG : OT_WORD;
gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
gen_op_andl_T0_ffff();
gen_check_io(s, ot, pc_start - s->cs_base,
svm_is_rep(prefixes) | 4);
if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
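/* gen_check_io validates the port against the TSS I/O permission
   bitmap when CPL > IOPL, and also feeds the SVM IOIO intercept
   info: SVM_IOIO_TYPE_MASK marks an IN-type access, and the
   literal 4 appears to be the "string operation" bit of the SVM
   IOIO exit info. */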
/************************/
ot = dflag ? OT_LONG : OT_WORD;
val = ldub_code(s->pc++);
gen_op_movl_T0_im(val);
gen_check_io(s, ot, pc_start - s->cs_base,
SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
gen_op_mov_reg_T1(ot, R_EAX);
ot = dflag ? OT_LONG : OT_WORD;
val = ldub_code(s->pc++);
gen_op_movl_T0_im(val);
gen_check_io(s, ot, pc_start - s->cs_base,
svm_is_rep(prefixes));
gen_op_mov_TN_reg(ot, 1, R_EAX);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
ot = dflag ? OT_LONG : OT_WORD;
gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
gen_op_andl_T0_ffff();
gen_check_io(s, ot, pc_start - s->cs_base,
SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
gen_op_mov_reg_T1(ot, R_EAX);
ot = dflag ? OT_LONG : OT_WORD;
gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
gen_op_andl_T0_ffff();
gen_check_io(s, ot, pc_start - s->cs_base,
svm_is_rep(prefixes));
gen_op_mov_TN_reg(ot, 1, R_EAX);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
/************************/
case 0xc2: /* ret im */
val = ldsw_code(s->pc);
if (CODE64(s) && s->dflag)
gen_stack_update(s, val + (2 << s->dflag));
gen_op_andl_T0_ffff();
case 0xc3: /* ret */
gen_op_andl_T0_ffff();
case 0xca: /* lret im */
val = ldsw_code(s->pc);
if (s->pe && !s->vm86) {
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_2(helper_lret_protected,
tcg_const_i32(s->dflag),
tcg_const_i32(val));
gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
gen_op_andl_T0_ffff();
/* NOTE: keeping EIP updated is not a problem in case of
   exception */
gen_op_addl_A0_im(2 << s->dflag);
gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
gen_op_movl_seg_T0_vm(R_CS);
/* add stack offset */
gen_stack_update(s, val + (4 << s->dflag));
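/* 2 << s->dflag is the operand size in bytes (2 or 4): the new EIP
   is read from the stack top first, CS is read one slot higher,
   and the final gen_stack_update skips both slots (4 << dflag)
   plus the extra byte count from the immediate. */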
case 0xcb: /* lret */
case 0xcf: /* iret */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
s->cc_op = CC_OP_EFLAGS;
} else if (s->vm86) {
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
s->cc_op = CC_OP_EFLAGS;
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_2(helper_iret_protected,
tcg_const_i32(s->dflag),
tcg_const_i32(s->pc - s->cs_base));
s->cc_op = CC_OP_EFLAGS;
case 0xe8: /* call im */
tval = (int32_t)insn_get(s, OT_LONG);
tval = (int16_t)insn_get(s, OT_WORD);
next_eip = s->pc - s->cs_base;
gen_movtl_T0_im(next_eip);
case 0x9a: /* lcall im */
unsigned int selector, offset;
ot = dflag ? OT_LONG : OT_WORD;
offset = insn_get(s, ot);
selector = insn_get(s, OT_WORD);
gen_op_movl_T0_im(selector);
gen_op_movl_T1_imu(offset);
case 0xe9: /* jmp im */
tval = (int32_t)insn_get(s, OT_LONG);
tval = (int16_t)insn_get(s, OT_WORD);
tval += s->pc - s->cs_base;
case 0xea: /* ljmp im */
unsigned int selector, offset;
ot = dflag ? OT_LONG : OT_WORD;
offset = insn_get(s, ot);
selector = insn_get(s, OT_WORD);
gen_op_movl_T0_im(selector);
gen_op_movl_T1_imu(offset);
case 0xeb: /* jmp Jb */
tval = (int8_t)insn_get(s, OT_BYTE);
tval += s->pc - s->cs_base;
case 0x70 ... 0x7f: /* jcc Jb */
tval = (int8_t)insn_get(s, OT_BYTE);
case 0x180 ... 0x18f: /* jcc Jv */
tval = (int32_t)insn_get(s, OT_LONG);
tval = (int16_t)insn_get(s, OT_WORD);
next_eip = s->pc - s->cs_base;
gen_jcc(s, b, tval, next_eip);
case 0x190 ... 0x19f: /* setcc Gv */
modrm = ldub_code(s->pc++);
gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
case 0x140 ... 0x14f: /* cmov Gv, Ev */
ot = dflag + OT_WORD;
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
mod = (modrm >> 6) & 3;
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_op_ld_T1_A0(ot + s->mem_index);
rm = (modrm & 7) | REX_B(s);
gen_op_mov_TN_reg(ot, 1, rm);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
#ifdef TARGET_X86_64
if (ot == OT_LONG) {
/* XXX: specific Intel behaviour? */
l1 = gen_new_label();
gen_jcc1(s, s->cc_op, b ^ 1, l1);
tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
tcg_gen_movi_tl(cpu_tmp0, 0);
tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
l1 = gen_new_label();
gen_jcc1(s, s->cc_op, b ^ 1, l1);
gen_op_mov_reg_T1(ot, reg);
/************************/
case 0x9c: /* pushf */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
if (s->vm86 && s->iopl != 3) {
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
case 0x9d: /* popf */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
if (s->vm86 && s->iopl != 3) {
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
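/* The write mask passed to helper_write_eflags below depends on
   privilege: in ring 0 even IOPL may be rewritten, with
   CPL <= IOPL the IF flag may be changed but IOPL may not, and
   otherwise both are preserved; a 16-bit operand size additionally
   clips the mask to the low word. */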
tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
if (s->cpl <= s->iopl) {
tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
s->cc_op = CC_OP_EFLAGS;
/* abort translation because TF flag may change */
gen_jmp_im(s->pc - s->cs_base);
case 0x9e: /* sahf */
if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_compute_eflags(cpu_cc_src);
tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
s->cc_op = CC_OP_EFLAGS;
case 0x9f: /* lahf */
if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_compute_eflags(cpu_T[0]);
/* Note: gen_compute_eflags() only gives the condition codes */
tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
gen_op_mov_reg_T0(OT_BYTE, R_AH);
case 0xf5: /* cmc */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_compute_eflags(cpu_cc_src);
tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
s->cc_op = CC_OP_EFLAGS;
case 0xf8: /* clc */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_compute_eflags(cpu_cc_src);
tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
s->cc_op = CC_OP_EFLAGS;
case 0xf9: /* stc */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_compute_eflags(cpu_cc_src);
tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
s->cc_op = CC_OP_EFLAGS;
case 0xfc: /* cld */
tcg_gen_movi_i32(cpu_tmp2_i32, 1);
tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
case 0xfd: /* std */
tcg_gen_movi_i32(cpu_tmp2_i32, -1);
tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
/************************/
/* bit operations */
case 0x1ba: /* bt/bts/btr/btc Gv, im */
ot = dflag + OT_WORD;
modrm = ldub_code(s->pc++);
op = (modrm >> 3) & 7;
mod = (modrm >> 6) & 3;
rm = (modrm & 7) | REX_B(s);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_op_ld_T0_A0(ot + s->mem_index);
gen_op_mov_TN_reg(ot, 0, rm);
val = ldub_code(s->pc++);
gen_op_movl_T1_im(val);
case 0x1a3: /* bt Gv, Ev */
case 0x1ab: /* bts */
case 0x1b3: /* btr */
case 0x1bb: /* btc */
ot = dflag + OT_WORD;
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
mod = (modrm >> 6) & 3;
rm = (modrm & 7) | REX_B(s);
gen_op_mov_TN_reg(OT_LONG, 1, reg);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
/* specific case: we need to add a displacement */
gen_exts(ot, cpu_T[1]);
tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
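/* For bt/bts/btr/btc with a register bit offset, the offset is
   sign-extended and converted to a byte displacement: the right
   shift by 3 + ot (16/32/64 bits per word) yields the word index,
   and the left shift by ot scales it back to bytes. */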
gen_op_ld_T0_A0(ot + s->mem_index);
gen_op_mov_TN_reg(ot, 0, rm);
tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
tcg_gen_movi_tl(cpu_cc_dst, 0);
tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
tcg_gen_movi_tl(cpu_tmp0, 1);
tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
tcg_gen_movi_tl(cpu_tmp0, 1);
tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
tcg_gen_movi_tl(cpu_tmp0, 1);
tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
s->cc_op = CC_OP_SARB + ot;
gen_op_st_T0_A0(ot + s->mem_index);
gen_op_mov_reg_T0(ot, rm);
tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
tcg_gen_movi_tl(cpu_cc_dst, 0);
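/* cpu_tmp4 captured the operand shifted so that the tested bit sat
   in bit 0 before it was modified; copying it to cc_src with
   CC_OP_SARB + ot makes that bit the carry flag. bts ORed in a
   one-bit mask, btr ANDed with its complement, btc XORed it. */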
case 0x1bc: /* bsf */
case 0x1bd: /* bsr */
ot = dflag + OT_WORD;
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
gen_extu(ot, cpu_T[0]);
label1 = gen_new_label();
tcg_gen_movi_tl(cpu_cc_dst, 0);
tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
gen_op_mov_reg_T0(ot, reg);
tcg_gen_movi_tl(cpu_cc_dst, 1);
gen_set_label(label1);
tcg_gen_discard_tl(cpu_cc_src);
s->cc_op = CC_OP_LOGICB + ot;
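/* bsf/bsr leave the destination unchanged when the source is zero:
   cc_dst is preset to 0 (so ZF reads as set) and the helper call
   plus write-back are branched over; a non-zero source overwrites
   cc_dst with 1, clearing ZF. */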
/************************/
case 0x27: /* daa */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_0_0(helper_daa);
s->cc_op = CC_OP_EFLAGS;
case 0x2f: /* das */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_0_0(helper_das);
s->cc_op = CC_OP_EFLAGS;
case 0x37: /* aaa */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_0_0(helper_aaa);
s->cc_op = CC_OP_EFLAGS;
case 0x3f: /* aas */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_0_0(helper_aas);
s->cc_op = CC_OP_EFLAGS;
case 0xd4: /* aam */
val = ldub_code(s->pc++);
gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
s->cc_op = CC_OP_LOGICB;
case 0xd5: /* aad */
val = ldub_code(s->pc++);
tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
s->cc_op = CC_OP_LOGICB;
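/* aam divides AL by the immediate into AH:AL (the elided check
   above routes a zero immediate to the #DE exception, hence
   EXCP00_DIVZ); aad folds AH back: AL = AH * imm + AL, AH = 0.
   Both set the flags like a logic op on AL. */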
/************************/
case 0x90: /* nop */
/* XXX: xchg + rex handling */
/* XXX: correct lock test for all insn */
if (prefixes & PREFIX_LOCK)
if (prefixes & PREFIX_REPZ) {
gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
case 0x9b: /* fwait */
if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
(HF_MP_MASK | HF_TS_MASK)) {
gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_0(helper_fwait);
case 0xcc: /* int3 */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
case 0xcd: /* int N */
val = ldub_code(s->pc++);
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
if (s->vm86 && s->iopl != 3) {
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
case 0xce: /* into */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
case 0xf1: /* icebp (undocumented, exits to external debugger) */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
gen_debug(s, pc_start - s->cs_base);
tb_flush(cpu_single_env);
cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
case 0xfa: /* cli */
if (s->cpl <= s->iopl) {
tcg_gen_helper_0_0(helper_cli);
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
tcg_gen_helper_0_0(helper_cli);
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
case 0xfb: /* sti */
if (s->cpl <= s->iopl) {
tcg_gen_helper_0_0(helper_sti);
/* interrupts are enabled only after the first insn following sti */
/* If several instructions disable interrupts, only the
   _first_ does it */
if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
tcg_gen_helper_0_0(helper_set_inhibit_irq);
/* give a chance to handle pending irqs */
gen_jmp_im(s->pc - s->cs_base);
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
case 0x62: /* bound */
ot = dflag ? OT_LONG : OT_WORD;
modrm = ldub_code(s->pc++);
reg = (modrm >> 3) & 7;
mod = (modrm >> 6) & 3;
gen_op_mov_TN_reg(ot, 0, reg);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
case 0x1c8 ... 0x1cf: /* bswap reg */
reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
gen_op_mov_TN_reg(OT_QUAD, 0, reg);
tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
gen_op_mov_reg_T0(OT_QUAD, reg);
gen_op_mov_TN_reg(OT_LONG, 0, reg);
tmp0 = tcg_temp_new(TCG_TYPE_I32);
tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
tcg_gen_bswap_i32(tmp0, tmp0);
tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
gen_op_mov_reg_T0(OT_LONG, reg);
gen_op_mov_TN_reg(OT_LONG, 0, reg);
tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
gen_op_mov_reg_T0(OT_LONG, reg);
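/* On a 64-bit target the 32-bit bswap path above goes through an
   i32 temporary (truncate, swap, zero-extend) because cpu_T[0] is
   a 64-bit value there; the plain 32-bit build can bswap cpu_T[0]
   in place. */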
case 0xd6: /* salc */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_compute_eflags_c(cpu_T[0]);
tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
gen_op_mov_reg_T0(OT_BYTE, R_EAX);
case 0xe0: /* loopnz */
case 0xe1: /* loopz */
case 0xe2: /* loop */
case 0xe3: /* jecxz */
tval = (int8_t)insn_get(s, OT_BYTE);
next_eip = s->pc - s->cs_base;
l1 = gen_new_label();
l2 = gen_new_label();
l3 = gen_new_label();
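/* Label usage below: l1 is the branch-taken path (jump to tval),
   l3 the early-out taken by loopnz/loopz when ECX reaches zero,
   and l2 the common exit after the not-taken fall-through. */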
case 0: /* loopnz */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_op_add_reg_im(s->aflag, R_ECX, -1);
gen_op_jz_ecx(s->aflag, l3);
gen_compute_eflags(cpu_tmp0);
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
tcg_gen_brcond_tl(TCG_COND_EQ,
cpu_tmp0, tcg_const_tl(0), l1);
tcg_gen_brcond_tl(TCG_COND_NE,
cpu_tmp0, tcg_const_tl(0), l1);
gen_op_add_reg_im(s->aflag, R_ECX, -1);
gen_op_jnz_ecx(s->aflag, l1);
gen_op_jz_ecx(s->aflag, l1);
gen_jmp_im(next_eip);
case 0x130: /* wrmsr */
case 0x132: /* rdmsr */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
tcg_gen_helper_0_0(helper_rdmsr);
retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
tcg_gen_helper_0_0(helper_wrmsr);
case 0x131: /* rdtsc */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_0(helper_rdtsc);
case 0x133: /* rdpmc */
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_0(helper_rdpmc);
case 0x134: /* sysenter */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
if (s->cc_op != CC_OP_DYNAMIC) {
gen_op_set_cc_op(s->cc_op);
s->cc_op = CC_OP_DYNAMIC;
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_0(helper_sysenter);
case 0x135: /* sysexit */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
if (s->cc_op != CC_OP_DYNAMIC) {
gen_op_set_cc_op(s->cc_op);
s->cc_op = CC_OP_DYNAMIC;
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_0(helper_sysexit);
#ifdef TARGET_X86_64
case 0x105: /* syscall */
/* XXX: is it usable in real mode? */
if (s->cc_op != CC_OP_DYNAMIC) {
gen_op_set_cc_op(s->cc_op);
s->cc_op = CC_OP_DYNAMIC;
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
case 0x107: /* sysret */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
if (s->cc_op != CC_OP_DYNAMIC) {
gen_op_set_cc_op(s->cc_op);
s->cc_op = CC_OP_DYNAMIC;
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
/* condition codes are modified only in long mode */
s->cc_op = CC_OP_EFLAGS;
case 0x1a2: /* cpuid */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
tcg_gen_helper_0_0(helper_cpuid);
case 0xf4: /* hlt */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(s->pc - s->cs_base);
tcg_gen_helper_0_0(helper_hlt);
modrm = ldub_code(s->pc++);
mod = (modrm >> 6) & 3;
op = (modrm >> 3) & 7;
if (!s->pe || s->vm86)
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
if (!s->pe || s->vm86)
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
if (!s->pe || s->vm86)
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
if (!s->pe || s->vm86)
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
if (!s->pe || s->vm86)
gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
s->cc_op = CC_OP_EFLAGS;
modrm = ldub_code(s->pc++);
mod = (modrm >> 6) & 3;
op = (modrm >> 3) & 7;
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
gen_op_st_T0_A0(OT_WORD + s->mem_index);
gen_add_A0_im(s, 2);
tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
gen_op_andl_T0_im(0xffffff);
gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
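/* sgdt/sidt store the 16-bit limit followed by the base at offset
   2; with 16-bit operand size (the elided check above) the base is
   masked to 24 bits for 286 compatibility, and CODE64(s) widens
   the base store from 32 to 64 bits in long mode. */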
case 0: /* monitor */
if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
if (s->aflag == 2) {
gen_op_movq_A0_reg(R_EAX);
gen_op_movl_A0_reg(R_EAX);
gen_op_andl_A0_ffff();
gen_add_A0_ds_seg(s);
tcg_gen_helper_0_1(helper_monitor, cpu_A0);
if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
if (s->cc_op != CC_OP_DYNAMIC) {
gen_op_set_cc_op(s->cc_op);
s->cc_op = CC_OP_DYNAMIC;
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
gen_jmp_im(s->pc - s->cs_base);
tcg_gen_helper_0_0(helper_mwait);
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
gen_op_st_T0_A0(OT_WORD + s->mem_index);
gen_add_A0_im(s, 2);
tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
gen_op_andl_T0_im(0xffffff);
gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(s->pc - s->cs_base);
tcg_gen_helper_0_0(helper_vmrun);
s->cc_op = CC_OP_EFLAGS;
case 1: /* VMMCALL */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
/* FIXME: cause #UD if hflags & SVM */
tcg_gen_helper_0_0(helper_vmmcall);
case 2: /* VMLOAD */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
tcg_gen_helper_0_0(helper_vmload);
case 3: /* VMSAVE */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
tcg_gen_helper_0_0(helper_vmsave);
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
tcg_gen_helper_0_0(helper_stgi);
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
tcg_gen_helper_0_0(helper_clgi);
case 6: /* SKINIT */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
tcg_gen_helper_0_0(helper_skinit);
case 7: /* INVLPGA */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
tcg_gen_helper_0_0(helper_invlpga);
} else if (s->cpl != 0) {
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
if (gen_svm_check_intercept(s, pc_start,
op == 2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_op_ld_T1_A0(OT_WORD + s->mem_index);
gen_add_A0_im(s, 2);
gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
gen_op_andl_T0_im(0xffffff);
tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
gen_jmp_im(s->pc - s->cs_base);
case 7: /* invlpg */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
#ifdef TARGET_X86_64
if (CODE64(s) && rm == 0) {
tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
gen_jmp_im(s->pc - s->cs_base);
case 0x108: /* invd */
case 0x109: /* wbinvd */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
/* d_ot is the size of destination */
d_ot = dflag + OT_WORD;
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
mod = (modrm >> 6) & 3;
rm = (modrm & 7) | REX_B(s);
gen_op_mov_TN_reg(OT_LONG, 0, rm);
if (d_ot == OT_QUAD)
tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
gen_op_mov_reg_T0(d_ot, reg);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
if (d_ot == OT_QUAD) {
gen_op_lds_T0_A0(OT_LONG + s->mem_index);
gen_op_ld_T0_A0(OT_LONG + s->mem_index);
gen_op_mov_reg_T0(d_ot, reg);
if (!s->pe || s->vm86)
modrm = ldub_code(s->pc++);
reg = (modrm >> 3) & 7;
mod = (modrm >> 6) & 3;
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_op_ld_T0_A0(ot + s->mem_index);
gen_op_mov_TN_reg(ot, 0, rm);
gen_op_mov_TN_reg(ot, 1, reg);
tcg_gen_andi_tl(cpu_tmp0, cpu_T[0], 3);
tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 3);
tcg_gen_movi_tl(cpu_T3, 0);
label1 = gen_new_label();
tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, cpu_T[1], label1);
tcg_gen_andi_tl(cpu_T[0], cpu_T[0], ~3);
tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
tcg_gen_movi_tl(cpu_T3, CC_Z);
gen_set_label(label1);
gen_op_st_T0_A0(ot + s->mem_index);
gen_op_mov_reg_T0(ot, rm);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_compute_eflags(cpu_cc_src);
tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T3);
s->cc_op = CC_OP_EFLAGS;
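/* arpl compares the RPL fields (low two bits) of the destination
   and source selectors: when dest RPL < src RPL it is raised to
   the source's RPL and cpu_T3 is set to CC_Z, which is then merged
   into the computed eflags so ZF reports whether an adjustment
   happened. */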
case 0x102: /* lar */
case 0x103: /* lsl */
if (!s->pe || s->vm86)
ot = dflag ? OT_LONG : OT_WORD;
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
tcg_gen_helper_1_1(helper_lar, cpu_T[0], cpu_T[0]);
tcg_gen_helper_1_1(helper_lsl, cpu_T[0], cpu_T[0]);
tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
label1 = gen_new_label();
tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
gen_op_mov_reg_T0(ot, reg);
gen_set_label(label1);
s->cc_op = CC_OP_EFLAGS;
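/* The lar/lsl helpers set ZF in cpu_cc_src when the selector is
   valid; the branch above skips the register write-back when ZF is
   clear, so the destination is only updated on success, matching
   hardware. */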
modrm = ldub_code(s->pc++);
mod = (modrm >> 6) & 3;
op = (modrm >> 3) & 7;
case 0: /* prefetchnta */
case 1: /* prefetcht0 */
case 2: /* prefetcht1 */
case 3: /* prefetcht2 */
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
/* nothing more to do */
default: /* nop (multi byte) */
gen_nop_modrm(s, modrm);
case 0x119 ... 0x11f: /* nop (multi byte) */
modrm = ldub_code(s->pc++);
gen_nop_modrm(s, modrm);
case 0x120: /* mov reg, crN */
case 0x122: /* mov crN, reg */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
modrm = ldub_code(s->pc++);
if ((modrm & 0xc0) != 0xc0)
rm = (modrm & 7) | REX_B(s);
reg = ((modrm >> 3) & 7) | rex_r;
gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
gen_op_mov_TN_reg(ot, 0, rm);
tcg_gen_helper_0_2(helper_movl_crN_T0,
tcg_const_i32(reg), cpu_T[0]);
gen_jmp_im(s->pc - s->cs_base);
gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[reg]));
gen_op_mov_reg_T0(ot, rm);
case 0x121: /* mov reg, drN */
case 0x123: /* mov drN, reg */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
modrm = ldub_code(s->pc++);
if ((modrm & 0xc0) != 0xc0)
rm = (modrm & 7) | REX_B(s);
reg = ((modrm >> 3) & 7) | rex_r;
/* XXX: do it dynamically with CR4.DE bit */
if (reg == 4 || reg == 5 || reg >= 8)
gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
gen_op_mov_TN_reg(ot, 0, rm);
tcg_gen_helper_0_2(helper_movl_drN_T0,
tcg_const_i32(reg), cpu_T[0]);
gen_jmp_im(s->pc - s->cs_base);
gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
gen_op_mov_reg_T0(ot, rm);
case 0x106: /* clts */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
tcg_gen_helper_0_0(helper_clts);
/* abort block because static cpu state changed */
gen_jmp_im(s->pc - s->cs_base);
/* MMX/3DNow!/SSE/SSE2/SSE3 support */
case 0x1c3: /* MOVNTI reg, mem */
if (!(s->cpuid_features & CPUID_SSE2))
ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
modrm = ldub_code(s->pc++);
mod = (modrm >> 6) & 3;
reg = ((modrm >> 3) & 7) | rex_r;
/* generate a generic store */
gen_ldst_modrm(s, modrm, ot, reg, 1);
modrm = ldub_code(s->pc++);
mod = (modrm >> 6) & 3;
op = (modrm >> 3) & 7;
case 0: /* fxsave */
if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
(s->flags & HF_EM_MASK))
if (s->flags & HF_TS_MASK) {
gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_2(helper_fxsave,
cpu_A0, tcg_const_i32((s->dflag == 2)));
case 1: /* fxrstor */
if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
(s->flags & HF_EM_MASK))
if (s->flags & HF_TS_MASK) {
gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_helper_0_2(helper_fxrstor,
cpu_A0, tcg_const_i32((s->dflag == 2)));
case 2: /* ldmxcsr */
case 3: /* stmxcsr */
if (s->flags & HF_TS_MASK) {
gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_op_ld_T0_A0(OT_LONG + s->mem_index);
tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
gen_op_st_T0_A0(OT_LONG + s->mem_index);
case 5: /* lfence */
case 6: /* mfence */
if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
case 7: /* sfence / clflush */
if ((modrm & 0xc7) == 0xc0) {
/* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
if (!(s->cpuid_features & CPUID_SSE))
if (!(s->cpuid_features & CPUID_CLFLUSH))
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
case 0x10d: /* 3DNow! prefetch(w) */
modrm = ldub_code(s->pc++);
mod = (modrm >> 6) & 3;
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
/* ignore for now */
case 0x1aa: /* rsm */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
if (!(s->flags & HF_SMM_MASK))
if (s->cc_op != CC_OP_DYNAMIC) {
gen_op_set_cc_op(s->cc_op);
s->cc_op = CC_OP_DYNAMIC;
gen_jmp_im(s->pc - s->cs_base);
tcg_gen_helper_0_0(helper_rsm);
case 0x10e ... 0x10f:
/* 3DNow! instructions, ignore prefixes */
s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
case 0x110 ... 0x117:
case 0x128 ... 0x12f:
case 0x150 ... 0x177:
case 0x17c ... 0x17f:
case 0x1c4 ... 0x1c6:
case 0x1d0 ... 0x1fe:
gen_sse(s, b, pc_start, rex_r);
/* lock generation */
if (s->prefix & PREFIX_LOCK)
tcg_gen_helper_0_0(helper_unlock);
if (s->prefix & PREFIX_LOCK)
tcg_gen_helper_0_0(helper_unlock);
/* XXX: ensure that no lock was generated */
gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
void optimize_flags_init(void)
#if TCG_TARGET_REG_BITS == 32
assert(sizeof(CCTable) == (1 << 3));
assert(sizeof(CCTable) == (1 << 4));
tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
TCG_AREG0, offsetof(CPUState, t0), "T0");
cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
TCG_AREG0, offsetof(CPUState, t1), "T1");
cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
TCG_AREG0, offsetof(CPUState, t2), "A0");
cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
TCG_AREG0, offsetof(CPUState, t3), "T3");
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
/* XXX: must be suppressed once there are fewer fixed registers */
cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
/* register helpers */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
basic block 'tb'. If search_pc is TRUE, also generate PC
information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
TranslationBlock *tb,
DisasContext dc1, *dc = &dc1;
target_ulong pc_ptr;
uint16_t *gen_opc_end;
target_ulong pc_start;
target_ulong cs_base;
/* generate intermediate code */
cs_base = tb->cs_base;
cflags = tb->cflags;
dc->pe = (flags >> HF_PE_SHIFT) & 1;
dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
dc->vm86 = (flags >> VM_SHIFT) & 1;
dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
dc->iopl = (flags >> IOPL_SHIFT) & 3;
dc->tf = (flags >> TF_SHIFT) & 1;
dc->singlestep_enabled = env->singlestep_enabled;
dc->cc_op = CC_OP_DYNAMIC;
dc->cs_base = cs_base;
dc->popl_esp_hack = 0;
/* select memory access functions */
if (flags & HF_SOFTMMU_MASK) {
dc->mem_index = 2 * 4;
dc->mem_index = 1 * 4;
dc->cpuid_features = env->cpuid_features;
dc->cpuid_ext_features = env->cpuid_ext_features;
dc->cpuid_ext2_features = env->cpuid_ext2_features;
dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
dc->lma = (flags >> HF_LMA_SHIFT) & 1;
dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
(flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
|| (flags & HF_SOFTMMU_MASK)
/* check addseg logic */
if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
printf("ERROR addseg\n");
cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
dc->is_jmp = DISAS_NEXT;
if (env->nb_breakpoints > 0) {
for(j = 0; j < env->nb_breakpoints; j++) {
if (env->breakpoints[j] == pc_ptr) {
gen_debug(dc, pc_ptr - dc->cs_base);
j = gen_opc_ptr - gen_opc_buf;
gen_opc_instr_start[lj++] = 0;
gen_opc_pc[lj] = pc_ptr;
gen_opc_cc_op[lj] = dc->cc_op;
gen_opc_instr_start[lj] = 1;
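/* In search_pc mode these arrays record, per generated op, the
   guest PC and the pending cc_op, so that gen_pc_load below can
   restore eip and the lazy flag state precisely after a fault
   inside the block. */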
pc_ptr = disas_insn(dc, pc_ptr);
/* stop translation if indicated */
/* in single step mode, generate only one instruction and then
raise an exception */
/* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, clear the
flag and abort the translation to give the irqs a chance to
be handled */
if (dc->tf || dc->singlestep_enabled ||
(flags & HF_INHIBIT_IRQ_MASK) ||
(cflags & CF_SINGLE_INSN)) {
gen_jmp_im(pc_ptr - dc->cs_base);
/* if the translation grows too long, stop generation as well */
if (gen_opc_ptr >= gen_opc_end ||
(pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
gen_jmp_im(pc_ptr - dc->cs_base);
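/* The TARGET_PAGE_SIZE - 32 margin stops translation before the
   block can run off the current guest page; the 32-byte slack
   presumably covers the longest possible x86 instruction (15
   bytes) so the final insn is decoded entirely within the page. */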
*gen_opc_ptr = INDEX_op_end;
/* don't forget to fill in the last values */
j = gen_opc_ptr - gen_opc_buf;
gen_opc_instr_start[lj++] = 0;
if (loglevel & CPU_LOG_TB_CPU) {
cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
if (loglevel & CPU_LOG_TB_IN_ASM) {
fprintf(logfile, "----------------\n");
fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
disas_flags = !dc->code32;
target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
fprintf(logfile, "\n");
tb->size = pc_ptr - pc_start;
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
return gen_intermediate_code_internal(env, tb, 0);
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
return gen_intermediate_code_internal(env, tb, 1);
void gen_pc_load(CPUState *env, TranslationBlock *tb,
unsigned long searched_pc, int pc_pos, void *puc)
if (loglevel & CPU_LOG_TB_OP) {
fprintf(logfile, "RESTORE:\n");
for(i = 0; i <= pc_pos; i++) {
if (gen_opc_instr_start[i]) {
fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
(uint32_t)tb->cs_base);
env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
cc_op = gen_opc_cc_op[pc_pos];
if (cc_op != CC_OP_DYNAMIC)