4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
35 #define PREFIX_REPZ 0x01
36 #define PREFIX_REPNZ 0x02
37 #define PREFIX_LOCK 0x04
38 #define PREFIX_DATA 0x08
39 #define PREFIX_ADR 0x10
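/* Illustrative note (not part of the original header): these prefix flags are
   power-of-two bits that the decoder ORs into a single 'prefixes' mask, so a
   "rep" string instruction carrying an operand-size override would end up with
   (PREFIX_REPZ | PREFIX_DATA), i.e. 0x09. */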
42 #define X86_64_ONLY(x) x
43 #define X86_64_DEF(...) __VA_ARGS__
44 #define CODE64(s) ((s)->code64)
45 #define REX_X(s) ((s)->rex_x)
46 #define REX_B(s) ((s)->rex_b)
47 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
49 #define BUGGY_64(x) NULL
52 #define X86_64_ONLY(x) NULL
53 #define X86_64_DEF(...)
59 //#define MACRO_TEST 1
61 /* global register indexes */
62 static TCGv_ptr cpu_env;
63 static TCGv cpu_A0, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
64 static TCGv_i32 cpu_cc_op;
65 static TCGv cpu_regs[CPU_NB_REGS];
67 static TCGv cpu_T[2], cpu_T3;
68 /* local register indexes (only used inside old micro ops) */
69 static TCGv cpu_tmp0, cpu_tmp4;
70 static TCGv_ptr cpu_ptr0, cpu_ptr1;
71 static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
72 static TCGv_i64 cpu_tmp1_i64;
75 static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
77 #include "gen-icount.h"
80 static int x86_64_hregs;
83 typedef struct DisasContext {
84 /* current insn context */
85 int override; /* -1 if no override */
88 target_ulong pc; /* pc = eip + cs_base */
89 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
90 static state change (stop translation) */
91 /* current block context */
92 target_ulong cs_base; /* base of CS segment */
93 int pe; /* protected mode */
94 int code32; /* 32 bit code segment */
96 int lma; /* long mode active */
97 int code64; /* 64 bit code segment */
100 int ss32; /* 32 bit stack segment */
101 int cc_op; /* current CC operation */
102 int addseg; /* non zero if any of DS/ES/SS has a non zero base */
103 int f_st; /* currently unused */
104 int vm86; /* vm86 mode */
107 int tf; /* TF cpu flag */
108 int singlestep_enabled; /* "hardware" single step enabled */
109 int jmp_opt; /* use direct block chaining for direct jumps */
110 int mem_index; /* select memory access functions */
111 uint64_t flags; /* all execution flags */
112 struct TranslationBlock *tb;
113 int popl_esp_hack; /* for correct popl with esp base handling */
114 int rip_offset; /* only used in x86_64, but left for simplicity */
116 int cpuid_ext_features;
117 int cpuid_ext2_features;
118 int cpuid_ext3_features;
121 static void gen_eob(DisasContext *s);
122 static void gen_jmp(DisasContext *s, target_ulong eip);
123 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
125 /* i386 arith/logic operations */
145 OP_SHL1, /* undocumented */
169 /* I386 int registers */
170 OR_EAX, /* MUST be even numbered */
179 OR_TMP0 = 16, /* temporary operand register */
181 OR_A0, /* temporary register used when doing address evaluation */
184 static inline void gen_op_movl_T0_0(void)
186 tcg_gen_movi_tl(cpu_T[0], 0);
189 static inline void gen_op_movl_T0_im(int32_t val)
191 tcg_gen_movi_tl(cpu_T[0], val);
194 static inline void gen_op_movl_T0_imu(uint32_t val)
196 tcg_gen_movi_tl(cpu_T[0], val);
199 static inline void gen_op_movl_T1_im(int32_t val)
201 tcg_gen_movi_tl(cpu_T[1], val);
204 static inline void gen_op_movl_T1_imu(uint32_t val)
206 tcg_gen_movi_tl(cpu_T[1], val);
209 static inline void gen_op_movl_A0_im(uint32_t val)
211 tcg_gen_movi_tl(cpu_A0, val);
215 static inline void gen_op_movq_A0_im(int64_t val)
217 tcg_gen_movi_tl(cpu_A0, val);
221 static inline void gen_movtl_T0_im(target_ulong val)
223 tcg_gen_movi_tl(cpu_T[0], val);
226 static inline void gen_movtl_T1_im(target_ulong val)
228 tcg_gen_movi_tl(cpu_T[1], val);
231 static inline void gen_op_andl_T0_ffff(void)
233 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
236 static inline void gen_op_andl_T0_im(uint32_t val)
238 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
241 static inline void gen_op_movl_T0_T1(void)
243 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
246 static inline void gen_op_andl_A0_ffff(void)
248 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
253 #define NB_OP_SIZES 4
255 #else /* !TARGET_X86_64 */
257 #define NB_OP_SIZES 3
259 #endif /* !TARGET_X86_64 */
261 #if defined(HOST_WORDS_BIGENDIAN)
262 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
263 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
264 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
265 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
266 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
268 #define REG_B_OFFSET 0
269 #define REG_H_OFFSET 1
270 #define REG_W_OFFSET 0
271 #define REG_L_OFFSET 0
272 #define REG_LH_OFFSET 4
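/* Explanatory note: these are the byte offsets of the 8/16/32-bit sub-registers
   within a target_ulong slot, chosen per host endianness; e.g. REG_L_OFFSET is
   used below so that a 32-bit load of segs[reg].base picks up the low 32 bits
   on either host byte order. */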
275 static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
279 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
280 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 8);
282 tcg_gen_deposit_tl(cpu_regs[reg - 4], cpu_regs[reg - 4], t0, 8, 8);
286 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 16);
288 default: /* XXX this shouldn't be reached; abort? */
290 /* For x86_64, this sets the higher half of the register to zero.
291 For i386, this is equivalent to a mov. */
292 tcg_gen_ext32u_tl(cpu_regs[reg], t0);
296 tcg_gen_mov_tl(cpu_regs[reg], t0);
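/* Note on the OT_BYTE case above: register numbers 0-3 are AL/CL/DL/BL; 4-7
   normally name the high bytes AH/CH/DH/BH (bits 8..15 of registers 0-3), but
   once any REX prefix has been seen (x86_64_hregs) they name SPL/BPL/SIL/DIL
   instead, hence the extra X86_64_DEF() test. */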
302 static inline void gen_op_mov_reg_T0(int ot, int reg)
304 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
307 static inline void gen_op_mov_reg_T1(int ot, int reg)
309 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
312 static inline void gen_op_mov_reg_A0(int size, int reg)
316 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_A0, 0, 16);
318 default: /* XXX this shouldn't be reached; abort? */
320 /* For x86_64, this sets the higher half of the register to zero.
321 For i386, this is equivalent to a mov. */
322 tcg_gen_ext32u_tl(cpu_regs[reg], cpu_A0);
326 tcg_gen_mov_tl(cpu_regs[reg], cpu_A0);
332 static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
336 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
339 tcg_gen_shri_tl(t0, cpu_regs[reg - 4], 8);
340 tcg_gen_ext8u_tl(t0, t0);
345 tcg_gen_mov_tl(t0, cpu_regs[reg]);
350 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
352 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
355 static inline void gen_op_movl_A0_reg(int reg)
357 tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
360 static inline void gen_op_addl_A0_im(int32_t val)
362 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
364 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
369 static inline void gen_op_addq_A0_im(int64_t val)
371 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
375 static void gen_add_A0_im(DisasContext *s, int val)
379 gen_op_addq_A0_im(val);
382 gen_op_addl_A0_im(val);
385 static inline void gen_op_addl_T0_T1(void)
387 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
390 static inline void gen_op_jmp_T0(void)
392 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
395 static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
399 tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
400 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
403 tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
404 /* For x86_64, this sets the higher half of the register to zero.
405 For i386, this is equivalent to a nop. */
406 tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
407 tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
411 tcg_gen_addi_tl(cpu_regs[reg], cpu_regs[reg], val);
417 static inline void gen_op_add_reg_T0(int size, int reg)
421 tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
422 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
425 tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
426 /* For x86_64, this sets the higher half of the register to zero.
427 For i386, this is equivalent to a nop. */
428 tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
429 tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
433 tcg_gen_add_tl(cpu_regs[reg], cpu_regs[reg], cpu_T[0]);
439 static inline void gen_op_set_cc_op(int32_t val)
441 tcg_gen_movi_i32(cpu_cc_op, val);
444 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
446 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
448 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
449 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
450 /* For x86_64, this sets the higher half of the register to zero.
451 For i386, this is equivalent to a nop. */
452 tcg_gen_ext32u_tl(cpu_A0, cpu_A0);
455 static inline void gen_op_movl_A0_seg(int reg)
457 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
460 static inline void gen_op_addl_A0_seg(int reg)
462 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
463 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
465 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
470 static inline void gen_op_movq_A0_seg(int reg)
472 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
475 static inline void gen_op_addq_A0_seg(int reg)
477 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
478 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
481 static inline void gen_op_movq_A0_reg(int reg)
483 tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
486 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
488 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
490 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
491 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
495 static inline void gen_op_lds_T0_A0(int idx)
497 int mem_index = (idx >> 2) - 1;
500 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
503 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
507 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
512 static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
514 int mem_index = (idx >> 2) - 1;
517 tcg_gen_qemu_ld8u(t0, a0, mem_index);
520 tcg_gen_qemu_ld16u(t0, a0, mem_index);
523 tcg_gen_qemu_ld32u(t0, a0, mem_index);
527 /* Should never happen on 32-bit targets. */
529 tcg_gen_qemu_ld64(t0, a0, mem_index);
535 /* XXX: always use ldu or lds */
536 static inline void gen_op_ld_T0_A0(int idx)
538 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
541 static inline void gen_op_ldu_T0_A0(int idx)
543 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
546 static inline void gen_op_ld_T1_A0(int idx)
548 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
551 static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
553 int mem_index = (idx >> 2) - 1;
556 tcg_gen_qemu_st8(t0, a0, mem_index);
559 tcg_gen_qemu_st16(t0, a0, mem_index);
562 tcg_gen_qemu_st32(t0, a0, mem_index);
566 /* Should never happen on 32-bit targets. */
568 tcg_gen_qemu_st64(t0, a0, mem_index);
574 static inline void gen_op_st_T0_A0(int idx)
576 gen_op_st_v(idx, cpu_T[0], cpu_A0);
579 static inline void gen_op_st_T1_A0(int idx)
581 gen_op_st_v(idx, cpu_T[1], cpu_A0);
584 static inline void gen_jmp_im(target_ulong pc)
586 tcg_gen_movi_tl(cpu_tmp0, pc);
587 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
590 static inline void gen_string_movl_A0_ESI(DisasContext *s)
594 override = s->override;
598 gen_op_movq_A0_seg(override);
599 gen_op_addq_A0_reg_sN(0, R_ESI);
601 gen_op_movq_A0_reg(R_ESI);
607 if (s->addseg && override < 0)
610 gen_op_movl_A0_seg(override);
611 gen_op_addl_A0_reg_sN(0, R_ESI);
613 gen_op_movl_A0_reg(R_ESI);
616 /* 16 bit address, always override */
619 gen_op_movl_A0_reg(R_ESI);
620 gen_op_andl_A0_ffff();
621 gen_op_addl_A0_seg(override);
625 static inline void gen_string_movl_A0_EDI(DisasContext *s)
629 gen_op_movq_A0_reg(R_EDI);
634 gen_op_movl_A0_seg(R_ES);
635 gen_op_addl_A0_reg_sN(0, R_EDI);
637 gen_op_movl_A0_reg(R_EDI);
640 gen_op_movl_A0_reg(R_EDI);
641 gen_op_andl_A0_ffff();
642 gen_op_addl_A0_seg(R_ES);
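/* Unlike the source side above, the destination of string instructions is
   architecturally always ES:EDI, so no segment override is consulted here. */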
646 static inline void gen_op_movl_T0_Dshift(int ot)
648 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
649 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
652 static void gen_extu(int ot, TCGv reg)
656 tcg_gen_ext8u_tl(reg, reg);
659 tcg_gen_ext16u_tl(reg, reg);
662 tcg_gen_ext32u_tl(reg, reg);
669 static void gen_exts(int ot, TCGv reg)
673 tcg_gen_ext8s_tl(reg, reg);
676 tcg_gen_ext16s_tl(reg, reg);
679 tcg_gen_ext32s_tl(reg, reg);
686 static inline void gen_op_jnz_ecx(int size, int label1)
688 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
689 gen_extu(size + 1, cpu_tmp0);
690 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
693 static inline void gen_op_jz_ecx(int size, int label1)
695 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
696 gen_extu(size + 1, cpu_tmp0);
697 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
700 static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n)
703 case 0: gen_helper_inb(v, n); break;
704 case 1: gen_helper_inw(v, n); break;
705 case 2: gen_helper_inl(v, n); break;
710 static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n)
713 case 0: gen_helper_outb(v, n); break;
714 case 1: gen_helper_outw(v, n); break;
715 case 2: gen_helper_outl(v, n); break;
720 static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
724 target_ulong next_eip;
727 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
728 if (s->cc_op != CC_OP_DYNAMIC)
729 gen_op_set_cc_op(s->cc_op);
732 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
734 case 0: gen_helper_check_iob(cpu_tmp2_i32); break;
735 case 1: gen_helper_check_iow(cpu_tmp2_i32); break;
736 case 2: gen_helper_check_iol(cpu_tmp2_i32); break;
739 if(s->flags & HF_SVMI_MASK) {
741 if (s->cc_op != CC_OP_DYNAMIC)
742 gen_op_set_cc_op(s->cc_op);
745 svm_flags |= (1 << (4 + ot));
746 next_eip = s->pc - s->cs_base;
747 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
748 gen_helper_svm_check_io(cpu_tmp2_i32, tcg_const_i32(svm_flags),
749 tcg_const_i32(next_eip - cur_eip));
753 static inline void gen_movs(DisasContext *s, int ot)
755 gen_string_movl_A0_ESI(s);
756 gen_op_ld_T0_A0(ot + s->mem_index);
757 gen_string_movl_A0_EDI(s);
758 gen_op_st_T0_A0(ot + s->mem_index);
759 gen_op_movl_T0_Dshift(ot);
760 gen_op_add_reg_T0(s->aflag, R_ESI);
761 gen_op_add_reg_T0(s->aflag, R_EDI);
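/* Worked example (assuming 32-bit address size and OT_LONG): a 32-bit MOVS
   loads 4 bytes from DS:ESI (or the override segment), stores them to ES:EDI,
   then adds the Dshift value (+4 or -4 depending on EFLAGS.DF) to both ESI
   and EDI. */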
764 static inline void gen_update_cc_op(DisasContext *s)
766 if (s->cc_op != CC_OP_DYNAMIC) {
767 gen_op_set_cc_op(s->cc_op);
768 s->cc_op = CC_OP_DYNAMIC;
772 static void gen_op_update1_cc(void)
774 tcg_gen_discard_tl(cpu_cc_src);
775 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
778 static void gen_op_update2_cc(void)
780 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
781 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
784 static inline void gen_op_cmpl_T0_T1_cc(void)
786 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
787 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
790 static inline void gen_op_testl_T0_T1_cc(void)
792 tcg_gen_discard_tl(cpu_cc_src);
793 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
796 static void gen_op_update_neg_cc(void)
798 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
799 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
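/* Condition codes are evaluated lazily: arithmetic ops record their operands
   and result in cc_src/cc_dst and the operation kind in cc_op, and the helpers
   used below (cc_compute_c / cc_compute_all) reconstruct CF or the full EFLAGS
   only when a consumer actually needs them. */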
802 /* compute eflags.C to reg */
803 static void gen_compute_eflags_c(TCGv reg)
805 gen_helper_cc_compute_c(cpu_tmp2_i32, cpu_cc_op);
806 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
809 /* compute all eflags into 'reg' */
810 static void gen_compute_eflags(TCGv reg)
812 gen_helper_cc_compute_all(cpu_tmp2_i32, cpu_cc_op);
813 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
816 static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
818 if (s->cc_op != CC_OP_DYNAMIC)
819 gen_op_set_cc_op(s->cc_op);
822 gen_compute_eflags(cpu_T[0]);
823 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
824 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
827 gen_compute_eflags_c(cpu_T[0]);
830 gen_compute_eflags(cpu_T[0]);
831 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
832 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
835 gen_compute_eflags(cpu_tmp0);
836 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
837 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
838 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
841 gen_compute_eflags(cpu_T[0]);
842 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
843 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
846 gen_compute_eflags(cpu_T[0]);
847 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
848 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
851 gen_compute_eflags(cpu_tmp0);
852 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
853 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
854 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
855 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
859 gen_compute_eflags(cpu_tmp0);
860 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
861 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
862 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
863 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
864 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
865 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
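/* The bit positions used above follow the EFLAGS layout: bit 11 = OF,
   bit 7 = SF, bit 6 = ZF, bit 2 = PF, bit 0 = CF; e.g. the JCC_L case
   computes SF ^ OF and the JCC_LE case additionally ORs in ZF. */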
870 /* return true if setcc_slow is not needed (WARNING: must be kept in
871 sync with gen_jcc1) */
872 static int is_fast_jcc_case(DisasContext *s, int b)
875 jcc_op = (b >> 1) & 7;
877 /* we optimize the cmp/jcc case */
882 if (jcc_op == JCC_O || jcc_op == JCC_P)
886 /* some jumps are easy to compute */
911 if (jcc_op != JCC_Z && jcc_op != JCC_S)
921 /* generate a conditional jump to label 'l1' according to jump opcode
922 value 'b'. In the fast case, T0 is guaranteed not to be used. */
923 static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
925 int inv, jcc_op, size, cond;
929 jcc_op = (b >> 1) & 7;
932 /* we optimize the cmp/jcc case */
938 size = cc_op - CC_OP_SUBB;
944 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
948 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
953 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
961 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
967 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
968 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
972 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
973 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
978 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
979 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
984 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
991 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
994 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
996 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1000 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1001 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1005 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1006 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1008 #ifdef TARGET_X86_64
1011 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1012 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1019 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1023 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1026 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1028 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1032 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1033 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1037 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1038 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1040 #ifdef TARGET_X86_64
1043 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1044 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1051 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1059 /* some jumps are easy to compute */
1101 size = (cc_op - CC_OP_ADDB) & 3;
1104 size = (cc_op - CC_OP_ADDB) & 3;
1112 gen_setcc_slow_T0(s, jcc_op);
1113 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1119 /* XXX: does not work with gdbstub "ice" single step - not a serious problem */
1121 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1125 l1 = gen_new_label();
1126 l2 = gen_new_label();
1127 gen_op_jnz_ecx(s->aflag, l1);
1129 gen_jmp_tb(s, next_eip, 1);
1134 static inline void gen_stos(DisasContext *s, int ot)
1136 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1137 gen_string_movl_A0_EDI(s);
1138 gen_op_st_T0_A0(ot + s->mem_index);
1139 gen_op_movl_T0_Dshift(ot);
1140 gen_op_add_reg_T0(s->aflag, R_EDI);
1143 static inline void gen_lods(DisasContext *s, int ot)
1145 gen_string_movl_A0_ESI(s);
1146 gen_op_ld_T0_A0(ot + s->mem_index);
1147 gen_op_mov_reg_T0(ot, R_EAX);
1148 gen_op_movl_T0_Dshift(ot);
1149 gen_op_add_reg_T0(s->aflag, R_ESI);
1152 static inline void gen_scas(DisasContext *s, int ot)
1154 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1155 gen_string_movl_A0_EDI(s);
1156 gen_op_ld_T1_A0(ot + s->mem_index);
1157 gen_op_cmpl_T0_T1_cc();
1158 gen_op_movl_T0_Dshift(ot);
1159 gen_op_add_reg_T0(s->aflag, R_EDI);
1162 static inline void gen_cmps(DisasContext *s, int ot)
1164 gen_string_movl_A0_ESI(s);
1165 gen_op_ld_T0_A0(ot + s->mem_index);
1166 gen_string_movl_A0_EDI(s);
1167 gen_op_ld_T1_A0(ot + s->mem_index);
1168 gen_op_cmpl_T0_T1_cc();
1169 gen_op_movl_T0_Dshift(ot);
1170 gen_op_add_reg_T0(s->aflag, R_ESI);
1171 gen_op_add_reg_T0(s->aflag, R_EDI);
1174 static inline void gen_ins(DisasContext *s, int ot)
1178 gen_string_movl_A0_EDI(s);
1179 /* Note: we must do this dummy write first to be restartable in
1180 case of page fault. */
1182 gen_op_st_T0_A0(ot + s->mem_index);
1183 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1184 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1185 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1186 gen_helper_in_func(ot, cpu_T[0], cpu_tmp2_i32);
1187 gen_op_st_T0_A0(ot + s->mem_index);
1188 gen_op_movl_T0_Dshift(ot);
1189 gen_op_add_reg_T0(s->aflag, R_EDI);
1194 static inline void gen_outs(DisasContext *s, int ot)
1198 gen_string_movl_A0_ESI(s);
1199 gen_op_ld_T0_A0(ot + s->mem_index);
1201 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1202 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1203 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1204 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1205 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
1207 gen_op_movl_T0_Dshift(ot);
1208 gen_op_add_reg_T0(s->aflag, R_ESI);
1213 /* same method as Valgrind: we generate jumps to current or next instruction */
1215 #define GEN_REPZ(op) \
1216 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1217 target_ulong cur_eip, target_ulong next_eip) \
1220 gen_update_cc_op(s); \
1221 l2 = gen_jz_ecx_string(s, next_eip); \
1222 gen_ ## op(s, ot); \
1223 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1224 /* a loop would cause two single step exceptions if ECX = 1 \
1225 before rep string_insn */ \
1227 gen_op_jz_ecx(s->aflag, l2); \
1228 gen_jmp(s, cur_eip); \
1231 #define GEN_REPZ2(op) \
1232 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1233 target_ulong cur_eip, \
1234 target_ulong next_eip, \
1238 gen_update_cc_op(s); \
1239 l2 = gen_jz_ecx_string(s, next_eip); \
1240 gen_ ## op(s, ot); \
1241 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1242 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1243 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1245 gen_op_jz_ecx(s->aflag, l2); \
1246 gen_jmp(s, cur_eip); \
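/* Sketch of how these macros are used (the instantiations are elided from this
   excerpt): GEN_REPZ(movs) defines gen_repz_movs() for plain REP prefixes,
   while GEN_REPZ2(scas)/GEN_REPZ2(cmps) take the extra 'nz' argument so that
   REPE/REPNE can also terminate on the ZF result of each iteration, via the
   gen_jcc1(..., (JCC_Z << 1) | (nz ^ 1), l2) test above. */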
1257 static void gen_helper_fp_arith_ST0_FT0(int op)
1260 case 0: gen_helper_fadd_ST0_FT0(); break;
1261 case 1: gen_helper_fmul_ST0_FT0(); break;
1262 case 2: gen_helper_fcom_ST0_FT0(); break;
1263 case 3: gen_helper_fcom_ST0_FT0(); break;
1264 case 4: gen_helper_fsub_ST0_FT0(); break;
1265 case 5: gen_helper_fsubr_ST0_FT0(); break;
1266 case 6: gen_helper_fdiv_ST0_FT0(); break;
1267 case 7: gen_helper_fdivr_ST0_FT0(); break;
1271 /* NOTE the exception in "r" op ordering */
1272 static void gen_helper_fp_arith_STN_ST0(int op, int opreg)
1274 TCGv_i32 tmp = tcg_const_i32(opreg);
1276 case 0: gen_helper_fadd_STN_ST0(tmp); break;
1277 case 1: gen_helper_fmul_STN_ST0(tmp); break;
1278 case 4: gen_helper_fsubr_STN_ST0(tmp); break;
1279 case 5: gen_helper_fsub_STN_ST0(tmp); break;
1280 case 6: gen_helper_fdivr_STN_ST0(tmp); break;
1281 case 7: gen_helper_fdiv_STN_ST0(tmp); break;
1285 /* if d == OR_TMP0, it means memory operand (address in A0) */
1286 static void gen_op(DisasContext *s1, int op, int ot, int d)
1289 gen_op_mov_TN_reg(ot, 0, d);
1291 gen_op_ld_T0_A0(ot + s1->mem_index);
1295 if (s1->cc_op != CC_OP_DYNAMIC)
1296 gen_op_set_cc_op(s1->cc_op);
1297 gen_compute_eflags_c(cpu_tmp4);
1298 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1299 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1301 gen_op_mov_reg_T0(ot, d);
1303 gen_op_st_T0_A0(ot + s1->mem_index);
1304 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1305 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1306 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1307 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1308 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1309 s1->cc_op = CC_OP_DYNAMIC;
1312 if (s1->cc_op != CC_OP_DYNAMIC)
1313 gen_op_set_cc_op(s1->cc_op);
1314 gen_compute_eflags_c(cpu_tmp4);
1315 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1316 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1318 gen_op_mov_reg_T0(ot, d);
1320 gen_op_st_T0_A0(ot + s1->mem_index);
1321 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1322 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1323 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1324 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1325 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1326 s1->cc_op = CC_OP_DYNAMIC;
1329 gen_op_addl_T0_T1();
1331 gen_op_mov_reg_T0(ot, d);
1333 gen_op_st_T0_A0(ot + s1->mem_index);
1334 gen_op_update2_cc();
1335 s1->cc_op = CC_OP_ADDB + ot;
1338 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1340 gen_op_mov_reg_T0(ot, d);
1342 gen_op_st_T0_A0(ot + s1->mem_index);
1343 gen_op_update2_cc();
1344 s1->cc_op = CC_OP_SUBB + ot;
1348 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1350 gen_op_mov_reg_T0(ot, d);
1352 gen_op_st_T0_A0(ot + s1->mem_index);
1353 gen_op_update1_cc();
1354 s1->cc_op = CC_OP_LOGICB + ot;
1357 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1359 gen_op_mov_reg_T0(ot, d);
1361 gen_op_st_T0_A0(ot + s1->mem_index);
1362 gen_op_update1_cc();
1363 s1->cc_op = CC_OP_LOGICB + ot;
1366 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1368 gen_op_mov_reg_T0(ot, d);
1370 gen_op_st_T0_A0(ot + s1->mem_index);
1371 gen_op_update1_cc();
1372 s1->cc_op = CC_OP_LOGICB + ot;
1375 gen_op_cmpl_T0_T1_cc();
1376 s1->cc_op = CC_OP_SUBB + ot;
1381 /* if d == OR_TMP0, it means memory operand (address in A0) */
1382 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1385 gen_op_mov_TN_reg(ot, 0, d);
1387 gen_op_ld_T0_A0(ot + s1->mem_index);
1388 if (s1->cc_op != CC_OP_DYNAMIC)
1389 gen_op_set_cc_op(s1->cc_op);
1391 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1392 s1->cc_op = CC_OP_INCB + ot;
1394 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1395 s1->cc_op = CC_OP_DECB + ot;
1398 gen_op_mov_reg_T0(ot, d);
1400 gen_op_st_T0_A0(ot + s1->mem_index);
1401 gen_compute_eflags_c(cpu_cc_src);
1402 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
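/* INC/DEC leave CF untouched, so the current carry is saved into cc_src here
   (gen_compute_eflags_c) and the CC_OP_INCB/CC_OP_DECB flag modes later
   recombine it with the freshly computed result in cc_dst. */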
1405 static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1406 int is_right, int is_arith)
1419 gen_op_ld_T0_A0(ot + s->mem_index);
1421 gen_op_mov_TN_reg(ot, 0, op1);
1423 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1425 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1429 gen_exts(ot, cpu_T[0]);
1430 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1431 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1433 gen_extu(ot, cpu_T[0]);
1434 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1435 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1438 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1439 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1444 gen_op_st_T0_A0(ot + s->mem_index);
1446 gen_op_mov_reg_T0(ot, op1);
1448 /* update eflags if non zero shift */
1449 if (s->cc_op != CC_OP_DYNAMIC)
1450 gen_op_set_cc_op(s->cc_op);
1452 /* XXX: inefficient */
1453 t0 = tcg_temp_local_new();
1454 t1 = tcg_temp_local_new();
1456 tcg_gen_mov_tl(t0, cpu_T[0]);
1457 tcg_gen_mov_tl(t1, cpu_T3);
1459 shift_label = gen_new_label();
1460 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1462 tcg_gen_mov_tl(cpu_cc_src, t1);
1463 tcg_gen_mov_tl(cpu_cc_dst, t0);
1465 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1467 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1469 gen_set_label(shift_label);
1470 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
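/* The extra shift by (count - 1) into cpu_T3 above captures the last bit
   shifted out, which becomes CF; since a shift by zero must leave EFLAGS
   unchanged, the flag update is guarded by the runtime count != 0 branch,
   and cc_op ends up dynamic. */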
1476 static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1477 int is_right, int is_arith)
1488 gen_op_ld_T0_A0(ot + s->mem_index);
1490 gen_op_mov_TN_reg(ot, 0, op1);
1496 gen_exts(ot, cpu_T[0]);
1497 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1498 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1500 gen_extu(ot, cpu_T[0]);
1501 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1502 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1505 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1506 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1512 gen_op_st_T0_A0(ot + s->mem_index);
1514 gen_op_mov_reg_T0(ot, op1);
1516 /* update eflags if non zero shift */
1518 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
1519 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1521 s->cc_op = CC_OP_SARB + ot;
1523 s->cc_op = CC_OP_SHLB + ot;
1527 static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1530 tcg_gen_shli_tl(ret, arg1, arg2);
1532 tcg_gen_shri_tl(ret, arg1, -arg2);
1535 static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
1539 int label1, label2, data_bits;
1540 TCGv t0, t1, t2, a0;
1542 /* XXX: inefficient, but we must use local temps */
1543 t0 = tcg_temp_local_new();
1544 t1 = tcg_temp_local_new();
1545 t2 = tcg_temp_local_new();
1546 a0 = tcg_temp_local_new();
1554 if (op1 == OR_TMP0) {
1555 tcg_gen_mov_tl(a0, cpu_A0);
1556 gen_op_ld_v(ot + s->mem_index, t0, a0);
1558 gen_op_mov_v_reg(ot, t0, op1);
1561 tcg_gen_mov_tl(t1, cpu_T[1]);
1563 tcg_gen_andi_tl(t1, t1, mask);
1565 /* Must test zero case to avoid using undefined behaviour in TCG logic op */
1567 label1 = gen_new_label();
1568 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
1571 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
1573 tcg_gen_mov_tl(cpu_tmp0, t1);
1576 tcg_gen_mov_tl(t2, t0);
1578 data_bits = 8 << ot;
1579 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1580 fix TCG definition) */
1582 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
1583 tcg_gen_subfi_tl(cpu_tmp0, data_bits, cpu_tmp0);
1584 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
1586 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
1587 tcg_gen_subfi_tl(cpu_tmp0, data_bits, cpu_tmp0);
1588 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
1590 tcg_gen_or_tl(t0, t0, cpu_tmp4);
1592 gen_set_label(label1);
1594 if (op1 == OR_TMP0) {
1595 gen_op_st_v(ot + s->mem_index, t0, a0);
1597 gen_op_mov_reg_v(ot, op1, t0);
1601 if (s->cc_op != CC_OP_DYNAMIC)
1602 gen_op_set_cc_op(s->cc_op);
1604 label2 = gen_new_label();
1605 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
1607 gen_compute_eflags(cpu_cc_src);
1608 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1609 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
1610 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1611 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1612 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1614 tcg_gen_shri_tl(t0, t0, data_bits - 1);
1616 tcg_gen_andi_tl(t0, t0, CC_C);
1617 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
1619 tcg_gen_discard_tl(cpu_cc_dst);
1620 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1622 gen_set_label(label2);
1623 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1631 static void gen_rot_rm_im(DisasContext *s, int ot, int op1, int op2,
1638 /* XXX: inefficient, but we must use local temps */
1639 t0 = tcg_temp_local_new();
1640 t1 = tcg_temp_local_new();
1641 a0 = tcg_temp_local_new();
1649 if (op1 == OR_TMP0) {
1650 tcg_gen_mov_tl(a0, cpu_A0);
1651 gen_op_ld_v(ot + s->mem_index, t0, a0);
1653 gen_op_mov_v_reg(ot, t0, op1);
1657 tcg_gen_mov_tl(t1, t0);
1660 data_bits = 8 << ot;
1662 int shift = op2 & ((1 << (3 + ot)) - 1);
1664 tcg_gen_shri_tl(cpu_tmp4, t0, shift);
1665 tcg_gen_shli_tl(t0, t0, data_bits - shift);
1668 tcg_gen_shli_tl(cpu_tmp4, t0, shift);
1669 tcg_gen_shri_tl(t0, t0, data_bits - shift);
1671 tcg_gen_or_tl(t0, t0, cpu_tmp4);
1675 if (op1 == OR_TMP0) {
1676 gen_op_st_v(ot + s->mem_index, t0, a0);
1678 gen_op_mov_reg_v(ot, op1, t0);
1683 if (s->cc_op != CC_OP_DYNAMIC)
1684 gen_op_set_cc_op(s->cc_op);
1686 gen_compute_eflags(cpu_cc_src);
1687 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1688 tcg_gen_xor_tl(cpu_tmp0, t1, t0);
1689 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1690 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1691 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1693 tcg_gen_shri_tl(t0, t0, data_bits - 1);
1695 tcg_gen_andi_tl(t0, t0, CC_C);
1696 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
1698 tcg_gen_discard_tl(cpu_cc_dst);
1699 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1700 s->cc_op = CC_OP_EFLAGS;
1708 /* XXX: add faster immediate = 1 case */
1709 static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
1714 if (s->cc_op != CC_OP_DYNAMIC)
1715 gen_op_set_cc_op(s->cc_op);
1719 gen_op_ld_T0_A0(ot + s->mem_index);
1721 gen_op_mov_TN_reg(ot, 0, op1);
1725 case 0: gen_helper_rcrb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1726 case 1: gen_helper_rcrw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1727 case 2: gen_helper_rcrl(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1728 #ifdef TARGET_X86_64
1729 case 3: gen_helper_rcrq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1734 case 0: gen_helper_rclb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1735 case 1: gen_helper_rclw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1736 case 2: gen_helper_rcll(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1737 #ifdef TARGET_X86_64
1738 case 3: gen_helper_rclq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1744 gen_op_st_T0_A0(ot + s->mem_index);
1746 gen_op_mov_reg_T0(ot, op1);
1749 label1 = gen_new_label();
1750 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
1752 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
1753 tcg_gen_discard_tl(cpu_cc_dst);
1754 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1756 gen_set_label(label1);
1757 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1760 /* XXX: add faster immediate case */
1761 static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
1764 int label1, label2, data_bits;
1766 TCGv t0, t1, t2, a0;
1768 t0 = tcg_temp_local_new();
1769 t1 = tcg_temp_local_new();
1770 t2 = tcg_temp_local_new();
1771 a0 = tcg_temp_local_new();
1779 if (op1 == OR_TMP0) {
1780 tcg_gen_mov_tl(a0, cpu_A0);
1781 gen_op_ld_v(ot + s->mem_index, t0, a0);
1783 gen_op_mov_v_reg(ot, t0, op1);
1786 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
1788 tcg_gen_mov_tl(t1, cpu_T[1]);
1789 tcg_gen_mov_tl(t2, cpu_T3);
1791 /* Must test zero case to avoid using undefined behaviour in TCG logic op */
1793 label1 = gen_new_label();
1794 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
1796 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
1797 if (ot == OT_WORD) {
1798 /* Note: we implement the Intel behaviour for shift count > 16 */
1800 tcg_gen_andi_tl(t0, t0, 0xffff);
1801 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
1802 tcg_gen_or_tl(t0, t0, cpu_tmp0);
1803 tcg_gen_ext32u_tl(t0, t0);
1805 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
1807 /* only needed if count > 16, but a test would complicate */
1808 tcg_gen_subfi_tl(cpu_tmp5, 32, t2);
1809 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
1811 tcg_gen_shr_tl(t0, t0, t2);
1813 tcg_gen_or_tl(t0, t0, cpu_tmp0);
1815 /* XXX: not optimal */
1816 tcg_gen_andi_tl(t0, t0, 0xffff);
1817 tcg_gen_shli_tl(t1, t1, 16);
1818 tcg_gen_or_tl(t1, t1, t0);
1819 tcg_gen_ext32u_tl(t1, t1);
1821 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
1822 tcg_gen_subfi_tl(cpu_tmp0, 32, cpu_tmp5);
1823 tcg_gen_shr_tl(cpu_tmp5, t1, cpu_tmp0);
1824 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp5);
1826 tcg_gen_shl_tl(t0, t0, t2);
1827 tcg_gen_subfi_tl(cpu_tmp5, 32, t2);
1828 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
1829 tcg_gen_or_tl(t0, t0, t1);
1832 data_bits = 8 << ot;
1835 tcg_gen_ext32u_tl(t0, t0);
1837 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
1839 tcg_gen_shr_tl(t0, t0, t2);
1840 tcg_gen_subfi_tl(cpu_tmp5, data_bits, t2);
1841 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
1842 tcg_gen_or_tl(t0, t0, t1);
1846 tcg_gen_ext32u_tl(t1, t1);
1848 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
1850 tcg_gen_shl_tl(t0, t0, t2);
1851 tcg_gen_subfi_tl(cpu_tmp5, data_bits, t2);
1852 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
1853 tcg_gen_or_tl(t0, t0, t1);
1856 tcg_gen_mov_tl(t1, cpu_tmp4);
1858 gen_set_label(label1);
1860 if (op1 == OR_TMP0) {
1861 gen_op_st_v(ot + s->mem_index, t0, a0);
1863 gen_op_mov_reg_v(ot, op1, t0);
1867 if (s->cc_op != CC_OP_DYNAMIC)
1868 gen_op_set_cc_op(s->cc_op);
1870 label2 = gen_new_label();
1871 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
1873 tcg_gen_mov_tl(cpu_cc_src, t1);
1874 tcg_gen_mov_tl(cpu_cc_dst, t0);
1876 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1878 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1880 gen_set_label(label2);
1881 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1889 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1892 gen_op_mov_TN_reg(ot, 1, s);
1895 gen_rot_rm_T1(s1, ot, d, 0);
1898 gen_rot_rm_T1(s1, ot, d, 1);
1902 gen_shift_rm_T1(s1, ot, d, 0, 0);
1905 gen_shift_rm_T1(s1, ot, d, 1, 0);
1908 gen_shift_rm_T1(s1, ot, d, 1, 1);
1911 gen_rotc_rm_T1(s1, ot, d, 0);
1914 gen_rotc_rm_T1(s1, ot, d, 1);
1919 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1923 gen_rot_rm_im(s1, ot, d, c, 0);
1926 gen_rot_rm_im(s1, ot, d, c, 1);
1930 gen_shift_rm_im(s1, ot, d, c, 0, 0);
1933 gen_shift_rm_im(s1, ot, d, c, 1, 0);
1936 gen_shift_rm_im(s1, ot, d, c, 1, 1);
1939 /* currently not optimized */
1940 gen_op_movl_T1_im(c);
1941 gen_shift(s1, op, ot, d, OR_TMP1);
1946 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1954 int mod, rm, code, override, must_add_seg;
1956 override = s->override;
1957 must_add_seg = s->addseg;
1960 mod = (modrm >> 6) & 3;
1972 code = ldub_code(s->pc++);
1973 scale = (code >> 6) & 3;
1974 index = ((code >> 3) & 7) | REX_X(s);
1981 if ((base & 7) == 5) {
1983 disp = (int32_t)ldl_code(s->pc);
1985 if (CODE64(s) && !havesib) {
1986 disp += s->pc + s->rip_offset;
1993 disp = (int8_t)ldub_code(s->pc++);
1997 disp = (int32_t)ldl_code(s->pc);
2003 /* for correct popl handling with esp */
2004 if (base == 4 && s->popl_esp_hack)
2005 disp += s->popl_esp_hack;
2006 #ifdef TARGET_X86_64
2007 if (s->aflag == 2) {
2008 gen_op_movq_A0_reg(base);
2010 gen_op_addq_A0_im(disp);
2015 gen_op_movl_A0_reg(base);
2017 gen_op_addl_A0_im(disp);
2020 #ifdef TARGET_X86_64
2021 if (s->aflag == 2) {
2022 gen_op_movq_A0_im(disp);
2026 gen_op_movl_A0_im(disp);
2029 /* index == 4 means no index */
2030 if (havesib && (index != 4)) {
2031 #ifdef TARGET_X86_64
2032 if (s->aflag == 2) {
2033 gen_op_addq_A0_reg_sN(scale, index);
2037 gen_op_addl_A0_reg_sN(scale, index);
2042 if (base == R_EBP || base == R_ESP)
2047 #ifdef TARGET_X86_64
2048 if (s->aflag == 2) {
2049 gen_op_addq_A0_seg(override);
2053 gen_op_addl_A0_seg(override);
2060 disp = lduw_code(s->pc);
2062 gen_op_movl_A0_im(disp);
2063 rm = 0; /* avoid SS override */
2070 disp = (int8_t)ldub_code(s->pc++);
2074 disp = lduw_code(s->pc);
2080 gen_op_movl_A0_reg(R_EBX);
2081 gen_op_addl_A0_reg_sN(0, R_ESI);
2084 gen_op_movl_A0_reg(R_EBX);
2085 gen_op_addl_A0_reg_sN(0, R_EDI);
2088 gen_op_movl_A0_reg(R_EBP);
2089 gen_op_addl_A0_reg_sN(0, R_ESI);
2092 gen_op_movl_A0_reg(R_EBP);
2093 gen_op_addl_A0_reg_sN(0, R_EDI);
2096 gen_op_movl_A0_reg(R_ESI);
2099 gen_op_movl_A0_reg(R_EDI);
2102 gen_op_movl_A0_reg(R_EBP);
2106 gen_op_movl_A0_reg(R_EBX);
2110 gen_op_addl_A0_im(disp);
2111 gen_op_andl_A0_ffff();
2115 if (rm == 2 || rm == 3 || rm == 6)
2120 gen_op_addl_A0_seg(override);
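/* The 16-bit ModRM cases above implement the classic addressing table:
   rm = 0..7 selects BX+SI, BX+DI, BP+SI, BP+DI, SI, DI, BP (or disp16 when
   mod == 0), BX; the rm == 2/3/6 test applies the default SS segment for the
   BP-based forms when no override is present. */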
2130 static void gen_nop_modrm(DisasContext *s, int modrm)
2132 int mod, rm, base, code;
2134 mod = (modrm >> 6) & 3;
2144 code = ldub_code(s->pc++);
2180 /* used for LEA and MOV AX, mem */
2181 static void gen_add_A0_ds_seg(DisasContext *s)
2183 int override, must_add_seg;
2184 must_add_seg = s->addseg;
2186 if (s->override >= 0) {
2187 override = s->override;
2191 #ifdef TARGET_X86_64
2193 gen_op_addq_A0_seg(override);
2197 gen_op_addl_A0_seg(override);
2202 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg == OR_TMP0 */
2204 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2206 int mod, rm, opreg, disp;
2208 mod = (modrm >> 6) & 3;
2209 rm = (modrm & 7) | REX_B(s);
2213 gen_op_mov_TN_reg(ot, 0, reg);
2214 gen_op_mov_reg_T0(ot, rm);
2216 gen_op_mov_TN_reg(ot, 0, rm);
2218 gen_op_mov_reg_T0(ot, reg);
2221 gen_lea_modrm(s, modrm, &opreg, &disp);
2224 gen_op_mov_TN_reg(ot, 0, reg);
2225 gen_op_st_T0_A0(ot + s->mem_index);
2227 gen_op_ld_T0_A0(ot + s->mem_index);
2229 gen_op_mov_reg_T0(ot, reg);
2234 static inline uint32_t insn_get(DisasContext *s, int ot)
2240 ret = ldub_code(s->pc);
2244 ret = lduw_code(s->pc);
2249 ret = ldl_code(s->pc);
2256 static inline int insn_const_size(unsigned int ot)
2264 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2266 TranslationBlock *tb;
2269 pc = s->cs_base + eip;
2271 /* NOTE: we handle the case where the TB spans two pages here */
2272 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2273 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2274 /* jump to same page: we can use a direct jump */
2275 tcg_gen_goto_tb(tb_num);
2277 tcg_gen_exit_tb((long)tb + tb_num);
2279 /* jump to another page: currently not optimized */
2285 static inline void gen_jcc(DisasContext *s, int b,
2286 target_ulong val, target_ulong next_eip)
2291 gen_update_cc_op(s);
2293 l1 = gen_new_label();
2294 gen_jcc1(s, cc_op, b, l1);
2296 gen_goto_tb(s, 0, next_eip);
2299 gen_goto_tb(s, 1, val);
2300 s->is_jmp = DISAS_TB_JUMP;
2303 l1 = gen_new_label();
2304 l2 = gen_new_label();
2305 gen_jcc1(s, cc_op, b, l1);
2307 gen_jmp_im(next_eip);
2317 static void gen_setcc(DisasContext *s, int b)
2319 int inv, jcc_op, l1;
2322 if (is_fast_jcc_case(s, b)) {
2323 /* nominal case: we use a jump */
2324 /* XXX: make it faster by adding new instructions in TCG */
2325 t0 = tcg_temp_local_new();
2326 tcg_gen_movi_tl(t0, 0);
2327 l1 = gen_new_label();
2328 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2329 tcg_gen_movi_tl(t0, 1);
2331 tcg_gen_mov_tl(cpu_T[0], t0);
2334 /* slow case: it is more efficient not to generate a jump,
2335 although it is questionable whether this optimization is worth it */
2338 jcc_op = (b >> 1) & 7;
2339 gen_setcc_slow_T0(s, jcc_op);
2341 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2346 static inline void gen_op_movl_T0_seg(int seg_reg)
2348 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2349 offsetof(CPUX86State,segs[seg_reg].selector));
2352 static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2354 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2355 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2356 offsetof(CPUX86State,segs[seg_reg].selector));
2357 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2358 tcg_gen_st_tl(cpu_T[0], cpu_env,
2359 offsetof(CPUX86State,segs[seg_reg].base));
2362 /* move T0 to seg_reg and compute if the CPU state may change. Never
2363 call this function with seg_reg == R_CS */
2364 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2366 if (s->pe && !s->vm86) {
2367 /* XXX: optimize by finding processor state dynamically */
2368 if (s->cc_op != CC_OP_DYNAMIC)
2369 gen_op_set_cc_op(s->cc_op);
2370 gen_jmp_im(cur_eip);
2371 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2372 gen_helper_load_seg(tcg_const_i32(seg_reg), cpu_tmp2_i32);
2373 /* abort translation because the addseg value may change or
2374 because ss32 may change. For R_SS, translation must always
2375 stop, since special handling must be done to disable hardware
2376 interrupts for the next instruction */
2377 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2378 s->is_jmp = DISAS_TB_JUMP;
2380 gen_op_movl_seg_T0_vm(seg_reg);
2381 if (seg_reg == R_SS)
2382 s->is_jmp = DISAS_TB_JUMP;
2386 static inline int svm_is_rep(int prefixes)
2388 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2392 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2393 uint32_t type, uint64_t param)
2395 /* no SVM activated; fast case */
2396 if (likely(!(s->flags & HF_SVMI_MASK)))
2398 if (s->cc_op != CC_OP_DYNAMIC)
2399 gen_op_set_cc_op(s->cc_op);
2400 gen_jmp_im(pc_start - s->cs_base);
2401 gen_helper_svm_check_intercept_param(tcg_const_i32(type),
2402 tcg_const_i64(param));
2406 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2408 gen_svm_check_intercept_param(s, pc_start, type, 0);
2411 static inline void gen_stack_update(DisasContext *s, int addend)
2413 #ifdef TARGET_X86_64
2415 gen_op_add_reg_im(2, R_ESP, addend);
2419 gen_op_add_reg_im(1, R_ESP, addend);
2421 gen_op_add_reg_im(0, R_ESP, addend);
2425 /* generate a push. It depends on ss32, addseg and dflag */
2426 static void gen_push_T0(DisasContext *s)
2428 #ifdef TARGET_X86_64
2430 gen_op_movq_A0_reg(R_ESP);
2432 gen_op_addq_A0_im(-8);
2433 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2435 gen_op_addq_A0_im(-2);
2436 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2438 gen_op_mov_reg_A0(2, R_ESP);
2442 gen_op_movl_A0_reg(R_ESP);
2444 gen_op_addl_A0_im(-2);
2446 gen_op_addl_A0_im(-4);
2449 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2450 gen_op_addl_A0_seg(R_SS);
2453 gen_op_andl_A0_ffff();
2454 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2455 gen_op_addl_A0_seg(R_SS);
2457 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2458 if (s->ss32 && !s->addseg)
2459 gen_op_mov_reg_A0(1, R_ESP);
2461 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
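/* Note the ordering above: the store of T0 happens before ESP (taken from A0
   or T1) is written back, so a push that faults on the stack access can be
   restarted with the original ESP value. */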
2465 /* generate a push. It depends on ss32, addseg and dflag */
2466 /* slower version for T1, only used for call Ev */
2467 static void gen_push_T1(DisasContext *s)
2469 #ifdef TARGET_X86_64
2471 gen_op_movq_A0_reg(R_ESP);
2473 gen_op_addq_A0_im(-8);
2474 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2476 gen_op_addq_A0_im(-2);
2477 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2479 gen_op_mov_reg_A0(2, R_ESP);
2483 gen_op_movl_A0_reg(R_ESP);
2485 gen_op_addl_A0_im(-2);
2487 gen_op_addl_A0_im(-4);
2490 gen_op_addl_A0_seg(R_SS);
2493 gen_op_andl_A0_ffff();
2494 gen_op_addl_A0_seg(R_SS);
2496 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2498 if (s->ss32 && !s->addseg)
2499 gen_op_mov_reg_A0(1, R_ESP);
2501 gen_stack_update(s, (-2) << s->dflag);
2505 /* two step pop is necessary for precise exceptions */
2506 static void gen_pop_T0(DisasContext *s)
2508 #ifdef TARGET_X86_64
2510 gen_op_movq_A0_reg(R_ESP);
2511 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2515 gen_op_movl_A0_reg(R_ESP);
2518 gen_op_addl_A0_seg(R_SS);
2520 gen_op_andl_A0_ffff();
2521 gen_op_addl_A0_seg(R_SS);
2523 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2527 static void gen_pop_update(DisasContext *s)
2529 #ifdef TARGET_X86_64
2530 if (CODE64(s) && s->dflag) {
2531 gen_stack_update(s, 8);
2535 gen_stack_update(s, 2 << s->dflag);
2539 static void gen_stack_A0(DisasContext *s)
2541 gen_op_movl_A0_reg(R_ESP);
2543 gen_op_andl_A0_ffff();
2544 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2546 gen_op_addl_A0_seg(R_SS);
2549 /* NOTE: wrap around in 16 bit not fully handled */
2550 static void gen_pusha(DisasContext *s)
2553 gen_op_movl_A0_reg(R_ESP);
2554 gen_op_addl_A0_im(-16 << s->dflag);
2556 gen_op_andl_A0_ffff();
2557 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2559 gen_op_addl_A0_seg(R_SS);
2560 for(i = 0;i < 8; i++) {
2561 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2562 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2563 gen_op_addl_A0_im(2 << s->dflag);
2565 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2568 /* NOTE: wrap around in 16 bit not fully handled */
2569 static void gen_popa(DisasContext *s)
2572 gen_op_movl_A0_reg(R_ESP);
2574 gen_op_andl_A0_ffff();
2575 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2576 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2578 gen_op_addl_A0_seg(R_SS);
2579 for(i = 0;i < 8; i++) {
2580 /* ESP is not reloaded */
2582 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2583 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2585 gen_op_addl_A0_im(2 << s->dflag);
2587 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2590 static void gen_enter(DisasContext *s, int esp_addend, int level)
2595 #ifdef TARGET_X86_64
2597 ot = s->dflag ? OT_QUAD : OT_WORD;
2600 gen_op_movl_A0_reg(R_ESP);
2601 gen_op_addq_A0_im(-opsize);
2602 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2605 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2606 gen_op_st_T0_A0(ot + s->mem_index);
2608 /* XXX: must save state */
2609 gen_helper_enter64_level(tcg_const_i32(level),
2610 tcg_const_i32((ot == OT_QUAD)),
2613 gen_op_mov_reg_T1(ot, R_EBP);
2614 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2615 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2619 ot = s->dflag + OT_WORD;
2620 opsize = 2 << s->dflag;
2622 gen_op_movl_A0_reg(R_ESP);
2623 gen_op_addl_A0_im(-opsize);
2625 gen_op_andl_A0_ffff();
2626 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2628 gen_op_addl_A0_seg(R_SS);
2630 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2631 gen_op_st_T0_A0(ot + s->mem_index);
2633 /* XXX: must save state */
2634 gen_helper_enter_level(tcg_const_i32(level),
2635 tcg_const_i32(s->dflag),
2638 gen_op_mov_reg_T1(ot, R_EBP);
2639 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2640 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2644 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2646 if (s->cc_op != CC_OP_DYNAMIC)
2647 gen_op_set_cc_op(s->cc_op);
2648 gen_jmp_im(cur_eip);
2649 gen_helper_raise_exception(tcg_const_i32(trapno));
2650 s->is_jmp = DISAS_TB_JUMP;
2653 /* an interrupt is different from an exception because of the privilege checks */
2655 static void gen_interrupt(DisasContext *s, int intno,
2656 target_ulong cur_eip, target_ulong next_eip)
2658 if (s->cc_op != CC_OP_DYNAMIC)
2659 gen_op_set_cc_op(s->cc_op);
2660 gen_jmp_im(cur_eip);
2661 gen_helper_raise_interrupt(tcg_const_i32(intno),
2662 tcg_const_i32(next_eip - cur_eip));
2663 s->is_jmp = DISAS_TB_JUMP;
2666 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2668 if (s->cc_op != CC_OP_DYNAMIC)
2669 gen_op_set_cc_op(s->cc_op);
2670 gen_jmp_im(cur_eip);
2672 s->is_jmp = DISAS_TB_JUMP;
2675 /* generate a generic end of block. Trace exception is also generated if needed */
2677 static void gen_eob(DisasContext *s)
2679 if (s->cc_op != CC_OP_DYNAMIC)
2680 gen_op_set_cc_op(s->cc_op);
2681 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2682 gen_helper_reset_inhibit_irq();
2684 if (s->tb->flags & HF_RF_MASK) {
2685 gen_helper_reset_rf();
2687 if (s->singlestep_enabled) {
2690 gen_helper_single_step();
2694 s->is_jmp = DISAS_TB_JUMP;
2697 /* generate a jump to eip. No segment change must happen before as a
2698 direct call to the next block may occur */
2699 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2702 gen_update_cc_op(s);
2703 gen_goto_tb(s, tb_num, eip);
2704 s->is_jmp = DISAS_TB_JUMP;
2711 static void gen_jmp(DisasContext *s, target_ulong eip)
2713 gen_jmp_tb(s, eip, 0);
2716 static inline void gen_ldq_env_A0(int idx, int offset)
2718 int mem_index = (idx >> 2) - 1;
2719 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2720 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2723 static inline void gen_stq_env_A0(int idx, int offset)
2725 int mem_index = (idx >> 2) - 1;
2726 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2727 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2730 static inline void gen_ldo_env_A0(int idx, int offset)
2732 int mem_index = (idx >> 2) - 1;
2733 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2734 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2735 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2736 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2737 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2740 static inline void gen_sto_env_A0(int idx, int offset)
2742 int mem_index = (idx >> 2) - 1;
2743 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2744 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2745 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2746 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2747 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
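/* There is no single 128-bit TCG memory op here, so gen_ldo/gen_sto split the
   XMM access into two 64-bit loads/stores at A0 and A0 + 8, moving the data
   through the cpu_tmp1_i64 temporary. */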
2750 static inline void gen_op_movo(int d_offset, int s_offset)
2752 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2753 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2754 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2755 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2758 static inline void gen_op_movq(int d_offset, int s_offset)
2760 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2761 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2764 static inline void gen_op_movl(int d_offset, int s_offset)
2766 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2767 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2770 static inline void gen_op_movq_env_0(int d_offset)
2772 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2773 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2776 #define SSE_SPECIAL ((void *)1)
2777 #define SSE_DUMMY ((void *)2)
2779 #define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm }
2780 #define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \
2781 gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, }
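/* Example expansions (for illustration):
     MMX_OP2(paddb) -> { gen_helper_paddb_mmx, gen_helper_paddb_xmm }
     SSE_FOP(add)   -> { gen_helper_addps, gen_helper_addpd,
                         gen_helper_addss, gen_helper_addsd }
   The four columns of sse_op_table1 correspond to the mandatory prefix:
   none, 0x66, 0xF3, 0xF2 (cf. the movups/movupd/movss/movsd rows below). */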
2783 static void *sse_op_table1[256][4] = {
2784 /* 3DNow! extensions */
2785 [0x0e] = { SSE_DUMMY }, /* femms */
2786 [0x0f] = { SSE_DUMMY }, /* pf... */
2787 /* pure SSE operations */
2788 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2789 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2790 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2791 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2792 [0x14] = { gen_helper_punpckldq_xmm, gen_helper_punpcklqdq_xmm },
2793 [0x15] = { gen_helper_punpckhdq_xmm, gen_helper_punpckhqdq_xmm },
2794 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2795 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2797 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2798 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2799 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2800 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd, movntss, movntsd */
2801 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2802 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2803 [0x2e] = { gen_helper_ucomiss, gen_helper_ucomisd },
2804 [0x2f] = { gen_helper_comiss, gen_helper_comisd },
2805 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2806 [0x51] = SSE_FOP(sqrt),
2807 [0x52] = { gen_helper_rsqrtps, NULL, gen_helper_rsqrtss, NULL },
2808 [0x53] = { gen_helper_rcpps, NULL, gen_helper_rcpss, NULL },
2809 [0x54] = { gen_helper_pand_xmm, gen_helper_pand_xmm }, /* andps, andpd */
2810 [0x55] = { gen_helper_pandn_xmm, gen_helper_pandn_xmm }, /* andnps, andnpd */
2811 [0x56] = { gen_helper_por_xmm, gen_helper_por_xmm }, /* orps, orpd */
2812 [0x57] = { gen_helper_pxor_xmm, gen_helper_pxor_xmm }, /* xorps, xorpd */
2813 [0x58] = SSE_FOP(add),
2814 [0x59] = SSE_FOP(mul),
2815 [0x5a] = { gen_helper_cvtps2pd, gen_helper_cvtpd2ps,
2816 gen_helper_cvtss2sd, gen_helper_cvtsd2ss },
2817 [0x5b] = { gen_helper_cvtdq2ps, gen_helper_cvtps2dq, gen_helper_cvttps2dq },
2818 [0x5c] = SSE_FOP(sub),
2819 [0x5d] = SSE_FOP(min),
2820 [0x5e] = SSE_FOP(div),
2821 [0x5f] = SSE_FOP(max),
2823 [0xc2] = SSE_FOP(cmpeq),
2824 [0xc6] = { gen_helper_shufps, gen_helper_shufpd },
2826 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
2827 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
2829 /* MMX ops and their SSE extensions */
2830 [0x60] = MMX_OP2(punpcklbw),
2831 [0x61] = MMX_OP2(punpcklwd),
2832 [0x62] = MMX_OP2(punpckldq),
2833 [0x63] = MMX_OP2(packsswb),
2834 [0x64] = MMX_OP2(pcmpgtb),
2835 [0x65] = MMX_OP2(pcmpgtw),
2836 [0x66] = MMX_OP2(pcmpgtl),
2837 [0x67] = MMX_OP2(packuswb),
2838 [0x68] = MMX_OP2(punpckhbw),
2839 [0x69] = MMX_OP2(punpckhwd),
2840 [0x6a] = MMX_OP2(punpckhdq),
2841 [0x6b] = MMX_OP2(packssdw),
2842 [0x6c] = { NULL, gen_helper_punpcklqdq_xmm },
2843 [0x6d] = { NULL, gen_helper_punpckhqdq_xmm },
2844 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2845 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2846 [0x70] = { gen_helper_pshufw_mmx,
2847 gen_helper_pshufd_xmm,
2848 gen_helper_pshufhw_xmm,
2849 gen_helper_pshuflw_xmm },
2850 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2851 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2852 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2853 [0x74] = MMX_OP2(pcmpeqb),
2854 [0x75] = MMX_OP2(pcmpeqw),
2855 [0x76] = MMX_OP2(pcmpeql),
2856 [0x77] = { SSE_DUMMY }, /* emms */
2857 [0x78] = { NULL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* extrq_i, insertq_i */
2858 [0x79] = { NULL, gen_helper_extrq_r, NULL, gen_helper_insertq_r },
2859 [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps },
2860 [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps },
2861 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
2862 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2863 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2864 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2865 [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps },
2866 [0xd1] = MMX_OP2(psrlw),
2867 [0xd2] = MMX_OP2(psrld),
2868 [0xd3] = MMX_OP2(psrlq),
2869 [0xd4] = MMX_OP2(paddq),
2870 [0xd5] = MMX_OP2(pmullw),
2871 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
2872 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2873 [0xd8] = MMX_OP2(psubusb),
2874 [0xd9] = MMX_OP2(psubusw),
2875 [0xda] = MMX_OP2(pminub),
2876 [0xdb] = MMX_OP2(pand),
2877 [0xdc] = MMX_OP2(paddusb),
2878 [0xdd] = MMX_OP2(paddusw),
2879 [0xde] = MMX_OP2(pmaxub),
2880 [0xdf] = MMX_OP2(pandn),
2881 [0xe0] = MMX_OP2(pavgb),
2882 [0xe1] = MMX_OP2(psraw),
2883 [0xe2] = MMX_OP2(psrad),
2884 [0xe3] = MMX_OP2(pavgw),
2885 [0xe4] = MMX_OP2(pmulhuw),
2886 [0xe5] = MMX_OP2(pmulhw),
2887 [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq },
2888 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2889 [0xe8] = MMX_OP2(psubsb),
2890 [0xe9] = MMX_OP2(psubsw),
2891 [0xea] = MMX_OP2(pminsw),
2892 [0xeb] = MMX_OP2(por),
2893 [0xec] = MMX_OP2(paddsb),
2894 [0xed] = MMX_OP2(paddsw),
2895 [0xee] = MMX_OP2(pmaxsw),
2896 [0xef] = MMX_OP2(pxor),
2897 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2898 [0xf1] = MMX_OP2(psllw),
2899 [0xf2] = MMX_OP2(pslld),
2900 [0xf3] = MMX_OP2(psllq),
2901 [0xf4] = MMX_OP2(pmuludq),
2902 [0xf5] = MMX_OP2(pmaddwd),
2903 [0xf6] = MMX_OP2(psadbw),
2904 [0xf7] = MMX_OP2(maskmov),
2905 [0xf8] = MMX_OP2(psubb),
2906 [0xf9] = MMX_OP2(psubw),
2907 [0xfa] = MMX_OP2(psubl),
2908 [0xfb] = MMX_OP2(psubq),
2909 [0xfc] = MMX_OP2(paddb),
2910 [0xfd] = MMX_OP2(paddw),
2911 [0xfe] = MMX_OP2(paddl),
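/* sse_op_table2 backs the shift-by-immediate groups 0x71/0x72/0x73: it is
   indexed as sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1], so
   each run of eight rows is one opcode and the column picks the MMX or XMM
   helper.  psrldq/pslldq only exist in the XMM column. */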
2914 static void *sse_op_table2[3 * 8][2] = {
2915 [0 + 2] = MMX_OP2(psrlw),
2916 [0 + 4] = MMX_OP2(psraw),
2917 [0 + 6] = MMX_OP2(psllw),
2918 [8 + 2] = MMX_OP2(psrld),
2919 [8 + 4] = MMX_OP2(psrad),
2920 [8 + 6] = MMX_OP2(pslld),
2921 [16 + 2] = MMX_OP2(psrlq),
2922 [16 + 3] = { NULL, gen_helper_psrldq_xmm },
2923 [16 + 6] = MMX_OP2(psllq),
2924 [16 + 7] = { NULL, gen_helper_pslldq_xmm },
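/* sse_op_table3 holds the scalar integer<->float conversion helpers in
   three groups of four: cvtsi2ss/sd, cvttss/sd2si and cvtss/sd2si.  Within
   each group the first two slots are the 32-bit forms and the X86_64_ONLY
   slots the 64-bit forms, which are NULL on 32-bit-only builds. */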
2927 static void *sse_op_table3[4 * 3] = {
2928 gen_helper_cvtsi2ss,
2929 gen_helper_cvtsi2sd,
2930 X86_64_ONLY(gen_helper_cvtsq2ss),
2931 X86_64_ONLY(gen_helper_cvtsq2sd),
2933 gen_helper_cvttss2si,
2934 gen_helper_cvttsd2si,
2935 X86_64_ONLY(gen_helper_cvttss2sq),
2936 X86_64_ONLY(gen_helper_cvttsd2sq),
2938 gen_helper_cvtss2si,
2939 gen_helper_cvtsd2si,
2940 X86_64_ONLY(gen_helper_cvtss2sq),
2941 X86_64_ONLY(gen_helper_cvtsd2sq),
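/* sse_op_table4, indexed by the imm8 predicate of the 0xc2 compare
   instructions and by b1, maps to the eight cmpps/cmppd/cmpss/cmpsd
   predicates (eq, lt, le, unord, neq, nlt, nle, ord). */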
2944 static void *sse_op_table4[8][4] = {
2955 static void *sse_op_table5[256] = {
2956 [0x0c] = gen_helper_pi2fw,
2957 [0x0d] = gen_helper_pi2fd,
2958 [0x1c] = gen_helper_pf2iw,
2959 [0x1d] = gen_helper_pf2id,
2960 [0x8a] = gen_helper_pfnacc,
2961 [0x8e] = gen_helper_pfpnacc,
2962 [0x90] = gen_helper_pfcmpge,
2963 [0x94] = gen_helper_pfmin,
2964 [0x96] = gen_helper_pfrcp,
2965 [0x97] = gen_helper_pfrsqrt,
2966 [0x9a] = gen_helper_pfsub,
2967 [0x9e] = gen_helper_pfadd,
2968 [0xa0] = gen_helper_pfcmpgt,
2969 [0xa4] = gen_helper_pfmax,
2970 [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */
2971 [0xa7] = gen_helper_movq, /* pfrsqit1 */
2972 [0xaa] = gen_helper_pfsubr,
2973 [0xae] = gen_helper_pfacc,
2974 [0xb0] = gen_helper_pfcmpeq,
2975 [0xb4] = gen_helper_pfmul,
2976 [0xb6] = gen_helper_movq, /* pfrcpit2 */
2977 [0xb7] = gen_helper_pmulhrw_mmx,
2978 [0xbb] = gen_helper_pswapd,
2979 [0xbf] = gen_helper_pavgb_mmx /* pavgusb */
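/* The three-byte opcode maps: sse_op_table6 covers 0x0f 0x38 (SSSE3/SSE4
   ops without an immediate) and sse_op_table7 covers 0x0f 0x3a (the imm8
   forms).  Each entry pairs the MMX/XMM helpers with the CPUID extension
   bit that must be present for the encoding to be legal. */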
2982 struct sse_op_helper_s {
2983 void *op[2]; uint32_t ext_mask;
2985 #define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
2986 #define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
2987 #define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
2988 #define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
2989 static struct sse_op_helper_s sse_op_table6[256] = {
2990 [0x00] = SSSE3_OP(pshufb),
2991 [0x01] = SSSE3_OP(phaddw),
2992 [0x02] = SSSE3_OP(phaddd),
2993 [0x03] = SSSE3_OP(phaddsw),
2994 [0x04] = SSSE3_OP(pmaddubsw),
2995 [0x05] = SSSE3_OP(phsubw),
2996 [0x06] = SSSE3_OP(phsubd),
2997 [0x07] = SSSE3_OP(phsubsw),
2998 [0x08] = SSSE3_OP(psignb),
2999 [0x09] = SSSE3_OP(psignw),
3000 [0x0a] = SSSE3_OP(psignd),
3001 [0x0b] = SSSE3_OP(pmulhrsw),
3002 [0x10] = SSE41_OP(pblendvb),
3003 [0x14] = SSE41_OP(blendvps),
3004 [0x15] = SSE41_OP(blendvpd),
3005 [0x17] = SSE41_OP(ptest),
3006 [0x1c] = SSSE3_OP(pabsb),
3007 [0x1d] = SSSE3_OP(pabsw),
3008 [0x1e] = SSSE3_OP(pabsd),
3009 [0x20] = SSE41_OP(pmovsxbw),
3010 [0x21] = SSE41_OP(pmovsxbd),
3011 [0x22] = SSE41_OP(pmovsxbq),
3012 [0x23] = SSE41_OP(pmovsxwd),
3013 [0x24] = SSE41_OP(pmovsxwq),
3014 [0x25] = SSE41_OP(pmovsxdq),
3015 [0x28] = SSE41_OP(pmuldq),
3016 [0x29] = SSE41_OP(pcmpeqq),
3017 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3018 [0x2b] = SSE41_OP(packusdw),
3019 [0x30] = SSE41_OP(pmovzxbw),
3020 [0x31] = SSE41_OP(pmovzxbd),
3021 [0x32] = SSE41_OP(pmovzxbq),
3022 [0x33] = SSE41_OP(pmovzxwd),
3023 [0x34] = SSE41_OP(pmovzxwq),
3024 [0x35] = SSE41_OP(pmovzxdq),
3025 [0x37] = SSE42_OP(pcmpgtq),
3026 [0x38] = SSE41_OP(pminsb),
3027 [0x39] = SSE41_OP(pminsd),
3028 [0x3a] = SSE41_OP(pminuw),
3029 [0x3b] = SSE41_OP(pminud),
3030 [0x3c] = SSE41_OP(pmaxsb),
3031 [0x3d] = SSE41_OP(pmaxsd),
3032 [0x3e] = SSE41_OP(pmaxuw),
3033 [0x3f] = SSE41_OP(pmaxud),
3034 [0x40] = SSE41_OP(pmulld),
3035 [0x41] = SSE41_OP(phminposuw),
3038 static struct sse_op_helper_s sse_op_table7[256] = {
3039 [0x08] = SSE41_OP(roundps),
3040 [0x09] = SSE41_OP(roundpd),
3041 [0x0a] = SSE41_OP(roundss),
3042 [0x0b] = SSE41_OP(roundsd),
3043 [0x0c] = SSE41_OP(blendps),
3044 [0x0d] = SSE41_OP(blendpd),
3045 [0x0e] = SSE41_OP(pblendw),
3046 [0x0f] = SSSE3_OP(palignr),
3047 [0x14] = SSE41_SPECIAL, /* pextrb */
3048 [0x15] = SSE41_SPECIAL, /* pextrw */
3049 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3050 [0x17] = SSE41_SPECIAL, /* extractps */
3051 [0x20] = SSE41_SPECIAL, /* pinsrb */
3052 [0x21] = SSE41_SPECIAL, /* insertps */
3053 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3054 [0x40] = SSE41_OP(dpps),
3055 [0x41] = SSE41_OP(dppd),
3056 [0x42] = SSE41_OP(mpsadbw),
3057 [0x60] = SSE42_OP(pcmpestrm),
3058 [0x61] = SSE42_OP(pcmpestri),
3059 [0x62] = SSE42_OP(pcmpistrm),
3060 [0x63] = SSE42_OP(pcmpistri),
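/* gen_sse() translates a single MMX/SSE/3DNow! instruction.  b1 is the
   mandatory-prefix index computed below, and for the SSE_SPECIAL handlers
   the opcode is combined with it as (b | (b1 << 8)), which is why the case
   labels read e.g. 0x210 for "opcode 0x10 with an F3 prefix" or 0x1e7 for
   "opcode 0xe7 with a 0x66 prefix". */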
3063 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3065 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3066 int modrm, mod, rm, reg, reg_addr, offset_addr;
3070 if (s->prefix & PREFIX_DATA)
3072 else if (s->prefix & PREFIX_REPZ)
3074 else if (s->prefix & PREFIX_REPNZ)
3078 sse_op2 = sse_op_table1[b][b1];
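/* b1 selects the column of sse_op_table1: 0 = no mandatory prefix,
   1 = 0x66, 2 = 0xf3 (repz), 3 = 0xf2 (repnz). */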
3081 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3091 /* simple MMX/SSE operation */
3092 if (s->flags & HF_TS_MASK) {
3093 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3096 if (s->flags & HF_EM_MASK) {
3098 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3101 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3102 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3105 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3116 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3117 the static cpu state) */
3119 gen_helper_enter_mmx();
3122 modrm = ldub_code(s->pc++);
3123 reg = ((modrm >> 3) & 7);
3126 mod = (modrm >> 6) & 3;
3127 if (sse_op2 == SSE_SPECIAL) {
3130 case 0x0e7: /* movntq */
3133 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3134 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3136 case 0x1e7: /* movntdq */
3137 case 0x02b: /* movntps */
3138 case 0x12b: /* movntpd */
3141 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3142 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3144 case 0x3f0: /* lddqu */
3147 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3148 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3150 case 0x22b: /* movntss */
3151 case 0x32b: /* movntsd */
3154 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3156 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,
3159 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3160 xmm_regs[reg].XMM_L(0)));
3161 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3164 case 0x6e: /* movd mm, ea */
3165 #ifdef TARGET_X86_64
3166 if (s->dflag == 2) {
3167 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3168 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3172 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3173 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3174 offsetof(CPUX86State,fpregs[reg].mmx));
3175 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3176 gen_helper_movl_mm_T0_mmx(cpu_ptr0, cpu_tmp2_i32);
3179 case 0x16e: /* movd xmm, ea */
3180 #ifdef TARGET_X86_64
3181 if (s->dflag == 2) {
3182 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3183 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3184 offsetof(CPUX86State,xmm_regs[reg]));
3185 gen_helper_movq_mm_T0_xmm(cpu_ptr0, cpu_T[0]);
3189 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3190 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3191 offsetof(CPUX86State,xmm_regs[reg]));
3192 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3193 gen_helper_movl_mm_T0_xmm(cpu_ptr0, cpu_tmp2_i32);
3196 case 0x6f: /* movq mm, ea */
3198 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3199 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3202 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3203 offsetof(CPUX86State,fpregs[rm].mmx));
3204 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3205 offsetof(CPUX86State,fpregs[reg].mmx));
3208 case 0x010: /* movups */
3209 case 0x110: /* movupd */
3210 case 0x028: /* movaps */
3211 case 0x128: /* movapd */
3212 case 0x16f: /* movdqa xmm, ea */
3213 case 0x26f: /* movdqu xmm, ea */
3215 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3216 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3218 rm = (modrm & 7) | REX_B(s);
3219 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3220 offsetof(CPUX86State,xmm_regs[rm]));
3223 case 0x210: /* movss xmm, ea */
3225 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3226 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3227 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3229 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3230 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3231 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3233 rm = (modrm & 7) | REX_B(s);
3234 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3235 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3238 case 0x310: /* movsd xmm, ea */
3240 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3241 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3243 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3244 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3246 rm = (modrm & 7) | REX_B(s);
3247 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3248 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3251 case 0x012: /* movlps */
3252 case 0x112: /* movlpd */
3254 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3255 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3258 rm = (modrm & 7) | REX_B(s);
3259 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3260 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3263 case 0x212: /* movsldup */
3265 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3266 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3268 rm = (modrm & 7) | REX_B(s);
3269 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3270 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3271 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3272 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3274 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3275 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3276 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3277 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3279 case 0x312: /* movddup */
3281 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3282 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3284 rm = (modrm & 7) | REX_B(s);
3285 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3286 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3288 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3289 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3291 case 0x016: /* movhps */
3292 case 0x116: /* movhpd */
3294 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3295 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3298 rm = (modrm & 7) | REX_B(s);
3299 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3300 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3303 case 0x216: /* movshdup */
3305 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3306 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3308 rm = (modrm & 7) | REX_B(s);
3309 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3310 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3311 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3312 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3314 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3315 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3316 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3317 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3322 int bit_index, field_length;
3324 if (b1 == 1 && reg != 0)
3326 field_length = ldub_code(s->pc++) & 0x3F;
3327 bit_index = ldub_code(s->pc++) & 0x3F;
3328 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3329 offsetof(CPUX86State,xmm_regs[reg]));
3331 gen_helper_extrq_i(cpu_ptr0, tcg_const_i32(bit_index),
3332 tcg_const_i32(field_length));
3334 gen_helper_insertq_i(cpu_ptr0, tcg_const_i32(bit_index),
3335 tcg_const_i32(field_length));
3338 case 0x7e: /* movd ea, mm */
3339 #ifdef TARGET_X86_64
3340 if (s->dflag == 2) {
3341 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3342 offsetof(CPUX86State,fpregs[reg].mmx));
3343 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3347 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3348 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3349 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3352 case 0x17e: /* movd ea, xmm */
3353 #ifdef TARGET_X86_64
3354 if (s->dflag == 2) {
3355 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3356 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3357 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3361 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3362 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3363 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3366 case 0x27e: /* movq xmm, ea */
3368 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3369 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3371 rm = (modrm & 7) | REX_B(s);
3372 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3373 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3375 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3377 case 0x7f: /* movq ea, mm */
3379 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3380 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3383 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3384 offsetof(CPUX86State,fpregs[reg].mmx));
3387 case 0x011: /* movups */
3388 case 0x111: /* movupd */
3389 case 0x029: /* movaps */
3390 case 0x129: /* movapd */
3391 case 0x17f: /* movdqa ea, xmm */
3392 case 0x27f: /* movdqu ea, xmm */
3394 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3395 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3397 rm = (modrm & 7) | REX_B(s);
3398 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3399 offsetof(CPUX86State,xmm_regs[reg]));
3402 case 0x211: /* movss ea, xmm */
3404 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3405 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3406 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3408 rm = (modrm & 7) | REX_B(s);
3409 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3410 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3413 case 0x311: /* movsd ea, xmm */
3415 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3416 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3418 rm = (modrm & 7) | REX_B(s);
3419 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3420 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3423 case 0x013: /* movlps */
3424 case 0x113: /* movlpd */
3426 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3427 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3432 case 0x017: /* movhps */
3433 case 0x117: /* movhpd */
3435 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3436 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3441 case 0x71: /* shift mm, im */
3444 case 0x171: /* shift xmm, im */
3450 val = ldub_code(s->pc++);
3452 gen_op_movl_T0_im(val);
3453 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3455 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3456 op1_offset = offsetof(CPUX86State,xmm_t0);
3458 gen_op_movl_T0_im(val);
3459 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3461 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3462 op1_offset = offsetof(CPUX86State,mmx_t0);
3464 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3468 rm = (modrm & 7) | REX_B(s);
3469 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3472 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3474 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3475 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3476 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
3478 case 0x050: /* movmskps */
3479 rm = (modrm & 7) | REX_B(s);
3480 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3481 offsetof(CPUX86State,xmm_regs[rm]));
3482 gen_helper_movmskps(cpu_tmp2_i32, cpu_ptr0);
3483 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3484 gen_op_mov_reg_T0(OT_LONG, reg);
3486 case 0x150: /* movmskpd */
3487 rm = (modrm & 7) | REX_B(s);
3488 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3489 offsetof(CPUX86State,xmm_regs[rm]));
3490 gen_helper_movmskpd(cpu_tmp2_i32, cpu_ptr0);
3491 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3492 gen_op_mov_reg_T0(OT_LONG, reg);
3494 case 0x02a: /* cvtpi2ps */
3495 case 0x12a: /* cvtpi2pd */
3496 gen_helper_enter_mmx();
3498 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3499 op2_offset = offsetof(CPUX86State,mmx_t0);
3500 gen_ldq_env_A0(s->mem_index, op2_offset);
3503 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3505 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3506 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3507 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3510 gen_helper_cvtpi2ps(cpu_ptr0, cpu_ptr1);
3514 gen_helper_cvtpi2pd(cpu_ptr0, cpu_ptr1);
3518 case 0x22a: /* cvtsi2ss */
3519 case 0x32a: /* cvtsi2sd */
3520 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3521 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3522 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3523 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3524 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3525 if (ot == OT_LONG) {
3526 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3527 ((void (*)(TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_tmp2_i32);
3529 ((void (*)(TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_T[0]);
3532 case 0x02c: /* cvttps2pi */
3533 case 0x12c: /* cvttpd2pi */
3534 case 0x02d: /* cvtps2pi */
3535 case 0x12d: /* cvtpd2pi */
3536 gen_helper_enter_mmx();
3538 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3539 op2_offset = offsetof(CPUX86State,xmm_t0);
3540 gen_ldo_env_A0(s->mem_index, op2_offset);
3542 rm = (modrm & 7) | REX_B(s);
3543 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3545 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3546 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3547 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3550 gen_helper_cvttps2pi(cpu_ptr0, cpu_ptr1);
3553 gen_helper_cvttpd2pi(cpu_ptr0, cpu_ptr1);
3556 gen_helper_cvtps2pi(cpu_ptr0, cpu_ptr1);
3559 gen_helper_cvtpd2pi(cpu_ptr0, cpu_ptr1);
3563 case 0x22c: /* cvttss2si */
3564 case 0x32c: /* cvttsd2si */
3565 case 0x22d: /* cvtss2si */
3566 case 0x32d: /* cvtsd2si */
3567 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3569 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3571 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3573 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3574 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3576 op2_offset = offsetof(CPUX86State,xmm_t0);
3578 rm = (modrm & 7) | REX_B(s);
3579 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3581 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3583 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3584 if (ot == OT_LONG) {
3585 ((void (*)(TCGv_i32, TCGv_ptr))sse_op2)(cpu_tmp2_i32, cpu_ptr0);
3586 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3588 ((void (*)(TCGv, TCGv_ptr))sse_op2)(cpu_T[0], cpu_ptr0);
3590 gen_op_mov_reg_T0(ot, reg);
3592 case 0xc4: /* pinsrw */
3595 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3596 val = ldub_code(s->pc++);
3599 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3600 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3603 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3604 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3607 case 0xc5: /* pextrw */
3611 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3612 val = ldub_code(s->pc++);
3615 rm = (modrm & 7) | REX_B(s);
3616 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3617 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3621 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3622 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3624 reg = ((modrm >> 3) & 7) | rex_r;
3625 gen_op_mov_reg_T0(ot, reg);
3627 case 0x1d6: /* movq ea, xmm */
3629 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3630 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3632 rm = (modrm & 7) | REX_B(s);
3633 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3634 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3635 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3638 case 0x2d6: /* movq2dq */
3639 gen_helper_enter_mmx();
3641 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3642 offsetof(CPUX86State,fpregs[rm].mmx));
3643 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3645 case 0x3d6: /* movdq2q */
3646 gen_helper_enter_mmx();
3647 rm = (modrm & 7) | REX_B(s);
3648 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3649 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3651 case 0xd7: /* pmovmskb */
3656 rm = (modrm & 7) | REX_B(s);
3657 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3658 gen_helper_pmovmskb_xmm(cpu_tmp2_i32, cpu_ptr0);
3661 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3662 gen_helper_pmovmskb_mmx(cpu_tmp2_i32, cpu_ptr0);
3664 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3665 reg = ((modrm >> 3) & 7) | rex_r;
3666 gen_op_mov_reg_T0(OT_LONG, reg);
3669 if (s->prefix & PREFIX_REPNZ)
3673 modrm = ldub_code(s->pc++);
3675 reg = ((modrm >> 3) & 7) | rex_r;
3676 mod = (modrm >> 6) & 3;
3681 sse_op2 = sse_op_table6[b].op[b1];
3684 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
3688 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3690 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3692 op2_offset = offsetof(CPUX86State,xmm_t0);
3693 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3695 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
3696 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
3697 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
3698 gen_ldq_env_A0(s->mem_index, op2_offset +
3699 offsetof(XMMReg, XMM_Q(0)));
3701 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
3702 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
3703 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3704 (s->mem_index >> 2) - 1);
3705 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3706 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
3707 offsetof(XMMReg, XMM_L(0)));
3709 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
3710 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
3711 (s->mem_index >> 2) - 1);
3712 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
3713 offsetof(XMMReg, XMM_W(0)));
3715 case 0x2a: /* movntdqa */
3716 gen_ldo_env_A0(s->mem_index, op1_offset);
3719 gen_ldo_env_A0(s->mem_index, op2_offset);
3723 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3725 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3727 op2_offset = offsetof(CPUX86State,mmx_t0);
3728 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3729 gen_ldq_env_A0(s->mem_index, op2_offset);
3732 if (sse_op2 == SSE_SPECIAL)
3735 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3736 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3737 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
3740 s->cc_op = CC_OP_EFLAGS;
3742 case 0x338: /* crc32 */
3745 modrm = ldub_code(s->pc++);
3746 reg = ((modrm >> 3) & 7) | rex_r;
3748 if (b != 0xf0 && b != 0xf1)
3750 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
3755 else if (b == 0xf1 && s->dflag != 2)
3756 if (s->prefix & PREFIX_DATA)
3763 gen_op_mov_TN_reg(OT_LONG, 0, reg);
3764 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3765 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3766 gen_helper_crc32(cpu_T[0], cpu_tmp2_i32,
3767 cpu_T[0], tcg_const_i32(8 << ot));
3769 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3770 gen_op_mov_reg_T0(ot, reg);
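/* SSE4.2 crc32: the helper folds (8 << ot) bits of the source operand into
   the running CRC taken from the destination register, and the result is
   written back with the operand size chosen above. */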
3775 modrm = ldub_code(s->pc++);
3777 reg = ((modrm >> 3) & 7) | rex_r;
3778 mod = (modrm >> 6) & 3;
3783 sse_op2 = sse_op_table7[b].op[b1];
3786 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
3789 if (sse_op2 == SSE_SPECIAL) {
3790 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3791 rm = (modrm & 7) | REX_B(s);
3793 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3794 reg = ((modrm >> 3) & 7) | rex_r;
3795 val = ldub_code(s->pc++);
3797 case 0x14: /* pextrb */
3798 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3799 xmm_regs[reg].XMM_B(val & 15)));
3801 gen_op_mov_reg_T0(ot, rm);
3803 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
3804 (s->mem_index >> 2) - 1);
3806 case 0x15: /* pextrw */
3807 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3808 xmm_regs[reg].XMM_W(val & 7)));
3810 gen_op_mov_reg_T0(ot, rm);
3812 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
3813 (s->mem_index >> 2) - 1);
3816 if (ot == OT_LONG) { /* pextrd */
3817 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3818 offsetof(CPUX86State,
3819 xmm_regs[reg].XMM_L(val & 3)));
3820 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3822 gen_op_mov_reg_v(ot, rm, cpu_T[0]);
3824 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
3825 (s->mem_index >> 2) - 1);
3826 } else { /* pextrq */
3827 #ifdef TARGET_X86_64
3828 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3829 offsetof(CPUX86State,
3830 xmm_regs[reg].XMM_Q(val & 1)));
3832 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
3834 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
3835 (s->mem_index >> 2) - 1);
3841 case 0x17: /* extractps */
3842 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3843 xmm_regs[reg].XMM_L(val & 3)));
3845 gen_op_mov_reg_T0(ot, rm);
3847 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
3848 (s->mem_index >> 2) - 1);
3850 case 0x20: /* pinsrb */
3852 gen_op_mov_TN_reg(OT_LONG, 0, rm);
3854 tcg_gen_qemu_ld8u(cpu_tmp0, cpu_A0,
3855 (s->mem_index >> 2) - 1);
3856 tcg_gen_st8_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State,
3857 xmm_regs[reg].XMM_B(val & 15)));
3859 case 0x21: /* insertps */
3861 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3862 offsetof(CPUX86State,xmm_regs[rm]
3863 .XMM_L((val >> 6) & 3)));
3865 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3866 (s->mem_index >> 2) - 1);
3867 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3869 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3870 offsetof(CPUX86State,xmm_regs[reg]
3871 .XMM_L((val >> 4) & 3)));
3873 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3874 cpu_env, offsetof(CPUX86State,
3875 xmm_regs[reg].XMM_L(0)));
3877 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3878 cpu_env, offsetof(CPUX86State,
3879 xmm_regs[reg].XMM_L(1)));
3881 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3882 cpu_env, offsetof(CPUX86State,
3883 xmm_regs[reg].XMM_L(2)));
3885 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3886 cpu_env, offsetof(CPUX86State,
3887 xmm_regs[reg].XMM_L(3)));
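/* insertps imm8 layout: bits 7:6 select the source dword (register form),
   bits 5:4 the destination dword, and bits 3:0 are a zero mask honoured by
   the conditional float32_zero stores above. */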
3890 if (ot == OT_LONG) { /* pinsrd */
3892 gen_op_mov_v_reg(ot, cpu_tmp0, rm);
3894 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3895 (s->mem_index >> 2) - 1);
3896 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3897 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3898 offsetof(CPUX86State,
3899 xmm_regs[reg].XMM_L(val & 3)));
3900 } else { /* pinsrq */
3901 #ifdef TARGET_X86_64
3903 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
3905 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
3906 (s->mem_index >> 2) - 1);
3907 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3908 offsetof(CPUX86State,
3909 xmm_regs[reg].XMM_Q(val & 1)));
3920 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3922 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3924 op2_offset = offsetof(CPUX86State,xmm_t0);
3925 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3926 gen_ldo_env_A0(s->mem_index, op2_offset);
3929 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3931 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3933 op2_offset = offsetof(CPUX86State,mmx_t0);
3934 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3935 gen_ldq_env_A0(s->mem_index, op2_offset);
3938 val = ldub_code(s->pc++);
3940 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
3941 s->cc_op = CC_OP_EFLAGS;
3944 /* The helper must use entire 64-bit gp registers */
3948 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3949 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3950 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3956 /* generic MMX or SSE operation */
3958 case 0x70: /* pshufw/pshufd/pshufhw/pshuflw */
3959 case 0xc6: /* shufps/shufpd */
3960 case 0xc2: /* compare insns */
3967 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3969 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3970 op2_offset = offsetof(CPUX86State,xmm_t0);
3971 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3973 /* specific case for SSE single instructions */
3976 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3977 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3980 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3983 gen_ldo_env_A0(s->mem_index, op2_offset);
3986 rm = (modrm & 7) | REX_B(s);
3987 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3990 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3992 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3993 op2_offset = offsetof(CPUX86State,mmx_t0);
3994 gen_ldq_env_A0(s->mem_index, op2_offset);
3997 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4001 case 0x0f: /* 3DNow! data insns */
4002 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4004 val = ldub_code(s->pc++);
4005 sse_op2 = sse_op_table5[val];
4008 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4009 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4010 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4012 case 0x70: /* pshufw/pshufd/pshufhw/pshuflw */
4013 case 0xc6: /* shufps/shufpd */
4014 val = ldub_code(s->pc++);
4015 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4016 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4017 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4021 val = ldub_code(s->pc++);
4024 sse_op2 = sse_op_table4[val][b1];
4025 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4026 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4027 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4030 /* maskmov : we must prepare A0 */
4033 #ifdef TARGET_X86_64
4034 if (s->aflag == 2) {
4035 gen_op_movq_A0_reg(R_EDI);
4039 gen_op_movl_A0_reg(R_EDI);
4041 gen_op_andl_A0_ffff();
4043 gen_add_A0_ds_seg(s);
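/* maskmovq/maskmovdqu always store through DS:rDI (honouring any segment
   override), so A0 is rebuilt from EDI/RDI here instead of coming from the
   modrm byte. */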
4045 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4046 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4047 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_ptr1, cpu_A0);
4050 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4051 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4052 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4055 if (b == 0x2e || b == 0x2f) {
4056 s->cc_op = CC_OP_EFLAGS;
4061 /* convert one instruction. s->is_jmp is set if the translation must
4062 be stopped. Return the next pc value */
4063 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4065 int b, prefixes, aflag, dflag;
4067 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4068 target_ulong next_eip, tval;
4071 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
4072 tcg_gen_debug_insn_start(pc_start);
4080 #ifdef TARGET_X86_64
4085 s->rip_offset = 0; /* for relative ip address */
4087 b = ldub_code(s->pc);
4089 /* check prefixes */
4090 #ifdef TARGET_X86_64
4094 prefixes |= PREFIX_REPZ;
4097 prefixes |= PREFIX_REPNZ;
4100 prefixes |= PREFIX_LOCK;
4121 prefixes |= PREFIX_DATA;
4124 prefixes |= PREFIX_ADR;
4128 rex_w = (b >> 3) & 1;
4129 rex_r = (b & 0x4) << 1;
4130 s->rex_x = (b & 0x2) << 2;
4131 REX_B(s) = (b & 0x1) << 3;
4132 x86_64_hregs = 1; /* select uniform byte register addressing */
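/* REX prefix: bit 3 = W (64-bit operand size), bit 2 = R, bit 1 = X,
   bit 0 = B.  R, X and B are pre-shifted to bit 3 above so they can be
   OR'ed directly into the modrm reg, SIB index and rm/base numbers. */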
4136 /* 0x66 is ignored if rex.w is set */
4139 if (prefixes & PREFIX_DATA)
4142 if (!(prefixes & PREFIX_ADR))
4149 prefixes |= PREFIX_REPZ;
4152 prefixes |= PREFIX_REPNZ;
4155 prefixes |= PREFIX_LOCK;
4176 prefixes |= PREFIX_DATA;
4179 prefixes |= PREFIX_ADR;
4182 if (prefixes & PREFIX_DATA)
4184 if (prefixes & PREFIX_ADR)
4188 s->prefix = prefixes;
4192 /* lock generation */
4193 if (prefixes & PREFIX_LOCK)
4196 /* now check op code */
4200 /**************************/
4201 /* extended op code */
4202 b = ldub_code(s->pc++) | 0x100;
4205 /**************************/
4223 ot = dflag + OT_WORD;
4226 case 0: /* OP Ev, Gv */
4227 modrm = ldub_code(s->pc++);
4228 reg = ((modrm >> 3) & 7) | rex_r;
4229 mod = (modrm >> 6) & 3;
4230 rm = (modrm & 7) | REX_B(s);
4232 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4234 } else if (op == OP_XORL && rm == reg) {
4236 /* xor reg, reg optimisation */
4238 s->cc_op = CC_OP_LOGICB + ot;
4239 gen_op_mov_reg_T0(ot, reg);
4240 gen_op_update1_cc();
4245 gen_op_mov_TN_reg(ot, 1, reg);
4246 gen_op(s, op, ot, opreg);
4248 case 1: /* OP Gv, Ev */
4249 modrm = ldub_code(s->pc++);
4250 mod = (modrm >> 6) & 3;
4251 reg = ((modrm >> 3) & 7) | rex_r;
4252 rm = (modrm & 7) | REX_B(s);
4254 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4255 gen_op_ld_T1_A0(ot + s->mem_index);
4256 } else if (op == OP_XORL && rm == reg) {
4259 gen_op_mov_TN_reg(ot, 1, rm);
4261 gen_op(s, op, ot, reg);
4263 case 2: /* OP A, Iv */
4264 val = insn_get(s, ot);
4265 gen_op_movl_T1_im(val);
4266 gen_op(s, op, ot, OR_EAX);
4275 case 0x80: /* GRP1 */
4284 ot = dflag + OT_WORD;
4286 modrm = ldub_code(s->pc++);
4287 mod = (modrm >> 6) & 3;
4288 rm = (modrm & 7) | REX_B(s);
4289 op = (modrm >> 3) & 7;
4295 s->rip_offset = insn_const_size(ot);
4296 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4307 val = insn_get(s, ot);
4310 val = (int8_t)insn_get(s, OT_BYTE);
4313 gen_op_movl_T1_im(val);
4314 gen_op(s, op, ot, opreg);
4318 /**************************/
4319 /* inc, dec, and other misc arith */
4320 case 0x40 ... 0x47: /* inc Gv */
4321 ot = dflag ? OT_LONG : OT_WORD;
4322 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4324 case 0x48 ... 0x4f: /* dec Gv */
4325 ot = dflag ? OT_LONG : OT_WORD;
4326 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4328 case 0xf6: /* GRP3 */
4333 ot = dflag + OT_WORD;
4335 modrm = ldub_code(s->pc++);
4336 mod = (modrm >> 6) & 3;
4337 rm = (modrm & 7) | REX_B(s);
4338 op = (modrm >> 3) & 7;
4341 s->rip_offset = insn_const_size(ot);
4342 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4343 gen_op_ld_T0_A0(ot + s->mem_index);
4345 gen_op_mov_TN_reg(ot, 0, rm);
4350 val = insn_get(s, ot);
4351 gen_op_movl_T1_im(val);
4352 gen_op_testl_T0_T1_cc();
4353 s->cc_op = CC_OP_LOGICB + ot;
4356 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4358 gen_op_st_T0_A0(ot + s->mem_index);
4360 gen_op_mov_reg_T0(ot, rm);
4364 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4366 gen_op_st_T0_A0(ot + s->mem_index);
4368 gen_op_mov_reg_T0(ot, rm);
4370 gen_op_update_neg_cc();
4371 s->cc_op = CC_OP_SUBB + ot;
4376 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4377 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4378 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4379 /* XXX: use 32 bit mul which could be faster */
4380 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4381 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4382 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4383 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4384 s->cc_op = CC_OP_MULB;
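/* For the CC_OP_MUL* modes the flag helpers only inspect cpu_cc_src: a
   non-zero high half of the product means CF = OF = 1. */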
4387 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4388 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4389 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4390 /* XXX: use 32 bit mul which could be faster */
4391 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4392 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4393 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4394 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4395 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4396 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4397 s->cc_op = CC_OP_MULW;
4401 #ifdef TARGET_X86_64
4402 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4403 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4404 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4405 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4406 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4407 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4408 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4409 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4410 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4414 t0 = tcg_temp_new_i64();
4415 t1 = tcg_temp_new_i64();
4416 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4417 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4418 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4419 tcg_gen_mul_i64(t0, t0, t1);
4420 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4421 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4422 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4423 tcg_gen_shri_i64(t0, t0, 32);
4424 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4425 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4426 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4429 s->cc_op = CC_OP_MULL;
4431 #ifdef TARGET_X86_64
4433 gen_helper_mulq_EAX_T0(cpu_T[0]);
4434 s->cc_op = CC_OP_MULQ;
4442 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4443 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4444 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4445 /* XXX: use 32 bit mul which could be faster */
4446 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4447 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4448 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4449 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4450 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4451 s->cc_op = CC_OP_MULB;
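/* For imul, cpu_cc_src is the full product minus its sign-extended low
   half, so it is non-zero exactly when the result overflowed the
   destination and CF/OF must be set. */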
4454 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4455 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4456 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4457 /* XXX: use 32 bit mul which could be faster */
4458 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4459 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4460 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4461 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4462 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4463 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4464 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4465 s->cc_op = CC_OP_MULW;
4469 #ifdef TARGET_X86_64
4470 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4471 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4472 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4473 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4474 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4475 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4476 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4477 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4478 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4479 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4483 t0 = tcg_temp_new_i64();
4484 t1 = tcg_temp_new_i64();
4485 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4486 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4487 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4488 tcg_gen_mul_i64(t0, t0, t1);
4489 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4490 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4491 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4492 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4493 tcg_gen_shri_i64(t0, t0, 32);
4494 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4495 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4496 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4499 s->cc_op = CC_OP_MULL;
4501 #ifdef TARGET_X86_64
4503 gen_helper_imulq_EAX_T0(cpu_T[0]);
4504 s->cc_op = CC_OP_MULQ;
4512 gen_jmp_im(pc_start - s->cs_base);
4513 gen_helper_divb_AL(cpu_T[0]);
4516 gen_jmp_im(pc_start - s->cs_base);
4517 gen_helper_divw_AX(cpu_T[0]);
4521 gen_jmp_im(pc_start - s->cs_base);
4522 gen_helper_divl_EAX(cpu_T[0]);
4524 #ifdef TARGET_X86_64
4526 gen_jmp_im(pc_start - s->cs_base);
4527 gen_helper_divq_EAX(cpu_T[0]);
4535 gen_jmp_im(pc_start - s->cs_base);
4536 gen_helper_idivb_AL(cpu_T[0]);
4539 gen_jmp_im(pc_start - s->cs_base);
4540 gen_helper_idivw_AX(cpu_T[0]);
4544 gen_jmp_im(pc_start - s->cs_base);
4545 gen_helper_idivl_EAX(cpu_T[0]);
4547 #ifdef TARGET_X86_64
4549 gen_jmp_im(pc_start - s->cs_base);
4550 gen_helper_idivq_EAX(cpu_T[0]);
4560 case 0xfe: /* GRP4 */
4561 case 0xff: /* GRP5 */
4565 ot = dflag + OT_WORD;
4567 modrm = ldub_code(s->pc++);
4568 mod = (modrm >> 6) & 3;
4569 rm = (modrm & 7) | REX_B(s);
4570 op = (modrm >> 3) & 7;
4571 if (op >= 2 && b == 0xfe) {
4575 if (op == 2 || op == 4) {
4576 /* operand size for jumps is 64 bit */
4578 } else if (op == 3 || op == 5) {
4579 ot = dflag ? OT_LONG + (rex_w == 1) : OT_WORD;
4580 } else if (op == 6) {
4581 /* default push size is 64 bit */
4582 ot = dflag ? OT_QUAD : OT_WORD;
4586 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4587 if (op >= 2 && op != 3 && op != 5)
4588 gen_op_ld_T0_A0(ot + s->mem_index);
4590 gen_op_mov_TN_reg(ot, 0, rm);
4594 case 0: /* inc Ev */
4599 gen_inc(s, ot, opreg, 1);
4601 case 1: /* dec Ev */
4606 gen_inc(s, ot, opreg, -1);
4608 case 2: /* call Ev */
4609 /* XXX: optimize if memory (no 'and' is necessary) */
4611 gen_op_andl_T0_ffff();
4612 next_eip = s->pc - s->cs_base;
4613 gen_movtl_T1_im(next_eip);
4618 case 3: /* lcall Ev */
4619 gen_op_ld_T1_A0(ot + s->mem_index);
4620 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4621 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4623 if (s->pe && !s->vm86) {
4624 if (s->cc_op != CC_OP_DYNAMIC)
4625 gen_op_set_cc_op(s->cc_op);
4626 gen_jmp_im(pc_start - s->cs_base);
4627 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4628 gen_helper_lcall_protected(cpu_tmp2_i32, cpu_T[1],
4629 tcg_const_i32(dflag),
4630 tcg_const_i32(s->pc - pc_start));
4632 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4633 gen_helper_lcall_real(cpu_tmp2_i32, cpu_T[1],
4634 tcg_const_i32(dflag),
4635 tcg_const_i32(s->pc - s->cs_base));
4639 case 4: /* jmp Ev */
4641 gen_op_andl_T0_ffff();
4645 case 5: /* ljmp Ev */
4646 gen_op_ld_T1_A0(ot + s->mem_index);
4647 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4648 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4650 if (s->pe && !s->vm86) {
4651 if (s->cc_op != CC_OP_DYNAMIC)
4652 gen_op_set_cc_op(s->cc_op);
4653 gen_jmp_im(pc_start - s->cs_base);
4654 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4655 gen_helper_ljmp_protected(cpu_tmp2_i32, cpu_T[1],
4656 tcg_const_i32(s->pc - pc_start));
4658 gen_op_movl_seg_T0_vm(R_CS);
4659 gen_op_movl_T0_T1();
4664 case 6: /* push Ev */
4672 case 0x84: /* test Ev, Gv */
4677 ot = dflag + OT_WORD;
4679 modrm = ldub_code(s->pc++);
4680 reg = ((modrm >> 3) & 7) | rex_r;
4682 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4683 gen_op_mov_TN_reg(ot, 1, reg);
4684 gen_op_testl_T0_T1_cc();
4685 s->cc_op = CC_OP_LOGICB + ot;
4688 case 0xa8: /* test eAX, Iv */
4693 ot = dflag + OT_WORD;
4694 val = insn_get(s, ot);
4696 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4697 gen_op_movl_T1_im(val);
4698 gen_op_testl_T0_T1_cc();
4699 s->cc_op = CC_OP_LOGICB + ot;
4702 case 0x98: /* CWDE/CBW */
4703 #ifdef TARGET_X86_64
4705 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4706 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4707 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4711 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4712 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4713 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4715 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4716 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4717 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4720 case 0x99: /* CDQ/CWD */
4721 #ifdef TARGET_X86_64
4723 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4724 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4725 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4729 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4730 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4731 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4732 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4734 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4735 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4736 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4737 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4740 case 0x1af: /* imul Gv, Ev */
4741 case 0x69: /* imul Gv, Ev, I */
4743 ot = dflag + OT_WORD;
4744 modrm = ldub_code(s->pc++);
4745 reg = ((modrm >> 3) & 7) | rex_r;
4747 s->rip_offset = insn_const_size(ot);
4750 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4752 val = insn_get(s, ot);
4753 gen_op_movl_T1_im(val);
4754 } else if (b == 0x6b) {
4755 val = (int8_t)insn_get(s, OT_BYTE);
4756 gen_op_movl_T1_im(val);
4758 gen_op_mov_TN_reg(ot, 1, reg);
4761 #ifdef TARGET_X86_64
4762 if (ot == OT_QUAD) {
4763 gen_helper_imulq_T0_T1(cpu_T[0], cpu_T[0], cpu_T[1]);
4766 if (ot == OT_LONG) {
4767 #ifdef TARGET_X86_64
4768 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4769 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4770 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4771 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4772 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4773 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4777 t0 = tcg_temp_new_i64();
4778 t1 = tcg_temp_new_i64();
4779 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4780 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4781 tcg_gen_mul_i64(t0, t0, t1);
4782 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4783 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4784 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4785 tcg_gen_shri_i64(t0, t0, 32);
4786 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4787 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4791 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4792 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4793 /* XXX: use 32 bit mul which could be faster */
4794 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4795 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4796 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4797 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4799 gen_op_mov_reg_T0(ot, reg);
4800 s->cc_op = CC_OP_MULB + ot;
4803 case 0x1c1: /* xadd Ev, Gv */
4807 ot = dflag + OT_WORD;
4808 modrm = ldub_code(s->pc++);
4809 reg = ((modrm >> 3) & 7) | rex_r;
4810 mod = (modrm >> 6) & 3;
4812 rm = (modrm & 7) | REX_B(s);
4813 gen_op_mov_TN_reg(ot, 0, reg);
4814 gen_op_mov_TN_reg(ot, 1, rm);
4815 gen_op_addl_T0_T1();
4816 gen_op_mov_reg_T1(ot, reg);
4817 gen_op_mov_reg_T0(ot, rm);
4819 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4820 gen_op_mov_TN_reg(ot, 0, reg);
4821 gen_op_ld_T1_A0(ot + s->mem_index);
4822 gen_op_addl_T0_T1();
4823 gen_op_st_T0_A0(ot + s->mem_index);
4824 gen_op_mov_reg_T1(ot, reg);
4826 gen_op_update2_cc();
4827 s->cc_op = CC_OP_ADDB + ot;
4830 case 0x1b1: /* cmpxchg Ev, Gv */
4833 TCGv t0, t1, t2, a0;
4838 ot = dflag + OT_WORD;
4839 modrm = ldub_code(s->pc++);
4840 reg = ((modrm >> 3) & 7) | rex_r;
4841 mod = (modrm >> 6) & 3;
4842 t0 = tcg_temp_local_new();
4843 t1 = tcg_temp_local_new();
4844 t2 = tcg_temp_local_new();
4845 a0 = tcg_temp_local_new();
4846 gen_op_mov_v_reg(ot, t1, reg);
4848 rm = (modrm & 7) | REX_B(s);
4849 gen_op_mov_v_reg(ot, t0, rm);
4851 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4852 tcg_gen_mov_tl(a0, cpu_A0);
4853 gen_op_ld_v(ot + s->mem_index, t0, a0);
4854 rm = 0; /* avoid warning */
4856 label1 = gen_new_label();
4857 tcg_gen_sub_tl(t2, cpu_regs[R_EAX], t0);
4859 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4861 label2 = gen_new_label();
4862 gen_op_mov_reg_v(ot, R_EAX, t0);
4864 gen_set_label(label1);
4865 gen_op_mov_reg_v(ot, rm, t1);
4866 gen_set_label(label2);
4868 tcg_gen_mov_tl(t1, t0);
4869 gen_op_mov_reg_v(ot, R_EAX, t0);
4870 gen_set_label(label1);
4872 gen_op_st_v(ot + s->mem_index, t1, a0);
4874 tcg_gen_mov_tl(cpu_cc_src, t0);
4875 tcg_gen_mov_tl(cpu_cc_dst, t2);
4876 s->cc_op = CC_OP_SUBB + ot;
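/* cmpxchg leaves the flags as a CMP would: cpu_cc_src holds the destination
   operand and cpu_cc_dst the EAX - dest difference computed above. */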
4883 case 0x1c7: /* cmpxchg8b */
4884 modrm = ldub_code(s->pc++);
4885 mod = (modrm >> 6) & 3;
4886 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4888 #ifdef TARGET_X86_64
4890 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4892 gen_jmp_im(pc_start - s->cs_base);
4893 if (s->cc_op != CC_OP_DYNAMIC)
4894 gen_op_set_cc_op(s->cc_op);
4895 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4896 gen_helper_cmpxchg16b(cpu_A0);
4900 if (!(s->cpuid_features & CPUID_CX8))
4902 gen_jmp_im(pc_start - s->cs_base);
4903 if (s->cc_op != CC_OP_DYNAMIC)
4904 gen_op_set_cc_op(s->cc_op);
4905 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4906 gen_helper_cmpxchg8b(cpu_A0);
4908 s->cc_op = CC_OP_EFLAGS;
4911 /**************************/
4913 case 0x50 ... 0x57: /* push */
4914 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4917 case 0x58 ... 0x5f: /* pop */
4919 ot = dflag ? OT_QUAD : OT_WORD;
4921 ot = dflag + OT_WORD;
4924 /* NOTE: order is important for pop %sp */
4926 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4928 case 0x60: /* pusha */
4933 case 0x61: /* popa */
4938 case 0x68: /* push Iv */
4941 ot = dflag ? OT_QUAD : OT_WORD;
4943 ot = dflag + OT_WORD;
4946 val = insn_get(s, ot);
4948 val = (int8_t)insn_get(s, OT_BYTE);
4949 gen_op_movl_T0_im(val);
4952 case 0x8f: /* pop Ev */
4954 ot = dflag ? OT_QUAD : OT_WORD;
4956 ot = dflag + OT_WORD;
4958 modrm = ldub_code(s->pc++);
4959 mod = (modrm >> 6) & 3;
4962 /* NOTE: order is important for pop %sp */
4964 rm = (modrm & 7) | REX_B(s);
4965 gen_op_mov_reg_T0(ot, rm);
4967 /* NOTE: order is important too for MMU exceptions */
4968 s->popl_esp_hack = 1 << ot;
4969 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4970 s->popl_esp_hack = 0;
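/* popl_esp_hack tells gen_lea_modrm() that, for a pop into memory, an
   ESP-relative effective address must be computed as if ESP had already
   been incremented by the operand size, matching hardware behaviour. */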
4974 case 0xc8: /* enter */
4977 val = lduw_code(s->pc);
4979 level = ldub_code(s->pc++);
4980 gen_enter(s, val, level);
4983 case 0xc9: /* leave */
4984 /* XXX: exception not precise (ESP is updated before potential exception) */
4986 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4987 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4988 } else if (s->ss32) {
4989 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4990 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4992 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4993 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4997 ot = dflag ? OT_QUAD : OT_WORD;
4999 ot = dflag + OT_WORD;
5001 gen_op_mov_reg_T0(ot, R_EBP);
5004 case 0x06: /* push es */
5005 case 0x0e: /* push cs */
5006 case 0x16: /* push ss */
5007 case 0x1e: /* push ds */
5010 gen_op_movl_T0_seg(b >> 3);
5013 case 0x1a0: /* push fs */
5014 case 0x1a8: /* push gs */
5015 gen_op_movl_T0_seg((b >> 3) & 7);
5018 case 0x07: /* pop es */
5019 case 0x17: /* pop ss */
5020 case 0x1f: /* pop ds */
5025 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5028 /* if reg == SS, inhibit interrupts/trace. */
5029 /* If several instructions disable interrupts, only the first does it. */
5031 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5032 gen_helper_set_inhibit_irq();
5036 gen_jmp_im(s->pc - s->cs_base);
5040 case 0x1a1: /* pop fs */
5041 case 0x1a9: /* pop gs */
5043 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5046 gen_jmp_im(s->pc - s->cs_base);
5051 /**************************/
5054 case 0x89: /* mov Gv, Ev */
5058 ot = dflag + OT_WORD;
5059 modrm = ldub_code(s->pc++);
5060 reg = ((modrm >> 3) & 7) | rex_r;
5062 /* generate a generic store */
5063 gen_ldst_modrm(s, modrm, ot, reg, 1);
5066 case 0xc7: /* mov Ev, Iv */
5070 ot = dflag + OT_WORD;
5071 modrm = ldub_code(s->pc++);
5072 mod = (modrm >> 6) & 3;
5074 s->rip_offset = insn_const_size(ot);
5075 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5077 val = insn_get(s, ot);
5078 gen_op_movl_T0_im(val);
5080 gen_op_st_T0_A0(ot + s->mem_index);
5082 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5085 case 0x8b: /* mov Ev, Gv */
5089 ot = OT_WORD + dflag;
5090 modrm = ldub_code(s->pc++);
5091 reg = ((modrm >> 3) & 7) | rex_r;
5093 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5094 gen_op_mov_reg_T0(ot, reg);
5096 case 0x8e: /* mov seg, Gv */
5097 modrm = ldub_code(s->pc++);
5098 reg = (modrm >> 3) & 7;
5099 if (reg >= 6 || reg == R_CS)
5101 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5102 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5104 /* if reg == SS, inhibit interrupts/trace */
5105 /* If several instructions disable interrupts, only the first does it. */
5107 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5108 gen_helper_set_inhibit_irq();
5112 gen_jmp_im(s->pc - s->cs_base);
5116 case 0x8c: /* mov Gv, seg */
5117 modrm = ldub_code(s->pc++);
5118 reg = (modrm >> 3) & 7;
5119 mod = (modrm >> 6) & 3;
5122 gen_op_movl_T0_seg(reg);
5124 ot = OT_WORD + dflag;
5127 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5130 case 0x1b6: /* movzbS Gv, Eb */
5131 case 0x1b7: /* movzwS Gv, Eb */
5132 case 0x1be: /* movsbS Gv, Eb */
5133 case 0x1bf: /* movswS Gv, Eb */
5136 /* d_ot is the size of destination */
5137 d_ot = dflag + OT_WORD;
5138 /* ot is the size of source */
5139 ot = (b & 1) + OT_BYTE;
5140 modrm = ldub_code(s->pc++);
5141 reg = ((modrm >> 3) & 7) | rex_r;
5142 mod = (modrm >> 6) & 3;
5143 rm = (modrm & 7) | REX_B(s);
5146 gen_op_mov_TN_reg(ot, 0, rm);
5147 switch(ot | (b & 8)) {
5149 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5152 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5155 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5159 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5162 gen_op_mov_reg_T0(d_ot, reg);
5164 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5166 gen_op_lds_T0_A0(ot + s->mem_index);
5168 gen_op_ldu_T0_A0(ot + s->mem_index);
5170 gen_op_mov_reg_T0(d_ot, reg);
5175 case 0x8d: /* lea */
5176 ot = dflag + OT_WORD;
5177 modrm = ldub_code(s->pc++);
5178 mod = (modrm >> 6) & 3;
5181 reg = ((modrm >> 3) & 7) | rex_r;
5182 /* we must ensure that no segment is added */
5186 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5188 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5191 case 0xa0: /* mov EAX, Ov */
5193 case 0xa2: /* mov Ov, EAX */
5196 target_ulong offset_addr;
5201 ot = dflag + OT_WORD;
5202 #ifdef TARGET_X86_64
5203 if (s->aflag == 2) {
5204 offset_addr = ldq_code(s->pc);
5206 gen_op_movq_A0_im(offset_addr);
5211 offset_addr = insn_get(s, OT_LONG);
5213 offset_addr = insn_get(s, OT_WORD);
5215 gen_op_movl_A0_im(offset_addr);
5217 gen_add_A0_ds_seg(s);
5219 gen_op_ld_T0_A0(ot + s->mem_index);
5220 gen_op_mov_reg_T0(ot, R_EAX);
5222 gen_op_mov_TN_reg(ot, 0, R_EAX);
5223 gen_op_st_T0_A0(ot + s->mem_index);
5227 case 0xd7: /* xlat */
5228 #ifdef TARGET_X86_64
5229 if (s->aflag == 2) {
5230 gen_op_movq_A0_reg(R_EBX);
5231 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5232 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5233 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5237 gen_op_movl_A0_reg(R_EBX);
5238 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5239 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5240 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5242 gen_op_andl_A0_ffff();
5244 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5246 gen_add_A0_ds_seg(s);
5247 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5248 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5250 case 0xb0 ... 0xb7: /* mov R, Ib */
5251 val = insn_get(s, OT_BYTE);
5252 gen_op_movl_T0_im(val);
5253 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5255 case 0xb8 ... 0xbf: /* mov R, Iv */
5256 #ifdef TARGET_X86_64
5260 tmp = ldq_code(s->pc);
5262 reg = (b & 7) | REX_B(s);
5263 gen_movtl_T0_im(tmp);
5264 gen_op_mov_reg_T0(OT_QUAD, reg);
5268 ot = dflag ? OT_LONG : OT_WORD;
5269 val = insn_get(s, ot);
5270 reg = (b & 7) | REX_B(s);
5271 gen_op_movl_T0_im(val);
5272 gen_op_mov_reg_T0(ot, reg);
5276 case 0x91 ... 0x97: /* xchg R, EAX */
5278 ot = dflag + OT_WORD;
5279 reg = (b & 7) | REX_B(s);
5283 case 0x87: /* xchg Ev, Gv */
5287 ot = dflag + OT_WORD;
5288 modrm = ldub_code(s->pc++);
5289 reg = ((modrm >> 3) & 7) | rex_r;
5290 mod = (modrm >> 6) & 3;
5292 rm = (modrm & 7) | REX_B(s);
5294 gen_op_mov_TN_reg(ot, 0, reg);
5295 gen_op_mov_TN_reg(ot, 1, rm);
5296 gen_op_mov_reg_T0(ot, rm);
5297 gen_op_mov_reg_T1(ot, reg);
5299 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5300 gen_op_mov_TN_reg(ot, 0, reg);
5301 /* for xchg, lock is implicit */
5302 if (!(prefixes & PREFIX_LOCK))
5304 gen_op_ld_T1_A0(ot + s->mem_index);
5305 gen_op_st_T0_A0(ot + s->mem_index);
5306 if (!(prefixes & PREFIX_LOCK))
5307 gen_helper_unlock();
5308 gen_op_mov_reg_T1(ot, reg);
5311 case 0xc4: /* les Gv */
5316 case 0xc5: /* lds Gv */
5321 case 0x1b2: /* lss Gv */
5324 case 0x1b4: /* lfs Gv */
5327 case 0x1b5: /* lgs Gv */
5330 ot = dflag ? OT_LONG : OT_WORD;
5331 modrm = ldub_code(s->pc++);
5332 reg = ((modrm >> 3) & 7) | rex_r;
5333 mod = (modrm >> 6) & 3;
5336 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5337 gen_op_ld_T1_A0(ot + s->mem_index);
5338 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5339 /* load the segment first to handle exceptions properly */
5340 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5341 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5342 /* then put the data */
5343 gen_op_mov_reg_T1(ot, reg);
5345 gen_jmp_im(s->pc - s->cs_base);
5350 /************************/
5361 ot = dflag + OT_WORD;
5363 modrm = ldub_code(s->pc++);
5364 mod = (modrm >> 6) & 3;
5365 op = (modrm >> 3) & 7;
5371 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5374 opreg = (modrm & 7) | REX_B(s);
5379 gen_shift(s, op, ot, opreg, OR_ECX);
5382 shift = ldub_code(s->pc++);
5384 gen_shifti(s, op, ot, opreg, shift);
5399 case 0x1a4: /* shld imm */
5403 case 0x1a5: /* shld cl */
5407 case 0x1ac: /* shrd imm */
5411 case 0x1ad: /* shrd cl */
5415 ot = dflag + OT_WORD;
5416 modrm = ldub_code(s->pc++);
5417 mod = (modrm >> 6) & 3;
5418 rm = (modrm & 7) | REX_B(s);
5419 reg = ((modrm >> 3) & 7) | rex_r;
5421 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5426 gen_op_mov_TN_reg(ot, 1, reg);
5429 val = ldub_code(s->pc++);
5430 tcg_gen_movi_tl(cpu_T3, val);
5432 tcg_gen_mov_tl(cpu_T3, cpu_regs[R_ECX]);
5434 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5437 /************************/
5440 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5441 /* if CR0.EM or CR0.TS is set, generate an FPU exception */
5442 /* XXX: what to do if illegal op ? */
5443 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5446 modrm = ldub_code(s->pc++);
5447 mod = (modrm >> 6) & 3;
5449 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
5452 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5454 case 0x00 ... 0x07: /* fxxxs */
5455 case 0x10 ... 0x17: /* fixxxl */
5456 case 0x20 ... 0x27: /* fxxxl */
5457 case 0x30 ... 0x37: /* fixxx */
5464 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5465 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5466 gen_helper_flds_FT0(cpu_tmp2_i32);
5469 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5470 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5471 gen_helper_fildl_FT0(cpu_tmp2_i32);
5474 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5475 (s->mem_index >> 2) - 1);
5476 gen_helper_fldl_FT0(cpu_tmp1_i64);
5480 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5481 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5482 gen_helper_fildl_FT0(cpu_tmp2_i32);
5486 gen_helper_fp_arith_ST0_FT0(op1);
5488 /* fcomp needs pop */
5493 case 0x08: /* flds */
5494 case 0x0a: /* fsts */
5495 case 0x0b: /* fstps */
5496 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5497 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5498 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5503 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5504 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5505 gen_helper_flds_ST0(cpu_tmp2_i32);
5508 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5509 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5510 gen_helper_fildl_ST0(cpu_tmp2_i32);
5513 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5514 (s->mem_index >> 2) - 1);
5515 gen_helper_fldl_ST0(cpu_tmp1_i64);
5519 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5520 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5521 gen_helper_fildl_ST0(cpu_tmp2_i32);
5526 /* XXX: the corresponding CPUID bit (SSE3, for fisttp) should be tested */
5529 gen_helper_fisttl_ST0(cpu_tmp2_i32);
5530 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5531 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5534 gen_helper_fisttll_ST0(cpu_tmp1_i64);
5535 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5536 (s->mem_index >> 2) - 1);
5540 gen_helper_fistt_ST0(cpu_tmp2_i32);
5541 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5542 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5550 gen_helper_fsts_ST0(cpu_tmp2_i32);
5551 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5552 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5555 gen_helper_fistl_ST0(cpu_tmp2_i32);
5556 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5557 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5560 gen_helper_fstl_ST0(cpu_tmp1_i64);
5561 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5562 (s->mem_index >> 2) - 1);
5566 gen_helper_fist_ST0(cpu_tmp2_i32);
5567 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5568 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5576 case 0x0c: /* fldenv mem */
5577 if (s->cc_op != CC_OP_DYNAMIC)
5578 gen_op_set_cc_op(s->cc_op);
5579 gen_jmp_im(pc_start - s->cs_base);
5581 cpu_A0, tcg_const_i32(s->dflag));
5583 case 0x0d: /* fldcw mem */
5584 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5585 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5586 gen_helper_fldcw(cpu_tmp2_i32);
5588 case 0x0e: /* fnstenv mem */
5589 if (s->cc_op != CC_OP_DYNAMIC)
5590 gen_op_set_cc_op(s->cc_op);
5591 gen_jmp_im(pc_start - s->cs_base);
5592 gen_helper_fstenv(cpu_A0, tcg_const_i32(s->dflag));
5594 case 0x0f: /* fnstcw mem */
5595 gen_helper_fnstcw(cpu_tmp2_i32);
5596 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5597 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5599 case 0x1d: /* fldt mem */
5600 if (s->cc_op != CC_OP_DYNAMIC)
5601 gen_op_set_cc_op(s->cc_op);
5602 gen_jmp_im(pc_start - s->cs_base);
5603 gen_helper_fldt_ST0(cpu_A0);
5605 case 0x1f: /* fstpt mem */
5606 if (s->cc_op != CC_OP_DYNAMIC)
5607 gen_op_set_cc_op(s->cc_op);
5608 gen_jmp_im(pc_start - s->cs_base);
5609 gen_helper_fstt_ST0(cpu_A0);
5612 case 0x2c: /* frstor mem */
5613 if (s->cc_op != CC_OP_DYNAMIC)
5614 gen_op_set_cc_op(s->cc_op);
5615 gen_jmp_im(pc_start - s->cs_base);
5616 gen_helper_frstor(cpu_A0, tcg_const_i32(s->dflag));
5618 case 0x2e: /* fnsave mem */
5619 if (s->cc_op != CC_OP_DYNAMIC)
5620 gen_op_set_cc_op(s->cc_op);
5621 gen_jmp_im(pc_start - s->cs_base);
5622 gen_helper_fsave(cpu_A0, tcg_const_i32(s->dflag));
5624 case 0x2f: /* fnstsw mem */
5625 gen_helper_fnstsw(cpu_tmp2_i32);
5626 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5627 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5629 case 0x3c: /* fbld */
5630 if (s->cc_op != CC_OP_DYNAMIC)
5631 gen_op_set_cc_op(s->cc_op);
5632 gen_jmp_im(pc_start - s->cs_base);
5633 gen_helper_fbld_ST0(cpu_A0);
5635 case 0x3e: /* fbstp */
5636 if (s->cc_op != CC_OP_DYNAMIC)
5637 gen_op_set_cc_op(s->cc_op);
5638 gen_jmp_im(pc_start - s->cs_base);
5639 gen_helper_fbst_ST0(cpu_A0);
5642 case 0x3d: /* fildll */
5643 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5644 (s->mem_index >> 2) - 1);
5645 gen_helper_fildll_ST0(cpu_tmp1_i64);
5647 case 0x3f: /* fistpll */
5648 gen_helper_fistll_ST0(cpu_tmp1_i64);
5649 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5650 (s->mem_index >> 2) - 1);
5657 /* register float ops */
5661 case 0x08: /* fld sti */
5663 gen_helper_fmov_ST0_STN(tcg_const_i32((opreg + 1) & 7));
5665 case 0x09: /* fxchg sti */
5666 case 0x29: /* fxchg4 sti, undocumented op */
5667 case 0x39: /* fxchg7 sti, undocumented op */
5668 gen_helper_fxchg_ST0_STN(tcg_const_i32(opreg));
5670 case 0x0a: /* grp d9/2 */
5673 /* check exceptions (FreeBSD FPU probe) */
5674 if (s->cc_op != CC_OP_DYNAMIC)
5675 gen_op_set_cc_op(s->cc_op);
5676 gen_jmp_im(pc_start - s->cs_base);
5683 case 0x0c: /* grp d9/4 */
5686 gen_helper_fchs_ST0();
5689 gen_helper_fabs_ST0();
5692 gen_helper_fldz_FT0();
5693 gen_helper_fcom_ST0_FT0();
5696 gen_helper_fxam_ST0();
5702 case 0x0d: /* grp d9/5 */
5707 gen_helper_fld1_ST0();
5711 gen_helper_fldl2t_ST0();
5715 gen_helper_fldl2e_ST0();
5719 gen_helper_fldpi_ST0();
5723 gen_helper_fldlg2_ST0();
5727 gen_helper_fldln2_ST0();
5731 gen_helper_fldz_ST0();
5738 case 0x0e: /* grp d9/6 */
5749 case 3: /* fpatan */
5750 gen_helper_fpatan();
5752 case 4: /* fxtract */
5753 gen_helper_fxtract();
5755 case 5: /* fprem1 */
5756 gen_helper_fprem1();
5758 case 6: /* fdecstp */
5759 gen_helper_fdecstp();
5762 case 7: /* fincstp */
5763 gen_helper_fincstp();
5767 case 0x0f: /* grp d9/7 */
5772 case 1: /* fyl2xp1 */
5773 gen_helper_fyl2xp1();
5778 case 3: /* fsincos */
5779 gen_helper_fsincos();
5781 case 5: /* fscale */
5782 gen_helper_fscale();
5784 case 4: /* frndint */
5785 gen_helper_frndint();
5796 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5797 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5798 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5804 gen_helper_fp_arith_STN_ST0(op1, opreg);
5808 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5809 gen_helper_fp_arith_ST0_FT0(op1);
5813 case 0x02: /* fcom */
5814 case 0x22: /* fcom2, undocumented op */
5815 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5816 gen_helper_fcom_ST0_FT0();
5818 case 0x03: /* fcomp */
5819 case 0x23: /* fcomp3, undocumented op */
5820 case 0x32: /* fcomp5, undocumented op */
5821 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5822 gen_helper_fcom_ST0_FT0();
5825 case 0x15: /* da/5 */
5827 case 1: /* fucompp */
5828 gen_helper_fmov_FT0_STN(tcg_const_i32(1));
5829 gen_helper_fucom_ST0_FT0();
5839 case 0: /* feni (287 only, just do nop here) */
5841 case 1: /* fdisi (287 only, just do nop here) */
5846 case 3: /* fninit */
5847 gen_helper_fninit();
5849 case 4: /* fsetpm (287 only, just do nop here) */
5855 case 0x1d: /* fucomi */
5856 if (s->cc_op != CC_OP_DYNAMIC)
5857 gen_op_set_cc_op(s->cc_op);
5858 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5859 gen_helper_fucomi_ST0_FT0();
5860 s->cc_op = CC_OP_EFLAGS;
5862 case 0x1e: /* fcomi */
5863 if (s->cc_op != CC_OP_DYNAMIC)
5864 gen_op_set_cc_op(s->cc_op);
5865 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5866 gen_helper_fcomi_ST0_FT0();
5867 s->cc_op = CC_OP_EFLAGS;
5869 case 0x28: /* ffree sti */
5870 gen_helper_ffree_STN(tcg_const_i32(opreg));
5872 case 0x2a: /* fst sti */
5873 gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
5875 case 0x2b: /* fstp sti */
5876 case 0x0b: /* fstp1 sti, undocumented op */
5877 case 0x3a: /* fstp8 sti, undocumented op */
5878 case 0x3b: /* fstp9 sti, undocumented op */
5879 gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
5882 case 0x2c: /* fucom st(i) */
5883 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5884 gen_helper_fucom_ST0_FT0();
5886 case 0x2d: /* fucomp st(i) */
5887 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5888 gen_helper_fucom_ST0_FT0();
5891 case 0x33: /* de/3 */
5893 case 1: /* fcompp */
5894 gen_helper_fmov_FT0_STN(tcg_const_i32(1));
5895 gen_helper_fcom_ST0_FT0();
5903 case 0x38: /* ffreep sti, undocumented op */
5904 gen_helper_ffree_STN(tcg_const_i32(opreg));
5907 case 0x3c: /* df/4 */
5910 gen_helper_fnstsw(cpu_tmp2_i32);
5911 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5912 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5918 case 0x3d: /* fucomip */
5919 if (s->cc_op != CC_OP_DYNAMIC)
5920 gen_op_set_cc_op(s->cc_op);
5921 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5922 gen_helper_fucomi_ST0_FT0();
5924 s->cc_op = CC_OP_EFLAGS;
5926 case 0x3e: /* fcomip */
5927 if (s->cc_op != CC_OP_DYNAMIC)
5928 gen_op_set_cc_op(s->cc_op);
5929 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5930 gen_helper_fcomi_ST0_FT0();
5932 s->cc_op = CC_OP_EFLAGS;
5934 case 0x10 ... 0x13: /* fcmovxx */
5938 static const uint8_t fcmov_cc[8] = {
5944 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
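/* roughly: fcmov_cc maps the low two opcode bits to the base condition
   (below, equal, below-or-equal, unordered) and bit 3, xor'ed with 1,
   supplies the negate bit, so the branch below skips the ST0 load exactly
   when the fcmovcc condition does not hold */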
5945 l1 = gen_new_label();
5946 gen_jcc1(s, s->cc_op, op1, l1);
5947 gen_helper_fmov_ST0_STN(tcg_const_i32(opreg));
5956 /************************/
5959 case 0xa4: /* movsS */
5964 ot = dflag + OT_WORD;
5966 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5967 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5973 case 0xaa: /* stosS */
5978 ot = dflag + OT_WORD;
5980 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5981 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5986 case 0xac: /* lodsS */
5991 ot = dflag + OT_WORD;
5992 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5993 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5998 case 0xae: /* scasS */
6003 ot = dflag + OT_WORD;
6004 if (prefixes & PREFIX_REPNZ) {
6005 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6006 } else if (prefixes & PREFIX_REPZ) {
6007 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6010 s->cc_op = CC_OP_SUBB + ot;
6014 case 0xa6: /* cmpsS */
6019 ot = dflag + OT_WORD;
6020 if (prefixes & PREFIX_REPNZ) {
6021 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6022 } else if (prefixes & PREFIX_REPZ) {
6023 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6026 s->cc_op = CC_OP_SUBB + ot;
6029 case 0x6c: /* insS */
6034 ot = dflag ? OT_LONG : OT_WORD;
6035 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6036 gen_op_andl_T0_ffff();
6037 gen_check_io(s, ot, pc_start - s->cs_base,
6038 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6039 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6040 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6044 gen_jmp(s, s->pc - s->cs_base);
6048 case 0x6e: /* outsS */
6053 ot = dflag ? OT_LONG : OT_WORD;
6054 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6055 gen_op_andl_T0_ffff();
6056 gen_check_io(s, ot, pc_start - s->cs_base,
6057 svm_is_rep(prefixes) | 4);
6058 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6059 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6063 gen_jmp(s, s->pc - s->cs_base);
6068 /************************/
6076 ot = dflag ? OT_LONG : OT_WORD;
6077 val = ldub_code(s->pc++);
6078 gen_op_movl_T0_im(val);
6079 gen_check_io(s, ot, pc_start - s->cs_base,
6080 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6083 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6084 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
6085 gen_op_mov_reg_T1(ot, R_EAX);
6088 gen_jmp(s, s->pc - s->cs_base);
6096 ot = dflag ? OT_LONG : OT_WORD;
6097 val = ldub_code(s->pc++);
6098 gen_op_movl_T0_im(val);
6099 gen_check_io(s, ot, pc_start - s->cs_base,
6100 svm_is_rep(prefixes));
6101 gen_op_mov_TN_reg(ot, 1, R_EAX);
6105 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6106 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6107 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6108 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
6111 gen_jmp(s, s->pc - s->cs_base);
6119 ot = dflag ? OT_LONG : OT_WORD;
6120 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6121 gen_op_andl_T0_ffff();
6122 gen_check_io(s, ot, pc_start - s->cs_base,
6123 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6126 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6127 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
6128 gen_op_mov_reg_T1(ot, R_EAX);
6131 gen_jmp(s, s->pc - s->cs_base);
6139 ot = dflag ? OT_LONG : OT_WORD;
6140 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6141 gen_op_andl_T0_ffff();
6142 gen_check_io(s, ot, pc_start - s->cs_base,
6143 svm_is_rep(prefixes));
6144 gen_op_mov_TN_reg(ot, 1, R_EAX);
6148 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6149 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6150 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6151 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
6154 gen_jmp(s, s->pc - s->cs_base);
6158 /************************/
6160 case 0xc2: /* ret im */
6161 val = ldsw_code(s->pc);
6164 if (CODE64(s) && s->dflag)
6166 gen_stack_update(s, val + (2 << s->dflag));
6168 gen_op_andl_T0_ffff();
6172 case 0xc3: /* ret */
6176 gen_op_andl_T0_ffff();
6180 case 0xca: /* lret im */
6181 val = ldsw_code(s->pc);
6184 if (s->pe && !s->vm86) {
6185 if (s->cc_op != CC_OP_DYNAMIC)
6186 gen_op_set_cc_op(s->cc_op);
6187 gen_jmp_im(pc_start - s->cs_base);
6188 gen_helper_lret_protected(tcg_const_i32(s->dflag),
6189 tcg_const_i32(val));
6193 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6195 gen_op_andl_T0_ffff();
6196 /* NOTE: keeping EIP updated is not a problem in case of
6200 gen_op_addl_A0_im(2 << s->dflag);
6201 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6202 gen_op_movl_seg_T0_vm(R_CS);
6203 /* add stack offset */
6204 gen_stack_update(s, val + (4 << s->dflag));
6208 case 0xcb: /* lret */
6211 case 0xcf: /* iret */
6212 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6215 gen_helper_iret_real(tcg_const_i32(s->dflag));
6216 s->cc_op = CC_OP_EFLAGS;
6217 } else if (s->vm86) {
6219 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6221 gen_helper_iret_real(tcg_const_i32(s->dflag));
6222 s->cc_op = CC_OP_EFLAGS;
6225 if (s->cc_op != CC_OP_DYNAMIC)
6226 gen_op_set_cc_op(s->cc_op);
6227 gen_jmp_im(pc_start - s->cs_base);
6228 gen_helper_iret_protected(tcg_const_i32(s->dflag),
6229 tcg_const_i32(s->pc - s->cs_base));
6230 s->cc_op = CC_OP_EFLAGS;
6234 case 0xe8: /* call im */
6237 tval = (int32_t)insn_get(s, OT_LONG);
6239 tval = (int16_t)insn_get(s, OT_WORD);
6240 next_eip = s->pc - s->cs_base;
6246 gen_movtl_T0_im(next_eip);
6251 case 0x9a: /* lcall im */
6253 unsigned int selector, offset;
6257 ot = dflag ? OT_LONG : OT_WORD;
6258 offset = insn_get(s, ot);
6259 selector = insn_get(s, OT_WORD);
6261 gen_op_movl_T0_im(selector);
6262 gen_op_movl_T1_imu(offset);
6265 case 0xe9: /* jmp im */
6267 tval = (int32_t)insn_get(s, OT_LONG);
6269 tval = (int16_t)insn_get(s, OT_WORD);
6270 tval += s->pc - s->cs_base;
6277 case 0xea: /* ljmp im */
6279 unsigned int selector, offset;
6283 ot = dflag ? OT_LONG : OT_WORD;
6284 offset = insn_get(s, ot);
6285 selector = insn_get(s, OT_WORD);
6287 gen_op_movl_T0_im(selector);
6288 gen_op_movl_T1_imu(offset);
6291 case 0xeb: /* jmp Jb */
6292 tval = (int8_t)insn_get(s, OT_BYTE);
6293 tval += s->pc - s->cs_base;
6298 case 0x70 ... 0x7f: /* jcc Jb */
6299 tval = (int8_t)insn_get(s, OT_BYTE);
6301 case 0x180 ... 0x18f: /* jcc Jv */
6303 tval = (int32_t)insn_get(s, OT_LONG);
6305 tval = (int16_t)insn_get(s, OT_WORD);
6308 next_eip = s->pc - s->cs_base;
6312 gen_jcc(s, b, tval, next_eip);
6315 case 0x190 ... 0x19f: /* setcc Gv */
6316 modrm = ldub_code(s->pc++);
6318 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6320 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6325 ot = dflag + OT_WORD;
6326 modrm = ldub_code(s->pc++);
6327 reg = ((modrm >> 3) & 7) | rex_r;
6328 mod = (modrm >> 6) & 3;
6329 t0 = tcg_temp_local_new();
6331 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6332 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6334 rm = (modrm & 7) | REX_B(s);
6335 gen_op_mov_v_reg(ot, t0, rm);
6337 #ifdef TARGET_X86_64
6338 if (ot == OT_LONG) {
6339 /* XXX: specific Intel behaviour ? */
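/* note: in 64-bit mode a 32-bit cmov clears the upper half of the
   destination even when the condition is false, hence the unconditional
   zero extension below */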
6340 l1 = gen_new_label();
6341 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6342 tcg_gen_mov_tl(cpu_regs[reg], t0);
6344 tcg_gen_ext32u_tl(cpu_regs[reg], cpu_regs[reg]);
6348 l1 = gen_new_label();
6349 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6350 gen_op_mov_reg_v(ot, reg, t0);
6357 /************************/
6359 case 0x9c: /* pushf */
6360 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6361 if (s->vm86 && s->iopl != 3) {
6362 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6364 if (s->cc_op != CC_OP_DYNAMIC)
6365 gen_op_set_cc_op(s->cc_op);
6366 gen_helper_read_eflags(cpu_T[0]);
6370 case 0x9d: /* popf */
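/* the writable EFLAGS mask below depends on privilege: CPL 0 may also
   change IOPL, CPL <= IOPL may change IF, and anything else may touch
   neither; the 16-bit forms further restrict the update to the low word */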
6371 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6372 if (s->vm86 && s->iopl != 3) {
6373 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6378 gen_helper_write_eflags(cpu_T[0],
6379 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6381 gen_helper_write_eflags(cpu_T[0],
6382 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6385 if (s->cpl <= s->iopl) {
6387 gen_helper_write_eflags(cpu_T[0],
6388 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6390 gen_helper_write_eflags(cpu_T[0],
6391 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6395 gen_helper_write_eflags(cpu_T[0],
6396 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6398 gen_helper_write_eflags(cpu_T[0],
6399 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6404 s->cc_op = CC_OP_EFLAGS;
6405 /* abort translation because TF flag may change */
6406 gen_jmp_im(s->pc - s->cs_base);
6410 case 0x9e: /* sahf */
6411 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6413 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
6414 if (s->cc_op != CC_OP_DYNAMIC)
6415 gen_op_set_cc_op(s->cc_op);
6416 gen_compute_eflags(cpu_cc_src);
6417 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6418 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6419 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
6420 s->cc_op = CC_OP_EFLAGS;
6422 case 0x9f: /* lahf */
6423 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6425 if (s->cc_op != CC_OP_DYNAMIC)
6426 gen_op_set_cc_op(s->cc_op);
6427 gen_compute_eflags(cpu_T[0]);
6428 /* Note: gen_compute_eflags() only gives the condition codes */
6429 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
6430 gen_op_mov_reg_T0(OT_BYTE, R_AH);
6432 case 0xf5: /* cmc */
6433 if (s->cc_op != CC_OP_DYNAMIC)
6434 gen_op_set_cc_op(s->cc_op);
6435 gen_compute_eflags(cpu_cc_src);
6436 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6437 s->cc_op = CC_OP_EFLAGS;
6439 case 0xf8: /* clc */
6440 if (s->cc_op != CC_OP_DYNAMIC)
6441 gen_op_set_cc_op(s->cc_op);
6442 gen_compute_eflags(cpu_cc_src);
6443 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6444 s->cc_op = CC_OP_EFLAGS;
6446 case 0xf9: /* stc */
6447 if (s->cc_op != CC_OP_DYNAMIC)
6448 gen_op_set_cc_op(s->cc_op);
6449 gen_compute_eflags(cpu_cc_src);
6450 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6451 s->cc_op = CC_OP_EFLAGS;
6453 case 0xfc: /* cld */
6454 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
6455 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6457 case 0xfd: /* std */
6458 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
6459 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
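/* df is kept as +1 / -1 so the string ops can shift it by the operand
   size to obtain their per-element increment */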
6462 /************************/
6463 /* bit operations */
6464 case 0x1ba: /* bt/bts/btr/btc Gv, im */
6465 ot = dflag + OT_WORD;
6466 modrm = ldub_code(s->pc++);
6467 op = (modrm >> 3) & 7;
6468 mod = (modrm >> 6) & 3;
6469 rm = (modrm & 7) | REX_B(s);
6472 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6473 gen_op_ld_T0_A0(ot + s->mem_index);
6475 gen_op_mov_TN_reg(ot, 0, rm);
6478 val = ldub_code(s->pc++);
6479 gen_op_movl_T1_im(val);
6484 case 0x1a3: /* bt Gv, Ev */
6487 case 0x1ab: /* bts */
6490 case 0x1b3: /* btr */
6493 case 0x1bb: /* btc */
6496 ot = dflag + OT_WORD;
6497 modrm = ldub_code(s->pc++);
6498 reg = ((modrm >> 3) & 7) | rex_r;
6499 mod = (modrm >> 6) & 3;
6500 rm = (modrm & 7) | REX_B(s);
6501 gen_op_mov_TN_reg(OT_LONG, 1, reg);
6503 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6504 /* specific case: we need to add a displacement */
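/* e.g. with a 16-bit operand and a bit index of 35 in the source register,
   A0 advances by (35 >> 4) << 1 = 4 bytes and bit 35 & 15 = 3 of that word
   is then tested */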
6505 gen_exts(ot, cpu_T[1]);
6506 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
6507 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
6508 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
6509 gen_op_ld_T0_A0(ot + s->mem_index);
6511 gen_op_mov_TN_reg(ot, 0, rm);
6514 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
6517 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
6518 tcg_gen_movi_tl(cpu_cc_dst, 0);
6521 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6522 tcg_gen_movi_tl(cpu_tmp0, 1);
6523 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6524 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6527 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6528 tcg_gen_movi_tl(cpu_tmp0, 1);
6529 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6530 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
6531 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6535 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6536 tcg_gen_movi_tl(cpu_tmp0, 1);
6537 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6538 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6541 s->cc_op = CC_OP_SARB + ot;
6544 gen_op_st_T0_A0(ot + s->mem_index);
6546 gen_op_mov_reg_T0(ot, rm);
6547 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
6548 tcg_gen_movi_tl(cpu_cc_dst, 0);
6551 case 0x1bc: /* bsf */
6552 case 0x1bd: /* bsr */
6557 ot = dflag + OT_WORD;
6558 modrm = ldub_code(s->pc++);
6559 reg = ((modrm >> 3) & 7) | rex_r;
6560 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6561 gen_extu(ot, cpu_T[0]);
6562 t0 = tcg_temp_local_new();
6563 tcg_gen_mov_tl(t0, cpu_T[0]);
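/* with an F3 prefix and the ABM CPUID bit this is LZCNT, which unlike BSR
   is defined for a zero source (it returns the operand width); plain
   BSF/BSR below leave the destination unchanged and only set ZF when the
   source is zero */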
6564 if ((b & 1) && (prefixes & PREFIX_REPZ) &&
6565 (s->cpuid_ext3_features & CPUID_EXT3_ABM)) {
6567 case OT_WORD: gen_helper_lzcnt(cpu_T[0], t0,
6568 tcg_const_i32(16)); break;
6569 case OT_LONG: gen_helper_lzcnt(cpu_T[0], t0,
6570 tcg_const_i32(32)); break;
6571 case OT_QUAD: gen_helper_lzcnt(cpu_T[0], t0,
6572 tcg_const_i32(64)); break;
6574 gen_op_mov_reg_T0(ot, reg);
6576 label1 = gen_new_label();
6577 tcg_gen_movi_tl(cpu_cc_dst, 0);
6578 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
6580 gen_helper_bsr(cpu_T[0], t0);
6582 gen_helper_bsf(cpu_T[0], t0);
6584 gen_op_mov_reg_T0(ot, reg);
6585 tcg_gen_movi_tl(cpu_cc_dst, 1);
6586 gen_set_label(label1);
6587 tcg_gen_discard_tl(cpu_cc_src);
6588 s->cc_op = CC_OP_LOGICB + ot;
6593 /************************/
6595 case 0x27: /* daa */
6598 if (s->cc_op != CC_OP_DYNAMIC)
6599 gen_op_set_cc_op(s->cc_op);
6601 s->cc_op = CC_OP_EFLAGS;
6603 case 0x2f: /* das */
6606 if (s->cc_op != CC_OP_DYNAMIC)
6607 gen_op_set_cc_op(s->cc_op);
6609 s->cc_op = CC_OP_EFLAGS;
6611 case 0x37: /* aaa */
6614 if (s->cc_op != CC_OP_DYNAMIC)
6615 gen_op_set_cc_op(s->cc_op);
6617 s->cc_op = CC_OP_EFLAGS;
6619 case 0x3f: /* aas */
6622 if (s->cc_op != CC_OP_DYNAMIC)
6623 gen_op_set_cc_op(s->cc_op);
6625 s->cc_op = CC_OP_EFLAGS;
6627 case 0xd4: /* aam */
6630 val = ldub_code(s->pc++);
6632 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6634 gen_helper_aam(tcg_const_i32(val));
6635 s->cc_op = CC_OP_LOGICB;
6638 case 0xd5: /* aad */
6641 val = ldub_code(s->pc++);
6642 gen_helper_aad(tcg_const_i32(val));
6643 s->cc_op = CC_OP_LOGICB;
6645 /************************/
6647 case 0x90: /* nop */
6648 /* XXX: correct lock test for all insn */
6649 if (prefixes & PREFIX_LOCK) {
6652 /* If REX_B is set, then this is xchg eax, r8d, not a nop. */
6654 goto do_xchg_reg_eax;
6656 if (prefixes & PREFIX_REPZ) {
6657 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6660 case 0x9b: /* fwait */
6661 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6662 (HF_MP_MASK | HF_TS_MASK)) {
6663 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6665 if (s->cc_op != CC_OP_DYNAMIC)
6666 gen_op_set_cc_op(s->cc_op);
6667 gen_jmp_im(pc_start - s->cs_base);
6671 case 0xcc: /* int3 */
6672 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6674 case 0xcd: /* int N */
6675 val = ldub_code(s->pc++);
6676 if (s->vm86 && s->iopl != 3) {
6677 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6679 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6682 case 0xce: /* into */
6685 if (s->cc_op != CC_OP_DYNAMIC)
6686 gen_op_set_cc_op(s->cc_op);
6687 gen_jmp_im(pc_start - s->cs_base);
6688 gen_helper_into(tcg_const_i32(s->pc - pc_start));
6691 case 0xf1: /* icebp (undocumented, exits to external debugger) */
6692 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
6694 gen_debug(s, pc_start - s->cs_base);
6697 tb_flush(cpu_single_env);
6698 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6702 case 0xfa: /* cli */
6704 if (s->cpl <= s->iopl) {
6707 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6713 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6717 case 0xfb: /* sti */
6719 if (s->cpl <= s->iopl) {
6722 /* interrupts are re-enabled only after the first insn following sti */
6723 /* If several instructions disable interrupts, only the
6725 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6726 gen_helper_set_inhibit_irq();
6727 /* give a chance to handle pending irqs */
6728 gen_jmp_im(s->pc - s->cs_base);
6731 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6737 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6741 case 0x62: /* bound */
6744 ot = dflag ? OT_LONG : OT_WORD;
6745 modrm = ldub_code(s->pc++);
6746 reg = (modrm >> 3) & 7;
6747 mod = (modrm >> 6) & 3;
6750 gen_op_mov_TN_reg(ot, 0, reg);
6751 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6752 gen_jmp_im(pc_start - s->cs_base);
6753 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6755 gen_helper_boundw(cpu_A0, cpu_tmp2_i32);
6757 gen_helper_boundl(cpu_A0, cpu_tmp2_i32);
6759 case 0x1c8 ... 0x1cf: /* bswap reg */
6760 reg = (b & 7) | REX_B(s);
6761 #ifdef TARGET_X86_64
6763 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6764 tcg_gen_bswap64_i64(cpu_T[0], cpu_T[0]);
6765 gen_op_mov_reg_T0(OT_QUAD, reg);
6769 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6770 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
6771 tcg_gen_bswap32_tl(cpu_T[0], cpu_T[0]);
6772 gen_op_mov_reg_T0(OT_LONG, reg);
6775 case 0xd6: /* salc */
6778 if (s->cc_op != CC_OP_DYNAMIC)
6779 gen_op_set_cc_op(s->cc_op);
6780 gen_compute_eflags_c(cpu_T[0]);
6781 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6782 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6784 case 0xe0: /* loopnz */
6785 case 0xe1: /* loopz */
6786 case 0xe2: /* loop */
6787 case 0xe3: /* jecxz */
6791 tval = (int8_t)insn_get(s, OT_BYTE);
6792 next_eip = s->pc - s->cs_base;
6797 l1 = gen_new_label();
6798 l2 = gen_new_label();
6799 l3 = gen_new_label();
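/* roughly: l1 is the branch-taken path (eip = tval), the fall-through/l3
   path keeps eip = next_eip, and l2 joins both before the end of block */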
6802 case 0: /* loopnz */
6804 if (s->cc_op != CC_OP_DYNAMIC)
6805 gen_op_set_cc_op(s->cc_op);
6806 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6807 gen_op_jz_ecx(s->aflag, l3);
6808 gen_compute_eflags(cpu_tmp0);
6809 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6811 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
6813 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
6817 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6818 gen_op_jnz_ecx(s->aflag, l1);
6822 gen_op_jz_ecx(s->aflag, l1);
6827 gen_jmp_im(next_eip);
6836 case 0x130: /* wrmsr */
6837 case 0x132: /* rdmsr */
6839 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6841 if (s->cc_op != CC_OP_DYNAMIC)
6842 gen_op_set_cc_op(s->cc_op);
6843 gen_jmp_im(pc_start - s->cs_base);
6851 case 0x131: /* rdtsc */
6852 if (s->cc_op != CC_OP_DYNAMIC)
6853 gen_op_set_cc_op(s->cc_op);
6854 gen_jmp_im(pc_start - s->cs_base);
6860 gen_jmp(s, s->pc - s->cs_base);
6863 case 0x133: /* rdpmc */
6864 if (s->cc_op != CC_OP_DYNAMIC)
6865 gen_op_set_cc_op(s->cc_op);
6866 gen_jmp_im(pc_start - s->cs_base);
6869 case 0x134: /* sysenter */
6870 /* For Intel CPUs, SYSENTER is also valid in 64-bit mode */
6871 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
6874 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6876 gen_update_cc_op(s);
6877 gen_jmp_im(pc_start - s->cs_base);
6878 gen_helper_sysenter();
6882 case 0x135: /* sysexit */
6883 /* For Intel CPUs, SYSEXIT is also valid in 64-bit mode */
6884 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
6887 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6889 gen_update_cc_op(s);
6890 gen_jmp_im(pc_start - s->cs_base);
6891 gen_helper_sysexit(tcg_const_i32(dflag));
6895 #ifdef TARGET_X86_64
6896 case 0x105: /* syscall */
6897 /* XXX: is it usable in real mode ? */
6898 gen_update_cc_op(s);
6899 gen_jmp_im(pc_start - s->cs_base);
6900 gen_helper_syscall(tcg_const_i32(s->pc - pc_start));
6903 case 0x107: /* sysret */
6905 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6907 gen_update_cc_op(s);
6908 gen_jmp_im(pc_start - s->cs_base);
6909 gen_helper_sysret(tcg_const_i32(s->dflag));
6910 /* condition codes are modified only in long mode */
6912 s->cc_op = CC_OP_EFLAGS;
6917 case 0x1a2: /* cpuid */
6918 if (s->cc_op != CC_OP_DYNAMIC)
6919 gen_op_set_cc_op(s->cc_op);
6920 gen_jmp_im(pc_start - s->cs_base);
6923 case 0xf4: /* hlt */
6925 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6927 if (s->cc_op != CC_OP_DYNAMIC)
6928 gen_op_set_cc_op(s->cc_op);
6929 gen_jmp_im(pc_start - s->cs_base);
6930 gen_helper_hlt(tcg_const_i32(s->pc - pc_start));
6931 s->is_jmp = DISAS_TB_JUMP;
6935 modrm = ldub_code(s->pc++);
6936 mod = (modrm >> 6) & 3;
6937 op = (modrm >> 3) & 7;
6940 if (!s->pe || s->vm86)
6942 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
6943 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6947 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6950 if (!s->pe || s->vm86)
6953 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6955 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
6956 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6957 gen_jmp_im(pc_start - s->cs_base);
6958 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6959 gen_helper_lldt(cpu_tmp2_i32);
6963 if (!s->pe || s->vm86)
6965 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
6966 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6970 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6973 if (!s->pe || s->vm86)
6976 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6978 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
6979 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6980 gen_jmp_im(pc_start - s->cs_base);
6981 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6982 gen_helper_ltr(cpu_tmp2_i32);
6987 if (!s->pe || s->vm86)
6989 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6990 if (s->cc_op != CC_OP_DYNAMIC)
6991 gen_op_set_cc_op(s->cc_op);
6993 gen_helper_verr(cpu_T[0]);
6995 gen_helper_verw(cpu_T[0]);
6996 s->cc_op = CC_OP_EFLAGS;
7003 modrm = ldub_code(s->pc++);
7004 mod = (modrm >> 6) & 3;
7005 op = (modrm >> 3) & 7;
7011 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7012 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7013 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7014 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7015 gen_add_A0_im(s, 2);
7016 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7018 gen_op_andl_T0_im(0xffffff);
7019 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7024 case 0: /* monitor */
7025 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7028 if (s->cc_op != CC_OP_DYNAMIC)
7029 gen_op_set_cc_op(s->cc_op);
7030 gen_jmp_im(pc_start - s->cs_base);
7031 #ifdef TARGET_X86_64
7032 if (s->aflag == 2) {
7033 gen_op_movq_A0_reg(R_EAX);
7037 gen_op_movl_A0_reg(R_EAX);
7039 gen_op_andl_A0_ffff();
7041 gen_add_A0_ds_seg(s);
7042 gen_helper_monitor(cpu_A0);
7045 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7048 gen_update_cc_op(s);
7049 gen_jmp_im(pc_start - s->cs_base);
7050 gen_helper_mwait(tcg_const_i32(s->pc - pc_start));
7057 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7058 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7059 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7060 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7061 gen_add_A0_im(s, 2);
7062 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7064 gen_op_andl_T0_im(0xffffff);
7065 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7071 if (s->cc_op != CC_OP_DYNAMIC)
7072 gen_op_set_cc_op(s->cc_op);
7073 gen_jmp_im(pc_start - s->cs_base);
7076 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7079 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7082 gen_helper_vmrun(tcg_const_i32(s->aflag),
7083 tcg_const_i32(s->pc - pc_start));
7085 s->is_jmp = DISAS_TB_JUMP;
7088 case 1: /* VMMCALL */
7089 if (!(s->flags & HF_SVME_MASK))
7091 gen_helper_vmmcall();
7093 case 2: /* VMLOAD */
7094 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7097 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7100 gen_helper_vmload(tcg_const_i32(s->aflag));
7103 case 3: /* VMSAVE */
7104 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7107 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7110 gen_helper_vmsave(tcg_const_i32(s->aflag));
7114 if ((!(s->flags & HF_SVME_MASK) &&
7115 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7119 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7126 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7129 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7135 case 6: /* SKINIT */
7136 if ((!(s->flags & HF_SVME_MASK) &&
7137 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7140 gen_helper_skinit();
7142 case 7: /* INVLPGA */
7143 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7146 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7149 gen_helper_invlpga(tcg_const_i32(s->aflag));
7155 } else if (s->cpl != 0) {
7156 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7158 gen_svm_check_intercept(s, pc_start,
7159 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7160 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7161 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7162 gen_add_A0_im(s, 2);
7163 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7165 gen_op_andl_T0_im(0xffffff);
7167 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7168 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7170 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7171 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7176 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7177 #if defined TARGET_X86_64 && defined HOST_WORDS_BIGENDIAN
7178 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]) + 4);
7180 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7182 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7186 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7188 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7189 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7190 gen_helper_lmsw(cpu_T[0]);
7191 gen_jmp_im(s->pc - s->cs_base);
7196 if (mod != 3) { /* invlpg */
7198 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7200 if (s->cc_op != CC_OP_DYNAMIC)
7201 gen_op_set_cc_op(s->cc_op);
7202 gen_jmp_im(pc_start - s->cs_base);
7203 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7204 gen_helper_invlpg(cpu_A0);
7205 gen_jmp_im(s->pc - s->cs_base);
7210 case 0: /* swapgs */
7211 #ifdef TARGET_X86_64
7214 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7216 tcg_gen_ld_tl(cpu_T[0], cpu_env,
7217 offsetof(CPUX86State,segs[R_GS].base));
7218 tcg_gen_ld_tl(cpu_T[1], cpu_env,
7219 offsetof(CPUX86State,kernelgsbase));
7220 tcg_gen_st_tl(cpu_T[1], cpu_env,
7221 offsetof(CPUX86State,segs[R_GS].base));
7222 tcg_gen_st_tl(cpu_T[0], cpu_env,
7223 offsetof(CPUX86State,kernelgsbase));
7231 case 1: /* rdtscp */
7232 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7234 if (s->cc_op != CC_OP_DYNAMIC)
7235 gen_op_set_cc_op(s->cc_op);
7236 gen_jmp_im(pc_start - s->cs_base);
7239 gen_helper_rdtscp();
7242 gen_jmp(s, s->pc - s->cs_base);
7254 case 0x108: /* invd */
7255 case 0x109: /* wbinvd */
7257 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7259 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7263 case 0x63: /* arpl or movslS (x86_64) */
7264 #ifdef TARGET_X86_64
7267 /* d_ot is the size of the destination operand */
7268 d_ot = dflag + OT_WORD;
7270 modrm = ldub_code(s->pc++);
7271 reg = ((modrm >> 3) & 7) | rex_r;
7272 mod = (modrm >> 6) & 3;
7273 rm = (modrm & 7) | REX_B(s);
7276 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7278 if (d_ot == OT_QUAD)
7279 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7280 gen_op_mov_reg_T0(d_ot, reg);
7282 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7283 if (d_ot == OT_QUAD) {
7284 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7286 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7288 gen_op_mov_reg_T0(d_ot, reg);
7294 TCGv t0, t1, t2, a0;
7296 if (!s->pe || s->vm86)
7298 t0 = tcg_temp_local_new();
7299 t1 = tcg_temp_local_new();
7300 t2 = tcg_temp_local_new();
7302 modrm = ldub_code(s->pc++);
7303 reg = (modrm >> 3) & 7;
7304 mod = (modrm >> 6) & 3;
7307 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7308 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7309 a0 = tcg_temp_local_new();
7310 tcg_gen_mov_tl(a0, cpu_A0);
7312 gen_op_mov_v_reg(ot, t0, rm);
7315 gen_op_mov_v_reg(ot, t1, reg);
7316 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7317 tcg_gen_andi_tl(t1, t1, 3);
7318 tcg_gen_movi_tl(t2, 0);
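/* arpl: if the destination selector's RPL (t0 & 3) is below the source's
   (t1 & 3), raise it to the source RPL and record ZF in t2 */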
7319 label1 = gen_new_label();
7320 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7321 tcg_gen_andi_tl(t0, t0, ~3);
7322 tcg_gen_or_tl(t0, t0, t1);
7323 tcg_gen_movi_tl(t2, CC_Z);
7324 gen_set_label(label1);
7326 gen_op_st_v(ot + s->mem_index, t0, a0);
7329 gen_op_mov_reg_v(ot, rm, t0);
7331 if (s->cc_op != CC_OP_DYNAMIC)
7332 gen_op_set_cc_op(s->cc_op);
7333 gen_compute_eflags(cpu_cc_src);
7334 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7335 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7336 s->cc_op = CC_OP_EFLAGS;
7342 case 0x102: /* lar */
7343 case 0x103: /* lsl */
7347 if (!s->pe || s->vm86)
7349 ot = dflag ? OT_LONG : OT_WORD;
7350 modrm = ldub_code(s->pc++);
7351 reg = ((modrm >> 3) & 7) | rex_r;
7352 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7353 t0 = tcg_temp_local_new();
7354 if (s->cc_op != CC_OP_DYNAMIC)
7355 gen_op_set_cc_op(s->cc_op);
7357 gen_helper_lar(t0, cpu_T[0]);
7359 gen_helper_lsl(t0, cpu_T[0]);
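/* the helpers report a valid selector by setting ZF in cc_src; the
   destination register is only written in that case */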
7360 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7361 label1 = gen_new_label();
7362 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7363 gen_op_mov_reg_v(ot, reg, t0);
7364 gen_set_label(label1);
7365 s->cc_op = CC_OP_EFLAGS;
7370 modrm = ldub_code(s->pc++);
7371 mod = (modrm >> 6) & 3;
7372 op = (modrm >> 3) & 7;
7374 case 0: /* prefetchnta */
7375 case 1: /* prefetcht0 */
7376 case 2: /* prefetcht1 */
7377 case 3: /* prefetcht2 */
7380 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7381 /* nothing more to do */
7383 default: /* nop (multi byte) */
7384 gen_nop_modrm(s, modrm);
7388 case 0x119 ... 0x11f: /* nop (multi byte) */
7389 modrm = ldub_code(s->pc++);
7390 gen_nop_modrm(s, modrm);
7392 case 0x120: /* mov reg, crN */
7393 case 0x122: /* mov crN, reg */
7395 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7397 modrm = ldub_code(s->pc++);
7398 if ((modrm & 0xc0) != 0xc0)
7400 rm = (modrm & 7) | REX_B(s);
7401 reg = ((modrm >> 3) & 7) | rex_r;
7406 if ((prefixes & PREFIX_LOCK) && (reg == 0) &&
7407 (s->cpuid_ext3_features & CPUID_EXT3_CR8LEG)) {
7416 if (s->cc_op != CC_OP_DYNAMIC)
7417 gen_op_set_cc_op(s->cc_op);
7418 gen_jmp_im(pc_start - s->cs_base);
7420 gen_op_mov_TN_reg(ot, 0, rm);
7421 gen_helper_write_crN(tcg_const_i32(reg), cpu_T[0]);
7422 gen_jmp_im(s->pc - s->cs_base);
7425 gen_helper_read_crN(cpu_T[0], tcg_const_i32(reg));
7426 gen_op_mov_reg_T0(ot, rm);
7434 case 0x121: /* mov reg, drN */
7435 case 0x123: /* mov drN, reg */
7437 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7439 modrm = ldub_code(s->pc++);
7440 if ((modrm & 0xc0) != 0xc0)
7442 rm = (modrm & 7) | REX_B(s);
7443 reg = ((modrm >> 3) & 7) | rex_r;
7448 /* XXX: do it dynamically with CR4.DE bit */
7449 if (reg == 4 || reg == 5 || reg >= 8)
7452 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
7453 gen_op_mov_TN_reg(ot, 0, rm);
7454 gen_helper_movl_drN_T0(tcg_const_i32(reg), cpu_T[0]);
7455 gen_jmp_im(s->pc - s->cs_base);
7458 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
7459 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
7460 gen_op_mov_reg_T0(ot, rm);
7464 case 0x106: /* clts */
7466 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7468 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7470 /* abort block because static cpu state changed */
7471 gen_jmp_im(s->pc - s->cs_base);
7475 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
7476 case 0x1c3: /* MOVNTI reg, mem */
7477 if (!(s->cpuid_features & CPUID_SSE2))
7479 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
7480 modrm = ldub_code(s->pc++);
7481 mod = (modrm >> 6) & 3;
7484 reg = ((modrm >> 3) & 7) | rex_r;
7485 /* generate a generic store */
7486 gen_ldst_modrm(s, modrm, ot, reg, 1);
7489 modrm = ldub_code(s->pc++);
7490 mod = (modrm >> 6) & 3;
7491 op = (modrm >> 3) & 7;
7493 case 0: /* fxsave */
7494 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7495 (s->prefix & PREFIX_LOCK))
7497 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
7498 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7501 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7502 if (s->cc_op != CC_OP_DYNAMIC)
7503 gen_op_set_cc_op(s->cc_op);
7504 gen_jmp_im(pc_start - s->cs_base);
7505 gen_helper_fxsave(cpu_A0, tcg_const_i32((s->dflag == 2)));
7507 case 1: /* fxrstor */
7508 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7509 (s->prefix & PREFIX_LOCK))
7511 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
7512 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7515 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7516 if (s->cc_op != CC_OP_DYNAMIC)
7517 gen_op_set_cc_op(s->cc_op);
7518 gen_jmp_im(pc_start - s->cs_base);
7519 gen_helper_fxrstor(cpu_A0, tcg_const_i32((s->dflag == 2)));
7521 case 2: /* ldmxcsr */
7522 case 3: /* stmxcsr */
7523 if (s->flags & HF_TS_MASK) {
7524 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7527 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
7530 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7532 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7533 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7535 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7536 gen_op_st_T0_A0(OT_LONG + s->mem_index);
7539 case 5: /* lfence */
7540 case 6: /* mfence */
7541 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
7544 case 7: /* sfence / clflush */
7545 if ((modrm & 0xc7) == 0xc0) {
7547 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
7548 if (!(s->cpuid_features & CPUID_SSE))
7552 if (!(s->cpuid_features & CPUID_CLFLUSH))
7554 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7561 case 0x10d: /* 3DNow! prefetch(w) */
7562 modrm = ldub_code(s->pc++);
7563 mod = (modrm >> 6) & 3;
7566 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7567 /* ignore for now */
7569 case 0x1aa: /* rsm */
7570 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
7571 if (!(s->flags & HF_SMM_MASK))
7573 gen_update_cc_op(s);
7574 gen_jmp_im(s->pc - s->cs_base);
7578 case 0x1b8: /* SSE4.2 popcnt */
7579 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
7582 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
7585 modrm = ldub_code(s->pc++);
7586 reg = ((modrm >> 3) & 7);
7588 if (s->prefix & PREFIX_DATA)
7590 else if (s->dflag != 2)
7595 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7596 gen_helper_popcnt(cpu_T[0], cpu_T[0], tcg_const_i32(ot));
7597 gen_op_mov_reg_T0(ot, reg);
7599 s->cc_op = CC_OP_EFLAGS;
7601 case 0x10e ... 0x10f:
7602 /* 3DNow! instructions, ignore prefixes */
7603 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
7604 case 0x110 ... 0x117:
7605 case 0x128 ... 0x12f:
7606 case 0x138 ... 0x13a:
7607 case 0x150 ... 0x179:
7608 case 0x17c ... 0x17f:
7610 case 0x1c4 ... 0x1c6:
7611 case 0x1d0 ... 0x1fe:
7612 gen_sse(s, b, pc_start, rex_r);
7617 /* lock generation */
7618 if (s->prefix & PREFIX_LOCK)
7619 gen_helper_unlock();
7622 if (s->prefix & PREFIX_LOCK)
7623 gen_helper_unlock();
7624 /* XXX: ensure that no lock was generated */
7625 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
7629 void optimize_flags_init(void)
7631 #if TCG_TARGET_REG_BITS == 32
7632 assert(sizeof(CCTable) == (1 << 3));
7634 assert(sizeof(CCTable) == (1 << 4));
7636 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
7637 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
7638 offsetof(CPUState, cc_op), "cc_op");
7639 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
7641 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
7643 cpu_cc_tmp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_tmp),
7646 #ifdef TARGET_X86_64
7647 cpu_regs[R_EAX] = tcg_global_mem_new_i64(TCG_AREG0,
7648 offsetof(CPUState, regs[R_EAX]), "rax");
7649 cpu_regs[R_ECX] = tcg_global_mem_new_i64(TCG_AREG0,
7650 offsetof(CPUState, regs[R_ECX]), "rcx");
7651 cpu_regs[R_EDX] = tcg_global_mem_new_i64(TCG_AREG0,
7652 offsetof(CPUState, regs[R_EDX]), "rdx");
7653 cpu_regs[R_EBX] = tcg_global_mem_new_i64(TCG_AREG0,
7654 offsetof(CPUState, regs[R_EBX]), "rbx");
7655 cpu_regs[R_ESP] = tcg_global_mem_new_i64(TCG_AREG0,
7656 offsetof(CPUState, regs[R_ESP]), "rsp");
7657 cpu_regs[R_EBP] = tcg_global_mem_new_i64(TCG_AREG0,
7658 offsetof(CPUState, regs[R_EBP]), "rbp");
7659 cpu_regs[R_ESI] = tcg_global_mem_new_i64(TCG_AREG0,
7660 offsetof(CPUState, regs[R_ESI]), "rsi");
7661 cpu_regs[R_EDI] = tcg_global_mem_new_i64(TCG_AREG0,
7662 offsetof(CPUState, regs[R_EDI]), "rdi");
7663 cpu_regs[8] = tcg_global_mem_new_i64(TCG_AREG0,
7664 offsetof(CPUState, regs[8]), "r8");
7665 cpu_regs[9] = tcg_global_mem_new_i64(TCG_AREG0,
7666 offsetof(CPUState, regs[9]), "r9");
7667 cpu_regs[10] = tcg_global_mem_new_i64(TCG_AREG0,
7668 offsetof(CPUState, regs[10]), "r10");
7669 cpu_regs[11] = tcg_global_mem_new_i64(TCG_AREG0,
7670 offsetof(CPUState, regs[11]), "r11");
7671 cpu_regs[12] = tcg_global_mem_new_i64(TCG_AREG0,
7672 offsetof(CPUState, regs[12]), "r12");
7673 cpu_regs[13] = tcg_global_mem_new_i64(TCG_AREG0,
7674 offsetof(CPUState, regs[13]), "r13");
7675 cpu_regs[14] = tcg_global_mem_new_i64(TCG_AREG0,
7676 offsetof(CPUState, regs[14]), "r14");
7677 cpu_regs[15] = tcg_global_mem_new_i64(TCG_AREG0,
7678 offsetof(CPUState, regs[15]), "r15");
7680 cpu_regs[R_EAX] = tcg_global_mem_new_i32(TCG_AREG0,
7681 offsetof(CPUState, regs[R_EAX]), "eax");
7682 cpu_regs[R_ECX] = tcg_global_mem_new_i32(TCG_AREG0,
7683 offsetof(CPUState, regs[R_ECX]), "ecx");
7684 cpu_regs[R_EDX] = tcg_global_mem_new_i32(TCG_AREG0,
7685 offsetof(CPUState, regs[R_EDX]), "edx");
7686 cpu_regs[R_EBX] = tcg_global_mem_new_i32(TCG_AREG0,
7687 offsetof(CPUState, regs[R_EBX]), "ebx");
7688 cpu_regs[R_ESP] = tcg_global_mem_new_i32(TCG_AREG0,
7689 offsetof(CPUState, regs[R_ESP]), "esp");
7690 cpu_regs[R_EBP] = tcg_global_mem_new_i32(TCG_AREG0,
7691 offsetof(CPUState, regs[R_EBP]), "ebp");
7692 cpu_regs[R_ESI] = tcg_global_mem_new_i32(TCG_AREG0,
7693 offsetof(CPUState, regs[R_ESI]), "esi");
7694 cpu_regs[R_EDI] = tcg_global_mem_new_i32(TCG_AREG0,
7695 offsetof(CPUState, regs[R_EDI]), "edi");
7698 /* register helpers */
7699 #define GEN_HELPER 2
7703 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
7704 basic block 'tb'. If search_pc is TRUE, also generate PC
7705 information for each intermediate instruction. */
7706 static inline void gen_intermediate_code_internal(CPUState *env,
7707 TranslationBlock *tb,
7710 DisasContext dc1, *dc = &dc1;
7711 target_ulong pc_ptr;
7712 uint16_t *gen_opc_end;
7716 target_ulong pc_start;
7717 target_ulong cs_base;
7721 /* generate intermediate code */
7723 cs_base = tb->cs_base;
7726 dc->pe = (flags >> HF_PE_SHIFT) & 1;
7727 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
7728 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
7729 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
7731 dc->vm86 = (flags >> VM_SHIFT) & 1;
7732 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
7733 dc->iopl = (flags >> IOPL_SHIFT) & 3;
7734 dc->tf = (flags >> TF_SHIFT) & 1;
7735 dc->singlestep_enabled = env->singlestep_enabled;
7736 dc->cc_op = CC_OP_DYNAMIC;
7737 dc->cs_base = cs_base;
7739 dc->popl_esp_hack = 0;
7740 /* select memory access functions */
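/* roughly: mem_index stores the MMU index biased by one and scaled by
   four, so the operand size constant can be added straight into the same
   micro-op argument; the memory ops recover it as (idx >> 2) - 1 */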
7742 if (flags & HF_SOFTMMU_MASK) {
7744 dc->mem_index = 2 * 4;
7746 dc->mem_index = 1 * 4;
7748 dc->cpuid_features = env->cpuid_features;
7749 dc->cpuid_ext_features = env->cpuid_ext_features;
7750 dc->cpuid_ext2_features = env->cpuid_ext2_features;
7751 dc->cpuid_ext3_features = env->cpuid_ext3_features;
7752 #ifdef TARGET_X86_64
7753 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
7754 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
7757 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
7758 (flags & HF_INHIBIT_IRQ_MASK)
7759 #ifndef CONFIG_SOFTMMU
7760 || (flags & HF_SOFTMMU_MASK)
7764 /* check addseg logic */
7765 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
7766 printf("ERROR addseg\n");
7769 cpu_T[0] = tcg_temp_new();
7770 cpu_T[1] = tcg_temp_new();
7771 cpu_A0 = tcg_temp_new();
7772 cpu_T3 = tcg_temp_new();
7774 cpu_tmp0 = tcg_temp_new();
7775 cpu_tmp1_i64 = tcg_temp_new_i64();
7776 cpu_tmp2_i32 = tcg_temp_new_i32();
7777 cpu_tmp3_i32 = tcg_temp_new_i32();
7778 cpu_tmp4 = tcg_temp_new();
7779 cpu_tmp5 = tcg_temp_new();
7780 cpu_ptr0 = tcg_temp_new_ptr();
7781 cpu_ptr1 = tcg_temp_new_ptr();
7783 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7785 dc->is_jmp = DISAS_NEXT;
7789 max_insns = tb->cflags & CF_COUNT_MASK;
7791 max_insns = CF_COUNT_MASK;
7795 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
7796 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
7797 if (bp->pc == pc_ptr &&
7798 !((bp->flags & BP_CPU) && (tb->flags & HF_RF_MASK))) {
7799 gen_debug(dc, pc_ptr - dc->cs_base);
7805 j = gen_opc_ptr - gen_opc_buf;
7809 gen_opc_instr_start[lj++] = 0;
7811 gen_opc_pc[lj] = pc_ptr;
7812 gen_opc_cc_op[lj] = dc->cc_op;
7813 gen_opc_instr_start[lj] = 1;
7814 gen_opc_icount[lj] = num_insns;
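/* these side tables let gen_pc_load() map a host PC back to the guest
   eip, cc_op and instruction count */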
7816 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
7819 pc_ptr = disas_insn(dc, pc_ptr);
7821 /* stop translation if indicated */
7824 /* in single-step mode, we generate only one instruction and
7825    then generate an exception */
7826 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7827    the flag and abort the translation to give the irqs a
7828    chance to happen */
7829 if (dc->tf || dc->singlestep_enabled ||
7830 (flags & HF_INHIBIT_IRQ_MASK)) {
7831 gen_jmp_im(pc_ptr - dc->cs_base);
7835 /* if the translation gets too long, stop generating as well */
7836 if (gen_opc_ptr >= gen_opc_end ||
7837 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
7838 num_insns >= max_insns) {
7839 gen_jmp_im(pc_ptr - dc->cs_base);
7844 gen_jmp_im(pc_ptr - dc->cs_base);
7849 if (tb->cflags & CF_LAST_IO)
7851 gen_icount_end(tb, num_insns);
7852 *gen_opc_ptr = INDEX_op_end;
7853 /* don't forget to fill in the last values */
7855 j = gen_opc_ptr - gen_opc_buf;
7858 gen_opc_instr_start[lj++] = 0;
7862 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
7864 qemu_log("----------------\n");
7865 qemu_log("IN: %s\n", lookup_symbol(pc_start));
7866 #ifdef TARGET_X86_64
7871 disas_flags = !dc->code32;
7872 log_target_disas(pc_start, pc_ptr - pc_start, disas_flags);
7878 tb->size = pc_ptr - pc_start;
7879 tb->icount = num_insns;
7883 void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7885 gen_intermediate_code_internal(env, tb, 0);
7888 void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7890 gen_intermediate_code_internal(env, tb, 1);
7893 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7894 unsigned long searched_pc, int pc_pos, void *puc)
7898 if (qemu_loglevel_mask(CPU_LOG_TB_OP)) {
7900 qemu_log("RESTORE:\n");
7901 for(i = 0;i <= pc_pos; i++) {
7902 if (gen_opc_instr_start[i]) {
7903 qemu_log("0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7906 qemu_log("spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7907 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7908 (uint32_t)tb->cs_base);
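/* restore the guest state belonging to the instruction that contains
   searched_pc from the side tables recorded during translation */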
7911 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7912 cc_op = gen_opc_cc_op[pc_pos];
7913 if (cc_op != CC_OP_DYNAMIC)