4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
35 /* XXX: move that elsewhere */
36 static uint16_t *gen_opc_ptr;
37 static uint32_t *gen_opparam_ptr;
39 #define PREFIX_REPZ 0x01
40 #define PREFIX_REPNZ 0x02
41 #define PREFIX_LOCK 0x04
42 #define PREFIX_DATA 0x08
43 #define PREFIX_ADR 0x10
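/* Editor's sketch (not in the original source): during decode, disas_insn()
   below OR's these bits into a prefix mask as it consumes prefix bytes,
   e.g. a 0xf3 byte sets PREFIX_REPZ and a 0x66 byte sets PREFIX_DATA, and
   the result is finally stored in the translation context:

       prefixes = 0;
       ...
       prefixes |= PREFIX_REPZ;   // after a 0xf3 (rep/repz) byte
       prefixes |= PREFIX_DATA;   // after a 0x66 (operand size) byte
       ...
       s->prefix = prefixes;
*/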
45 typedef struct DisasContext {
46 /* current insn context */
47 int override; /* -1 if no override */
50 uint8_t *pc; /* pc = eip + cs_base */
51 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
52 static state change (stop translation) */
53 /* current block context */
54 uint8_t *cs_base; /* base of CS segment */
55 int code32; /* 32 bit code segment */
56 int ss32; /* 32 bit stack segment */
57 int cc_op; /* current CC operation */
58 int addseg; /* non-zero if DS, ES or SS has a non-zero base */
59 int f_st; /* currently unused */
60 int vm86; /* vm86 mode */
63 int tf; /* TF cpu flag */
64 struct TranslationBlock *tb;
65 int popl_esp_hack; /* for correct popl with esp base handling */
68 /* i386 arith/logic operations */
88 OP_SHL1, /* undocumented */
93 #define DEF(s, n, copy_size) INDEX_op_ ## s,
99 #include "gen-op-i386.h"
110 /* I386 int registers */
111 OR_EAX, /* MUST be even numbered */
119 OR_TMP0, /* temporary operand register */
121 OR_A0, /* temporary register used when doing address evaluation */
122 OR_ZERO, /* fixed zero register */
126 typedef void (GenOpFunc)(void);
127 typedef void (GenOpFunc1)(long);
128 typedef void (GenOpFunc2)(long, long);
129 typedef void (GenOpFunc3)(long, long, long);
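/* Usage sketch (editor's addition): the generated-op tables below hold
   pointers to argument-less emitters, indexed by operand size and register
   number, so a call site just picks an entry and invokes it, e.g.

       gen_op_mov_reg_T0[OT_LONG][R_EAX]();      // write T0 back into EAX
       gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();   // load EAX into T0

   with OT_* and R_* being the size and register enums used throughout
   this file. */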
131 static GenOpFunc *gen_op_mov_reg_T0[3][8] = {
164 static GenOpFunc *gen_op_mov_reg_T1[3][8] = {
197 static GenOpFunc *gen_op_mov_reg_A0[2][8] = {
220 static GenOpFunc *gen_op_mov_TN_reg[3][2][8] =
290 static GenOpFunc *gen_op_movl_A0_reg[8] = {
301 static GenOpFunc *gen_op_addl_A0_reg_sN[4][8] = {
313 gen_op_addl_A0_EAX_s1,
314 gen_op_addl_A0_ECX_s1,
315 gen_op_addl_A0_EDX_s1,
316 gen_op_addl_A0_EBX_s1,
317 gen_op_addl_A0_ESP_s1,
318 gen_op_addl_A0_EBP_s1,
319 gen_op_addl_A0_ESI_s1,
320 gen_op_addl_A0_EDI_s1,
323 gen_op_addl_A0_EAX_s2,
324 gen_op_addl_A0_ECX_s2,
325 gen_op_addl_A0_EDX_s2,
326 gen_op_addl_A0_EBX_s2,
327 gen_op_addl_A0_ESP_s2,
328 gen_op_addl_A0_EBP_s2,
329 gen_op_addl_A0_ESI_s2,
330 gen_op_addl_A0_EDI_s2,
333 gen_op_addl_A0_EAX_s3,
334 gen_op_addl_A0_ECX_s3,
335 gen_op_addl_A0_EDX_s3,
336 gen_op_addl_A0_EBX_s3,
337 gen_op_addl_A0_ESP_s3,
338 gen_op_addl_A0_EBP_s3,
339 gen_op_addl_A0_ESI_s3,
340 gen_op_addl_A0_EDI_s3,
344 static GenOpFunc *gen_op_cmov_reg_T1_T0[2][8] = {
346 gen_op_cmovw_EAX_T1_T0,
347 gen_op_cmovw_ECX_T1_T0,
348 gen_op_cmovw_EDX_T1_T0,
349 gen_op_cmovw_EBX_T1_T0,
350 gen_op_cmovw_ESP_T1_T0,
351 gen_op_cmovw_EBP_T1_T0,
352 gen_op_cmovw_ESI_T1_T0,
353 gen_op_cmovw_EDI_T1_T0,
356 gen_op_cmovl_EAX_T1_T0,
357 gen_op_cmovl_ECX_T1_T0,
358 gen_op_cmovl_EDX_T1_T0,
359 gen_op_cmovl_EBX_T1_T0,
360 gen_op_cmovl_ESP_T1_T0,
361 gen_op_cmovl_EBP_T1_T0,
362 gen_op_cmovl_ESI_T1_T0,
363 gen_op_cmovl_EDI_T1_T0,
367 static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
378 static GenOpFunc *gen_op_arithc_T0_T1_cc[3][2] = {
380 gen_op_adcb_T0_T1_cc,
381 gen_op_sbbb_T0_T1_cc,
384 gen_op_adcw_T0_T1_cc,
385 gen_op_sbbw_T0_T1_cc,
388 gen_op_adcl_T0_T1_cc,
389 gen_op_sbbl_T0_T1_cc,
393 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3][2] = {
395 gen_op_adcb_mem_T0_T1_cc,
396 gen_op_sbbb_mem_T0_T1_cc,
399 gen_op_adcw_mem_T0_T1_cc,
400 gen_op_sbbw_mem_T0_T1_cc,
403 gen_op_adcl_mem_T0_T1_cc,
404 gen_op_sbbl_mem_T0_T1_cc,
408 static const int cc_op_arithb[8] = {
419 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[3] = {
420 gen_op_cmpxchgb_T0_T1_EAX_cc,
421 gen_op_cmpxchgw_T0_T1_EAX_cc,
422 gen_op_cmpxchgl_T0_T1_EAX_cc,
425 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3] = {
426 gen_op_cmpxchgb_mem_T0_T1_EAX_cc,
427 gen_op_cmpxchgw_mem_T0_T1_EAX_cc,
428 gen_op_cmpxchgl_mem_T0_T1_EAX_cc,
431 static GenOpFunc *gen_op_shift_T0_T1_cc[3][8] = {
433 gen_op_rolb_T0_T1_cc,
434 gen_op_rorb_T0_T1_cc,
435 gen_op_rclb_T0_T1_cc,
436 gen_op_rcrb_T0_T1_cc,
437 gen_op_shlb_T0_T1_cc,
438 gen_op_shrb_T0_T1_cc,
439 gen_op_shlb_T0_T1_cc,
440 gen_op_sarb_T0_T1_cc,
443 gen_op_rolw_T0_T1_cc,
444 gen_op_rorw_T0_T1_cc,
445 gen_op_rclw_T0_T1_cc,
446 gen_op_rcrw_T0_T1_cc,
447 gen_op_shlw_T0_T1_cc,
448 gen_op_shrw_T0_T1_cc,
449 gen_op_shlw_T0_T1_cc,
450 gen_op_sarw_T0_T1_cc,
453 gen_op_roll_T0_T1_cc,
454 gen_op_rorl_T0_T1_cc,
455 gen_op_rcll_T0_T1_cc,
456 gen_op_rcrl_T0_T1_cc,
457 gen_op_shll_T0_T1_cc,
458 gen_op_shrl_T0_T1_cc,
459 gen_op_shll_T0_T1_cc,
460 gen_op_sarl_T0_T1_cc,
464 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3][8] = {
466 gen_op_rolb_mem_T0_T1_cc,
467 gen_op_rorb_mem_T0_T1_cc,
468 gen_op_rclb_mem_T0_T1_cc,
469 gen_op_rcrb_mem_T0_T1_cc,
470 gen_op_shlb_mem_T0_T1_cc,
471 gen_op_shrb_mem_T0_T1_cc,
472 gen_op_shlb_mem_T0_T1_cc,
473 gen_op_sarb_mem_T0_T1_cc,
476 gen_op_rolw_mem_T0_T1_cc,
477 gen_op_rorw_mem_T0_T1_cc,
478 gen_op_rclw_mem_T0_T1_cc,
479 gen_op_rcrw_mem_T0_T1_cc,
480 gen_op_shlw_mem_T0_T1_cc,
481 gen_op_shrw_mem_T0_T1_cc,
482 gen_op_shlw_mem_T0_T1_cc,
483 gen_op_sarw_mem_T0_T1_cc,
486 gen_op_roll_mem_T0_T1_cc,
487 gen_op_rorl_mem_T0_T1_cc,
488 gen_op_rcll_mem_T0_T1_cc,
489 gen_op_rcrl_mem_T0_T1_cc,
490 gen_op_shll_mem_T0_T1_cc,
491 gen_op_shrl_mem_T0_T1_cc,
492 gen_op_shll_mem_T0_T1_cc,
493 gen_op_sarl_mem_T0_T1_cc,
497 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[2][2] = {
499 gen_op_shldw_T0_T1_im_cc,
500 gen_op_shrdw_T0_T1_im_cc,
503 gen_op_shldl_T0_T1_im_cc,
504 gen_op_shrdl_T0_T1_im_cc,
508 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[2][2] = {
510 gen_op_shldw_T0_T1_ECX_cc,
511 gen_op_shrdw_T0_T1_ECX_cc,
514 gen_op_shldl_T0_T1_ECX_cc,
515 gen_op_shrdl_T0_T1_ECX_cc,
519 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[2][2] = {
521 gen_op_shldw_mem_T0_T1_im_cc,
522 gen_op_shrdw_mem_T0_T1_im_cc,
525 gen_op_shldl_mem_T0_T1_im_cc,
526 gen_op_shrdl_mem_T0_T1_im_cc,
530 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[2][2] = {
532 gen_op_shldw_mem_T0_T1_ECX_cc,
533 gen_op_shrdw_mem_T0_T1_ECX_cc,
536 gen_op_shldl_mem_T0_T1_ECX_cc,
537 gen_op_shrdl_mem_T0_T1_ECX_cc,
541 static GenOpFunc *gen_op_btx_T0_T1_cc[2][4] = {
544 gen_op_btsw_T0_T1_cc,
545 gen_op_btrw_T0_T1_cc,
546 gen_op_btcw_T0_T1_cc,
550 gen_op_btsl_T0_T1_cc,
551 gen_op_btrl_T0_T1_cc,
552 gen_op_btcl_T0_T1_cc,
556 static GenOpFunc *gen_op_bsx_T0_cc[2][2] = {
567 static GenOpFunc *gen_op_lds_T0_A0[3] = {
572 static GenOpFunc *gen_op_ldu_T0_A0[3] = {
577 /* sign does not matter */
578 static GenOpFunc *gen_op_ld_T0_A0[3] = {
584 static GenOpFunc *gen_op_ld_T1_A0[3] = {
590 static GenOpFunc *gen_op_st_T0_A0[3] = {
596 /* the _a32 and _a16 string operations use A0 as the base register. */
598 #define STRINGOP(x) \
599 gen_op_ ## x ## b_fast, \
600 gen_op_ ## x ## w_fast, \
601 gen_op_ ## x ## l_fast, \
602 gen_op_ ## x ## b_a32, \
603 gen_op_ ## x ## w_a32, \
604 gen_op_ ## x ## l_a32, \
605 gen_op_ ## x ## b_a16, \
606 gen_op_ ## x ## w_a16, \
607 gen_op_ ## x ## l_a16,
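/* Layout note (editor's addition): each STRINGOP expansion contributes 9
   entries, ordered b/w/l "fast", then b/w/l "_a32", then b/w/l "_a16".
   The tables below concatenate one such group per variant, so callers
   select the repeated forms by offsetting the base pointer:

       gen_string_ds(s, ot, gen_op_movs + 9);       // REP MOVS group
       gen_string_es(s, ot, gen_op_scas + 9 * 2);   // REPNZ SCAS group

   as done in the string-instruction cases of disas_insn() below. */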
609 static GenOpFunc *gen_op_movs[9 * 2] = {
614 static GenOpFunc *gen_op_stos[9 * 2] = {
619 static GenOpFunc *gen_op_lods[9 * 2] = {
624 static GenOpFunc *gen_op_scas[9 * 3] = {
630 static GenOpFunc *gen_op_cmps[9 * 3] = {
636 static GenOpFunc *gen_op_ins[9 * 2] = {
642 static GenOpFunc *gen_op_outs[9 * 2] = {
648 static inline void gen_string_ds(DisasContext *s, int ot, GenOpFunc **func)
652 override = s->override;
655 if (s->addseg && override < 0)
658 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
666 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
667 /* 16 bit address, always override */
673 static inline void gen_string_es(DisasContext *s, int ot, GenOpFunc **func)
690 static GenOpFunc *gen_op_in[3] = {
696 static GenOpFunc *gen_op_out[3] = {
713 static GenOpFunc3 *gen_jcc_sub[3][8] = {
745 static GenOpFunc2 *gen_op_loop[2][4] = {
760 static GenOpFunc *gen_setcc_slow[8] = {
771 static GenOpFunc *gen_setcc_sub[3][8] = {
776 gen_op_setbe_T0_subb,
780 gen_op_setle_T0_subb,
786 gen_op_setbe_T0_subw,
790 gen_op_setle_T0_subw,
796 gen_op_setbe_T0_subl,
800 gen_op_setle_T0_subl,
804 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
810 gen_op_fsubr_ST0_FT0,
812 gen_op_fdivr_ST0_FT0,
815 /* NOTE the exception in "r" op ordering */
816 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
821 gen_op_fsubr_STN_ST0,
823 gen_op_fdivr_STN_ST0,
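/* Encoding note (editor's addition): both tables are indexed by the 3-bit
   reg field of the FPU arithmetic opcodes (fadd, fmul, fcom, fcomp, fsub,
   fsubr, fdiv, fdivr for the ST0 op FT0 forms).  For the ST(i) op ST0
   register forms the i386 encoding swaps the plain and reversed
   subtract/divide slots, which is why the "r" variants sit at different
   indexes in this second table, as the NOTE above points out. */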
827 /* if d == OR_TMP0, it means memory operand (address in A0) */
828 static void gen_op(DisasContext *s1, int op, int ot, int d)
830 GenOpFunc *gen_update_cc;
833 gen_op_mov_TN_reg[ot][0][d]();
835 gen_op_ld_T0_A0[ot]();
840 if (s1->cc_op != CC_OP_DYNAMIC)
841 gen_op_set_cc_op(s1->cc_op);
843 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
844 gen_op_mov_reg_T0[ot][d]();
846 gen_op_arithc_mem_T0_T1_cc[ot][op - OP_ADCL]();
848 s1->cc_op = CC_OP_DYNAMIC;
852 s1->cc_op = CC_OP_ADDB + ot;
853 gen_update_cc = gen_op_update2_cc;
857 s1->cc_op = CC_OP_SUBB + ot;
858 gen_update_cc = gen_op_update2_cc;
864 gen_op_arith_T0_T1_cc[op]();
865 s1->cc_op = CC_OP_LOGICB + ot;
866 gen_update_cc = gen_op_update1_cc;
869 gen_op_cmpl_T0_T1_cc();
870 s1->cc_op = CC_OP_SUBB + ot;
871 gen_update_cc = NULL;
876 gen_op_mov_reg_T0[ot][d]();
878 gen_op_st_T0_A0[ot]();
880 /* the flags update must happen after the memory write (precise
881 exception support) */
887 /* if d == OR_TMP0, it means memory operand (address in A0) */
888 static void gen_inc(DisasContext *s1, int ot, int d, int c)
891 gen_op_mov_TN_reg[ot][0][d]();
893 gen_op_ld_T0_A0[ot]();
894 if (s1->cc_op != CC_OP_DYNAMIC)
895 gen_op_set_cc_op(s1->cc_op);
898 s1->cc_op = CC_OP_INCB + ot;
901 s1->cc_op = CC_OP_DECB + ot;
904 gen_op_mov_reg_T0[ot][d]();
906 gen_op_st_T0_A0[ot]();
907 gen_op_update_inc_cc();
910 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
913 gen_op_mov_TN_reg[ot][0][d]();
915 gen_op_ld_T0_A0[ot]();
917 gen_op_mov_TN_reg[ot][1][s]();
918 /* for zero counts, flags are not updated, so we must compute them dynamically */
919 if (s1->cc_op != CC_OP_DYNAMIC)
920 gen_op_set_cc_op(s1->cc_op);
923 gen_op_shift_T0_T1_cc[ot][op]();
925 gen_op_shift_mem_T0_T1_cc[ot][op]();
927 gen_op_mov_reg_T0[ot][d]();
928 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
931 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
933 /* currently not optimized */
934 gen_op_movl_T1_im(c);
935 gen_shift(s1, op, ot, d, OR_TMP1);
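/* Field-extraction reference (editor's addition), matching the shifts used
   in gen_lea_modrm() and throughout disas_insn():

       mod   = (modrm >> 6) & 3;   // addressing mode
       reg   = (modrm >> 3) & 7;   // register operand / opcode extension
       rm    =  modrm & 7;         // base register or memory form

       scale = (sib >> 6) & 3;     // SIB byte: index scaled by 1 << scale
       index = (sib >> 3) & 7;
       base  =  sib & 7;
*/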
938 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
945 int mod, rm, code, override, must_add_seg;
947 override = s->override;
948 must_add_seg = s->addseg;
951 mod = (modrm >> 6) & 3;
963 code = ldub(s->pc++);
964 scale = (code >> 6) & 3;
965 index = (code >> 3) & 7;
980 disp = (int8_t)ldub(s->pc++);
990 /* for correct popl handling with esp */
991 if (base == 4 && s->popl_esp_hack)
993 gen_op_movl_A0_reg[base]();
995 gen_op_addl_A0_im(disp);
997 gen_op_movl_A0_im(disp);
999 /* XXX: index == 4 is always invalid */
1000 if (havesib && (index != 4 || scale != 0)) {
1001 gen_op_addl_A0_reg_sN[scale][index]();
1005 if (base == R_EBP || base == R_ESP)
1010 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1018 gen_op_movl_A0_im(disp);
1019 rm = 0; /* avoid SS override */
1026 disp = (int8_t)ldub(s->pc++);
1036 gen_op_movl_A0_reg[R_EBX]();
1037 gen_op_addl_A0_reg_sN[0][R_ESI]();
1040 gen_op_movl_A0_reg[R_EBX]();
1041 gen_op_addl_A0_reg_sN[0][R_EDI]();
1044 gen_op_movl_A0_reg[R_EBP]();
1045 gen_op_addl_A0_reg_sN[0][R_ESI]();
1048 gen_op_movl_A0_reg[R_EBP]();
1049 gen_op_addl_A0_reg_sN[0][R_EDI]();
1052 gen_op_movl_A0_reg[R_ESI]();
1055 gen_op_movl_A0_reg[R_EDI]();
1058 gen_op_movl_A0_reg[R_EBP]();
1062 gen_op_movl_A0_reg[R_EBX]();
1066 gen_op_addl_A0_im(disp);
1067 gen_op_andl_A0_ffff();
1071 if (rm == 2 || rm == 3 || rm == 6)
1076 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1086 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg != OR_TMP0 */
1088 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1090 int mod, rm, opreg, disp;
1092 mod = (modrm >> 6) & 3;
1097 gen_op_mov_TN_reg[ot][0][reg]();
1098 gen_op_mov_reg_T0[ot][rm]();
1100 gen_op_mov_TN_reg[ot][0][rm]();
1102 gen_op_mov_reg_T0[ot][reg]();
1105 gen_lea_modrm(s, modrm, &opreg, &disp);
1108 gen_op_mov_TN_reg[ot][0][reg]();
1109 gen_op_st_T0_A0[ot]();
1111 gen_op_ld_T0_A0[ot]();
1113 gen_op_mov_reg_T0[ot][reg]();
1118 static inline uint32_t insn_get(DisasContext *s, int ot)
1140 static inline void gen_jcc(DisasContext *s, int b, int val, int next_eip)
1142 TranslationBlock *tb;
1147 jcc_op = (b >> 1) & 7;
1149 /* we optimize the cmp/jcc case */
1153 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1156 /* some jumps are easy to compute */
1183 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1186 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1198 if (s->cc_op != CC_OP_DYNAMIC)
1199 gen_op_set_cc_op(s->cc_op);
1202 gen_setcc_slow[jcc_op]();
1208 func((long)tb, val, next_eip);
1210 func((long)tb, next_eip, val);
1215 static void gen_setcc(DisasContext *s, int b)
1221 jcc_op = (b >> 1) & 7;
1223 /* we optimize the cmp/jcc case */
1227 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1232 /* some jumps are easy to compute */
1250 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1253 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1261 if (s->cc_op != CC_OP_DYNAMIC)
1262 gen_op_set_cc_op(s->cc_op);
1263 func = gen_setcc_slow[jcc_op];
1272 /* move T0 to seg_reg and compute if the CPU state may change */
1273 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, unsigned int cur_eip)
1276 gen_op_movl_seg_T0(seg_reg, cur_eip);
1278 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1279 if (!s->addseg && seg_reg < R_FS)
1280 s->is_jmp = 2; /* abort translation because the register may
1281 have a non zero base */
1284 /* generate a push. It depends on ss32, addseg and dflag */
1285 static void gen_push_T0(DisasContext *s)
1295 gen_op_pushl_ss32_T0();
1297 gen_op_pushw_ss32_T0();
1301 gen_op_pushl_ss16_T0();
1303 gen_op_pushw_ss16_T0();
1307 /* two step pop is necessary for precise exceptions */
1308 static void gen_pop_T0(DisasContext *s)
1318 gen_op_popl_ss32_T0();
1320 gen_op_popw_ss32_T0();
1324 gen_op_popl_ss16_T0();
1326 gen_op_popw_ss16_T0();
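/* Note (editor's addition): the pop is split in two so that exceptions stay
   precise: gen_pop_T0() only loads the value into T0, and ESP is advanced
   later by gen_pop_update(), once the value has been consumed without
   faulting. */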
1330 static inline void gen_stack_update(DisasContext *s, int addend)
1334 gen_op_addl_ESP_2();
1335 else if (addend == 4)
1336 gen_op_addl_ESP_4();
1338 gen_op_addl_ESP_im(addend);
1341 gen_op_addw_ESP_2();
1342 else if (addend == 4)
1343 gen_op_addw_ESP_4();
1345 gen_op_addw_ESP_im(addend);
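/* Note (editor's addition): throughout this file "2 << s->dflag" is the
   operand size in bytes (2 in 16 bit code, 4 in 32 bit code); the
   addend == 2 / addend == 4 cases above simply use dedicated micro-ops for
   the two common stack-slot sizes. */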
1349 static void gen_pop_update(DisasContext *s)
1351 gen_stack_update(s, 2 << s->dflag);
1354 static void gen_stack_A0(DisasContext *s)
1356 gen_op_movl_A0_ESP();
1358 gen_op_andl_A0_ffff();
1359 gen_op_movl_T1_A0();
1361 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
1364 /* NOTE: wrap around in 16 bit not fully handled */
1365 static void gen_pusha(DisasContext *s)
1368 gen_op_movl_A0_ESP();
1369 gen_op_addl_A0_im(-16 << s->dflag);
1371 gen_op_andl_A0_ffff();
1372 gen_op_movl_T1_A0();
1374 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
1375 for(i = 0; i < 8; i++) {
1376 gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
1377 gen_op_st_T0_A0[OT_WORD + s->dflag]();
1378 gen_op_addl_A0_im(2 << s->dflag);
1380 gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
1383 /* NOTE: wrap around in 16 bit not fully handled */
1384 static void gen_popa(DisasContext *s)
1387 gen_op_movl_A0_ESP();
1389 gen_op_andl_A0_ffff();
1390 gen_op_movl_T1_A0();
1391 gen_op_addl_T1_im(16 << s->dflag);
1393 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
1394 for(i = 0; i < 8; i++) {
1395 /* ESP is not reloaded */
1397 gen_op_ld_T0_A0[OT_WORD + s->dflag]();
1398 gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
1400 gen_op_addl_A0_im(2 << s->dflag);
1402 gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
1405 /* NOTE: wrap around in 16 bit not fully handled */
1406 /* XXX: check this */
1407 static void gen_enter(DisasContext *s, int esp_addend, int level)
1409 int ot, level1, addend, opsize;
1411 ot = s->dflag + OT_WORD;
1414 opsize = 2 << s->dflag;
1416 gen_op_movl_A0_ESP();
1417 gen_op_addl_A0_im(-opsize);
1419 gen_op_andl_A0_ffff();
1420 gen_op_movl_T1_A0();
1422 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
1424 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
1425 gen_op_st_T0_A0[ot]();
1428 gen_op_addl_A0_im(-opsize);
1429 gen_op_addl_T0_im(-opsize);
1430 gen_op_st_T0_A0[ot]();
1432 gen_op_addl_A0_im(-opsize);
1433 /* XXX: add st_T1_A0 ? */
1434 gen_op_movl_T0_T1();
1435 gen_op_st_T0_A0[ot]();
1437 gen_op_mov_reg_T1[ot][R_EBP]();
1438 addend = -esp_addend;
1440 addend -= opsize * (level1 + 1);
1441 gen_op_addl_T1_im(addend);
1442 gen_op_mov_reg_T1[ot][R_ESP]();
1445 static void gen_exception(DisasContext *s, int trapno, unsigned int cur_eip)
1447 if (s->cc_op != CC_OP_DYNAMIC)
1448 gen_op_set_cc_op(s->cc_op);
1449 gen_op_jmp_im(cur_eip);
1450 gen_op_raise_exception(trapno);
1454 /* an interrupt is different from an exception because of the
1455 privilege checks */
1456 static void gen_interrupt(DisasContext *s, int intno,
1457 unsigned int cur_eip, unsigned int next_eip)
1459 if (s->cc_op != CC_OP_DYNAMIC)
1460 gen_op_set_cc_op(s->cc_op);
1461 gen_op_jmp_im(cur_eip);
1462 gen_op_raise_interrupt(intno, next_eip);
1466 /* generate a jump to eip. No segment change must happen before as a
1467 direct call to the next block may occur */
1468 static void gen_jmp(DisasContext *s, unsigned int eip)
1470 TranslationBlock *tb = s->tb;
1472 if (s->cc_op != CC_OP_DYNAMIC)
1473 gen_op_set_cc_op(s->cc_op);
1474 gen_op_jmp_tb_next((long)tb, eip);
1478 /* return the next pc address. Return -1 if no insn found. *is_jmp_ptr
1479 is set to true if the instruction sets the PC (last instruction of a basic block) */
1481 long disas_insn(DisasContext *s, uint8_t *pc_start)
1483 int b, prefixes, aflag, dflag;
1485 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
1486 unsigned int next_eip;
1496 /* check prefixes */
1499 prefixes |= PREFIX_REPZ;
1502 prefixes |= PREFIX_REPNZ;
1505 prefixes |= PREFIX_LOCK;
1526 prefixes |= PREFIX_DATA;
1529 prefixes |= PREFIX_ADR;
1533 if (prefixes & PREFIX_DATA)
1535 if (prefixes & PREFIX_ADR)
1538 s->prefix = prefixes;
1542 /* lock generation */
1543 if (prefixes & PREFIX_LOCK)
1546 /* now check op code */
1550 /**************************/
1551 /* extended op code */
1552 b = ldub(s->pc++) | 0x100;
1555 /**************************/
1573 ot = dflag ? OT_LONG : OT_WORD;
1576 case 0: /* OP Ev, Gv */
1577 modrm = ldub(s->pc++);
1578 reg = ((modrm >> 3) & 7);
1579 mod = (modrm >> 6) & 3;
1582 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
1584 } else if (op == OP_XORL && rm == reg) {
1586 /* xor reg, reg optimisation */
1588 s->cc_op = CC_OP_LOGICB + ot;
1589 gen_op_mov_reg_T0[ot][reg]();
1590 gen_op_update1_cc();
1595 gen_op_mov_TN_reg[ot][1][reg]();
1596 gen_op(s, op, ot, opreg);
1598 case 1: /* OP Gv, Ev */
1599 modrm = ldub(s->pc++);
1600 mod = (modrm >> 6) & 3;
1601 reg = ((modrm >> 3) & 7);
1604 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
1605 gen_op_ld_T1_A0[ot]();
1606 } else if (op == OP_XORL && rm == reg) {
1609 gen_op_mov_TN_reg[ot][1][rm]();
1611 gen_op(s, op, ot, reg);
1613 case 2: /* OP A, Iv */
1614 val = insn_get(s, ot);
1615 gen_op_movl_T1_im(val);
1616 gen_op(s, op, ot, OR_EAX);
1622 case 0x80: /* GRP1 */
1631 ot = dflag ? OT_LONG : OT_WORD;
1633 modrm = ldub(s->pc++);
1634 mod = (modrm >> 6) & 3;
1636 op = (modrm >> 3) & 7;
1639 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
1642 opreg = rm + OR_EAX;
1649 val = insn_get(s, ot);
1652 val = (int8_t)insn_get(s, OT_BYTE);
1655 gen_op_movl_T1_im(val);
1656 gen_op(s, op, ot, opreg);
1660 /**************************/
1661 /* inc, dec, and other misc arith */
1662 case 0x40 ... 0x47: /* inc Gv */
1663 ot = dflag ? OT_LONG : OT_WORD;
1664 gen_inc(s, ot, OR_EAX + (b & 7), 1);
1666 case 0x48 ... 0x4f: /* dec Gv */
1667 ot = dflag ? OT_LONG : OT_WORD;
1668 gen_inc(s, ot, OR_EAX + (b & 7), -1);
1670 case 0xf6: /* GRP3 */
1675 ot = dflag ? OT_LONG : OT_WORD;
1677 modrm = ldub(s->pc++);
1678 mod = (modrm >> 6) & 3;
1680 op = (modrm >> 3) & 7;
1682 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
1683 gen_op_ld_T0_A0[ot]();
1685 gen_op_mov_TN_reg[ot][0][rm]();
1690 val = insn_get(s, ot);
1691 gen_op_movl_T1_im(val);
1692 gen_op_testl_T0_T1_cc();
1693 s->cc_op = CC_OP_LOGICB + ot;
1698 gen_op_st_T0_A0[ot]();
1700 gen_op_mov_reg_T0[ot][rm]();
1706 gen_op_st_T0_A0[ot]();
1708 gen_op_mov_reg_T0[ot][rm]();
1710 gen_op_update_neg_cc();
1711 s->cc_op = CC_OP_SUBB + ot;
1716 gen_op_mulb_AL_T0();
1719 gen_op_mulw_AX_T0();
1723 gen_op_mull_EAX_T0();
1726 s->cc_op = CC_OP_MUL;
1731 gen_op_imulb_AL_T0();
1734 gen_op_imulw_AX_T0();
1738 gen_op_imull_EAX_T0();
1741 s->cc_op = CC_OP_MUL;
1746 gen_op_divb_AL_T0(pc_start - s->cs_base);
1749 gen_op_divw_AX_T0(pc_start - s->cs_base);
1753 gen_op_divl_EAX_T0(pc_start - s->cs_base);
1760 gen_op_idivb_AL_T0(pc_start - s->cs_base);
1763 gen_op_idivw_AX_T0(pc_start - s->cs_base);
1767 gen_op_idivl_EAX_T0(pc_start - s->cs_base);
1776 case 0xfe: /* GRP4 */
1777 case 0xff: /* GRP5 */
1781 ot = dflag ? OT_LONG : OT_WORD;
1783 modrm = ldub(s->pc++);
1784 mod = (modrm >> 6) & 3;
1786 op = (modrm >> 3) & 7;
1787 if (op >= 2 && b == 0xfe) {
1791 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
1792 if (op >= 2 && op != 3 && op != 5)
1793 gen_op_ld_T0_A0[ot]();
1795 gen_op_mov_TN_reg[ot][0][rm]();
1799 case 0: /* inc Ev */
1804 gen_inc(s, ot, opreg, 1);
1806 case 1: /* dec Ev */
1811 gen_inc(s, ot, opreg, -1);
1813 case 2: /* call Ev */
1814 /* XXX: optimize if memory (no AND is necessary) */
1816 gen_op_andl_T0_ffff();
1818 next_eip = s->pc - s->cs_base;
1819 gen_op_movl_T0_im(next_eip);
1823 case 3: /* lcall Ev */
1824 /* push return segment + offset */
1825 gen_op_movl_T0_seg(R_CS);
1827 next_eip = s->pc - s->cs_base;
1828 gen_op_movl_T0_im(next_eip);
1831 gen_op_ld_T1_A0[ot]();
1832 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
1833 gen_op_lduw_T0_A0();
1834 gen_movl_seg_T0(s, R_CS, pc_start - s->cs_base);
1835 gen_op_movl_T0_T1();
1839 case 4: /* jmp Ev */
1841 gen_op_andl_T0_ffff();
1845 case 5: /* ljmp Ev */
1846 gen_op_ld_T1_A0[ot]();
1847 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
1848 gen_op_lduw_T0_A0();
1850 /* we compute EIP to handle the exception case */
1851 gen_op_jmp_im(pc_start - s->cs_base);
1852 gen_op_ljmp_T0_T1();
1854 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
1855 gen_op_movl_T0_T1();
1860 case 6: /* push Ev */
1868 case 0x84: /* test Ev, Gv */
1873 ot = dflag ? OT_LONG : OT_WORD;
1875 modrm = ldub(s->pc++);
1876 mod = (modrm >> 6) & 3;
1878 reg = (modrm >> 3) & 7;
1880 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
1881 gen_op_mov_TN_reg[ot][1][reg + OR_EAX]();
1882 gen_op_testl_T0_T1_cc();
1883 s->cc_op = CC_OP_LOGICB + ot;
1886 case 0xa8: /* test eAX, Iv */
1891 ot = dflag ? OT_LONG : OT_WORD;
1892 val = insn_get(s, ot);
1894 gen_op_mov_TN_reg[ot][0][OR_EAX]();
1895 gen_op_movl_T1_im(val);
1896 gen_op_testl_T0_T1_cc();
1897 s->cc_op = CC_OP_LOGICB + ot;
1900 case 0x98: /* CWDE/CBW */
1902 gen_op_movswl_EAX_AX();
1904 gen_op_movsbw_AX_AL();
1906 case 0x99: /* CDQ/CWD */
1908 gen_op_movslq_EDX_EAX();
1910 gen_op_movswl_DX_AX();
1912 case 0x1af: /* imul Gv, Ev */
1913 case 0x69: /* imul Gv, Ev, I */
1915 ot = dflag ? OT_LONG : OT_WORD;
1916 modrm = ldub(s->pc++);
1917 reg = ((modrm >> 3) & 7) + OR_EAX;
1918 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
1920 val = insn_get(s, ot);
1921 gen_op_movl_T1_im(val);
1922 } else if (b == 0x6b) {
1923 val = insn_get(s, OT_BYTE);
1924 gen_op_movl_T1_im(val);
1926 gen_op_mov_TN_reg[ot][1][reg]();
1929 if (ot == OT_LONG) {
1930 gen_op_imull_T0_T1();
1932 gen_op_imulw_T0_T1();
1934 gen_op_mov_reg_T0[ot][reg]();
1935 s->cc_op = CC_OP_MUL;
1938 case 0x1c1: /* xadd Ev, Gv */
1942 ot = dflag ? OT_LONG : OT_WORD;
1943 modrm = ldub(s->pc++);
1944 reg = (modrm >> 3) & 7;
1945 mod = (modrm >> 6) & 3;
1948 gen_op_mov_TN_reg[ot][0][reg]();
1949 gen_op_mov_TN_reg[ot][1][rm]();
1950 gen_op_addl_T0_T1();
1951 gen_op_mov_reg_T0[ot][rm]();
1952 gen_op_mov_reg_T1[ot][reg]();
1954 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
1955 gen_op_mov_TN_reg[ot][0][reg]();
1956 gen_op_ld_T1_A0[ot]();
1957 gen_op_addl_T0_T1();
1958 gen_op_st_T0_A0[ot]();
1959 gen_op_mov_reg_T1[ot][reg]();
1961 gen_op_update2_cc();
1962 s->cc_op = CC_OP_ADDB + ot;
1965 case 0x1b1: /* cmpxchg Ev, Gv */
1969 ot = dflag ? OT_LONG : OT_WORD;
1970 modrm = ldub(s->pc++);
1971 reg = (modrm >> 3) & 7;
1972 mod = (modrm >> 6) & 3;
1973 gen_op_mov_TN_reg[ot][1][reg]();
1976 gen_op_mov_TN_reg[ot][0][rm]();
1977 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
1978 gen_op_mov_reg_T0[ot][rm]();
1980 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
1981 gen_op_ld_T0_A0[ot]();
1982 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot]();
1984 s->cc_op = CC_OP_SUBB + ot;
1986 case 0x1c7: /* cmpxchg8b */
1987 modrm = ldub(s->pc++);
1988 mod = (modrm >> 6) & 3;
1991 if (s->cc_op != CC_OP_DYNAMIC)
1992 gen_op_set_cc_op(s->cc_op);
1993 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
1995 s->cc_op = CC_OP_EFLAGS;
1998 /**************************/
2000 case 0x50 ... 0x57: /* push */
2001 gen_op_mov_TN_reg[OT_LONG][0][b & 7]();
2004 case 0x58 ... 0x5f: /* pop */
2005 ot = dflag ? OT_LONG : OT_WORD;
2007 gen_op_mov_reg_T0[ot][b & 7]();
2010 case 0x60: /* pusha */
2013 case 0x61: /* popa */
2016 case 0x68: /* push Iv */
2018 ot = dflag ? OT_LONG : OT_WORD;
2020 val = insn_get(s, ot);
2022 val = (int8_t)insn_get(s, OT_BYTE);
2023 gen_op_movl_T0_im(val);
2026 case 0x8f: /* pop Ev */
2027 ot = dflag ? OT_LONG : OT_WORD;
2028 modrm = ldub(s->pc++);
2030 s->popl_esp_hack = 1;
2031 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
2032 s->popl_esp_hack = 0;
2035 case 0xc8: /* enter */
2040 level = ldub(s->pc++);
2041 gen_enter(s, val, level);
2044 case 0xc9: /* leave */
2045 /* XXX: exception not precise (ESP is updated before potential exception) */
2047 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2048 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
2050 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
2051 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
2054 ot = dflag ? OT_LONG : OT_WORD;
2055 gen_op_mov_reg_T0[ot][R_EBP]();
2058 case 0x06: /* push es */
2059 case 0x0e: /* push cs */
2060 case 0x16: /* push ss */
2061 case 0x1e: /* push ds */
2062 gen_op_movl_T0_seg(b >> 3);
2065 case 0x1a0: /* push fs */
2066 case 0x1a8: /* push gs */
2067 gen_op_movl_T0_seg((b >> 3) & 7);
2070 case 0x07: /* pop es */
2071 case 0x17: /* pop ss */
2072 case 0x1f: /* pop ds */
2074 gen_movl_seg_T0(s, b >> 3, pc_start - s->cs_base);
2077 case 0x1a1: /* pop fs */
2078 case 0x1a9: /* pop gs */
2080 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
2084 /**************************/
2087 case 0x89: /* mov Gv, Ev */
2091 ot = dflag ? OT_LONG : OT_WORD;
2092 modrm = ldub(s->pc++);
2093 reg = (modrm >> 3) & 7;
2095 /* generate a generic store */
2096 gen_ldst_modrm(s, modrm, ot, OR_EAX + reg, 1);
2099 case 0xc7: /* mov Ev, Iv */
2103 ot = dflag ? OT_LONG : OT_WORD;
2104 modrm = ldub(s->pc++);
2105 mod = (modrm >> 6) & 3;
2107 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2108 val = insn_get(s, ot);
2109 gen_op_movl_T0_im(val);
2111 gen_op_st_T0_A0[ot]();
2113 gen_op_mov_reg_T0[ot][modrm & 7]();
2116 case 0x8b: /* mov Ev, Gv */
2120 ot = dflag ? OT_LONG : OT_WORD;
2121 modrm = ldub(s->pc++);
2122 reg = (modrm >> 3) & 7;
2124 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2125 gen_op_mov_reg_T0[ot][reg]();
2127 case 0x8e: /* mov seg, Gv */
2128 ot = dflag ? OT_LONG : OT_WORD;
2129 modrm = ldub(s->pc++);
2130 reg = (modrm >> 3) & 7;
2131 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2132 if (reg >= 6 || reg == R_CS)
2134 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
2136 case 0x8c: /* mov Gv, seg */
2137 ot = dflag ? OT_LONG : OT_WORD;
2138 modrm = ldub(s->pc++);
2139 reg = (modrm >> 3) & 7;
2142 gen_op_movl_T0_seg(reg);
2143 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
2146 case 0x1b6: /* movzbS Gv, Eb */
2147 case 0x1b7: /* movzwS Gv, Eb */
2148 case 0x1be: /* movsbS Gv, Eb */
2149 case 0x1bf: /* movswS Gv, Eb */
2152 /* d_ot is the size of destination */
2153 d_ot = dflag + OT_WORD;
2154 /* ot is the size of source */
2155 ot = (b & 1) + OT_BYTE;
2156 modrm = ldub(s->pc++);
2157 reg = ((modrm >> 3) & 7) + OR_EAX;
2158 mod = (modrm >> 6) & 3;
2162 gen_op_mov_TN_reg[ot][0][rm]();
2163 switch(ot | (b & 8)) {
2165 gen_op_movzbl_T0_T0();
2168 gen_op_movsbl_T0_T0();
2171 gen_op_movzwl_T0_T0();
2175 gen_op_movswl_T0_T0();
2178 gen_op_mov_reg_T0[d_ot][reg]();
2180 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2182 gen_op_lds_T0_A0[ot]();
2184 gen_op_ldu_T0_A0[ot]();
2186 gen_op_mov_reg_T0[d_ot][reg]();
2191 case 0x8d: /* lea */
2192 ot = dflag ? OT_LONG : OT_WORD;
2193 modrm = ldub(s->pc++);
2194 reg = (modrm >> 3) & 7;
2195 /* we must ensure that no segment is added */
2199 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2201 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
2204 case 0xa0: /* mov EAX, Ov */
2206 case 0xa2: /* mov Ov, EAX */
2211 ot = dflag ? OT_LONG : OT_WORD;
2213 offset_addr = insn_get(s, OT_LONG);
2215 offset_addr = insn_get(s, OT_WORD);
2216 gen_op_movl_A0_im(offset_addr);
2217 /* handle override */
2219 int override, must_add_seg;
2220 must_add_seg = s->addseg;
2221 if (s->override >= 0) {
2222 override = s->override;
2228 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
2232 gen_op_ld_T0_A0[ot]();
2233 gen_op_mov_reg_T0[ot][R_EAX]();
2235 gen_op_mov_TN_reg[ot][0][R_EAX]();
2236 gen_op_st_T0_A0[ot]();
2239 case 0xd7: /* xlat */
2240 gen_op_movl_A0_reg[R_EBX]();
2241 gen_op_addl_A0_AL();
2243 gen_op_andl_A0_ffff();
2244 /* handle override */
2246 int override, must_add_seg;
2247 must_add_seg = s->addseg;
2249 if (s->override >= 0) {
2250 override = s->override;
2256 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
2259 gen_op_ldub_T0_A0();
2260 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
2262 case 0xb0 ... 0xb7: /* mov R, Ib */
2263 val = insn_get(s, OT_BYTE);
2264 gen_op_movl_T0_im(val);
2265 gen_op_mov_reg_T0[OT_BYTE][b & 7]();
2267 case 0xb8 ... 0xbf: /* mov R, Iv */
2268 ot = dflag ? OT_LONG : OT_WORD;
2269 val = insn_get(s, ot);
2270 reg = OR_EAX + (b & 7);
2271 gen_op_movl_T0_im(val);
2272 gen_op_mov_reg_T0[ot][reg]();
2275 case 0x91 ... 0x97: /* xchg R, EAX */
2276 ot = dflag ? OT_LONG : OT_WORD;
2281 case 0x87: /* xchg Ev, Gv */
2285 ot = dflag ? OT_LONG : OT_WORD;
2286 modrm = ldub(s->pc++);
2287 reg = (modrm >> 3) & 7;
2288 mod = (modrm >> 6) & 3;
2292 gen_op_mov_TN_reg[ot][0][reg]();
2293 gen_op_mov_TN_reg[ot][1][rm]();
2294 gen_op_mov_reg_T0[ot][rm]();
2295 gen_op_mov_reg_T1[ot][reg]();
2297 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2298 gen_op_mov_TN_reg[ot][0][reg]();
2299 /* for xchg, lock is implicit */
2300 if (!(prefixes & PREFIX_LOCK))
2302 gen_op_ld_T1_A0[ot]();
2303 gen_op_st_T0_A0[ot]();
2304 if (!(prefixes & PREFIX_LOCK))
2306 gen_op_mov_reg_T1[ot][reg]();
2309 case 0xc4: /* les Gv */
2312 case 0xc5: /* lds Gv */
2315 case 0x1b2: /* lss Gv */
2318 case 0x1b4: /* lfs Gv */
2321 case 0x1b5: /* lgs Gv */
2324 ot = dflag ? OT_LONG : OT_WORD;
2325 modrm = ldub(s->pc++);
2326 reg = (modrm >> 3) & 7;
2327 mod = (modrm >> 6) & 3;
2330 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2331 gen_op_ld_T1_A0[ot]();
2332 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
2333 /* load the segment first to handle exceptions properly */
2334 gen_op_lduw_T0_A0();
2335 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
2336 /* then put the data */
2337 gen_op_mov_reg_T1[ot][reg]();
2340 /************************/
2351 ot = dflag ? OT_LONG : OT_WORD;
2353 modrm = ldub(s->pc++);
2354 mod = (modrm >> 6) & 3;
2356 op = (modrm >> 3) & 7;
2359 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2362 opreg = rm + OR_EAX;
2367 gen_shift(s, op, ot, opreg, OR_ECX);
2370 shift = ldub(s->pc++);
2372 gen_shifti(s, op, ot, opreg, shift);
2387 case 0x1a4: /* shld imm */
2391 case 0x1a5: /* shld cl */
2395 case 0x1ac: /* shrd imm */
2399 case 0x1ad: /* shrd cl */
2403 ot = dflag ? OT_LONG : OT_WORD;
2404 modrm = ldub(s->pc++);
2405 mod = (modrm >> 6) & 3;
2407 reg = (modrm >> 3) & 7;
2410 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2411 gen_op_ld_T0_A0[ot]();
2413 gen_op_mov_TN_reg[ot][0][rm]();
2415 gen_op_mov_TN_reg[ot][1][reg]();
2418 val = ldub(s->pc++);
2422 gen_op_shiftd_T0_T1_im_cc[ot - OT_WORD][op](val);
2424 gen_op_shiftd_mem_T0_T1_im_cc[ot - OT_WORD][op](val);
2425 if (op == 0 && ot != OT_WORD)
2426 s->cc_op = CC_OP_SHLB + ot;
2428 s->cc_op = CC_OP_SARB + ot;
2431 if (s->cc_op != CC_OP_DYNAMIC)
2432 gen_op_set_cc_op(s->cc_op);
2434 gen_op_shiftd_T0_T1_ECX_cc[ot - OT_WORD][op]();
2436 gen_op_shiftd_mem_T0_T1_ECX_cc[ot - OT_WORD][op]();
2437 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2440 gen_op_mov_reg_T0[ot][rm]();
2444 /************************/
2447 modrm = ldub(s->pc++);
2448 mod = (modrm >> 6) & 3;
2450 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
2454 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2456 case 0x00 ... 0x07: /* fxxxs */
2457 case 0x10 ... 0x17: /* fixxxl */
2458 case 0x20 ... 0x27: /* fxxxl */
2459 case 0x30 ... 0x37: /* fixxx */
2466 gen_op_flds_FT0_A0();
2469 gen_op_fildl_FT0_A0();
2472 gen_op_fldl_FT0_A0();
2476 gen_op_fild_FT0_A0();
2480 gen_op_fp_arith_ST0_FT0[op1]();
2482 /* fcomp needs pop */
2487 case 0x08: /* flds */
2488 case 0x0a: /* fsts */
2489 case 0x0b: /* fstps */
2490 case 0x18: /* fildl */
2491 case 0x1a: /* fistl */
2492 case 0x1b: /* fistpl */
2493 case 0x28: /* fldl */
2494 case 0x2a: /* fstl */
2495 case 0x2b: /* fstpl */
2496 case 0x38: /* filds */
2497 case 0x3a: /* fists */
2498 case 0x3b: /* fistps */
2504 gen_op_flds_ST0_A0();
2507 gen_op_fildl_ST0_A0();
2510 gen_op_fldl_ST0_A0();
2514 gen_op_fild_ST0_A0();
2521 gen_op_fsts_ST0_A0();
2524 gen_op_fistl_ST0_A0();
2527 gen_op_fstl_ST0_A0();
2531 gen_op_fist_ST0_A0();
2539 case 0x0c: /* fldenv mem */
2540 gen_op_fldenv_A0(s->dflag);
2542 case 0x0d: /* fldcw mem */
2545 case 0x0e: /* fnstenv mem */
2546 gen_op_fnstenv_A0(s->dflag);
2548 case 0x0f: /* fnstcw mem */
2551 case 0x1d: /* fldt mem */
2552 gen_op_fldt_ST0_A0();
2554 case 0x1f: /* fstpt mem */
2555 gen_op_fstt_ST0_A0();
2558 case 0x2c: /* frstor mem */
2559 gen_op_frstor_A0(s->dflag);
2561 case 0x2e: /* fnsave mem */
2562 gen_op_fnsave_A0(s->dflag);
2564 case 0x2f: /* fnstsw mem */
2567 case 0x3c: /* fbld */
2568 gen_op_fbld_ST0_A0();
2570 case 0x3e: /* fbstp */
2571 gen_op_fbst_ST0_A0();
2574 case 0x3d: /* fildll */
2575 gen_op_fildll_ST0_A0();
2577 case 0x3f: /* fistpll */
2578 gen_op_fistll_ST0_A0();
2585 /* register float ops */
2589 case 0x08: /* fld sti */
2591 gen_op_fmov_ST0_STN((opreg + 1) & 7);
2593 case 0x09: /* fxchg sti */
2594 gen_op_fxchg_ST0_STN(opreg);
2596 case 0x0a: /* grp d9/2 */
2604 case 0x0c: /* grp d9/4 */
2614 gen_op_fcom_ST0_FT0();
2623 case 0x0d: /* grp d9/5 */
2632 gen_op_fldl2t_ST0();
2636 gen_op_fldl2e_ST0();
2644 gen_op_fldlg2_ST0();
2648 gen_op_fldln2_ST0();
2659 case 0x0e: /* grp d9/6 */
2670 case 3: /* fpatan */
2673 case 4: /* fxtract */
2676 case 5: /* fprem1 */
2679 case 6: /* fdecstp */
2683 case 7: /* fincstp */
2688 case 0x0f: /* grp d9/7 */
2693 case 1: /* fyl2xp1 */
2699 case 3: /* fsincos */
2702 case 5: /* fscale */
2705 case 4: /* frndint */
2717 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
2718 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
2719 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
2725 gen_op_fp_arith_STN_ST0[op1](opreg);
2729 gen_op_fmov_FT0_STN(opreg);
2730 gen_op_fp_arith_ST0_FT0[op1]();
2734 case 0x02: /* fcom */
2735 gen_op_fmov_FT0_STN(opreg);
2736 gen_op_fcom_ST0_FT0();
2738 case 0x03: /* fcomp */
2739 gen_op_fmov_FT0_STN(opreg);
2740 gen_op_fcom_ST0_FT0();
2743 case 0x15: /* da/5 */
2745 case 1: /* fucompp */
2746 gen_op_fmov_FT0_STN(1);
2747 gen_op_fucom_ST0_FT0();
2757 case 0: /* feni (287 only, just do nop here) */
2759 case 1: /* fdisi (287 only, just do nop here) */
2764 case 3: /* fninit */
2767 case 4: /* fsetpm (287 only, just do nop here) */
2773 case 0x1d: /* fucomi */
2774 if (s->cc_op != CC_OP_DYNAMIC)
2775 gen_op_set_cc_op(s->cc_op);
2776 gen_op_fmov_FT0_STN(opreg);
2777 gen_op_fucomi_ST0_FT0();
2778 s->cc_op = CC_OP_EFLAGS;
2780 case 0x1e: /* fcomi */
2781 if (s->cc_op != CC_OP_DYNAMIC)
2782 gen_op_set_cc_op(s->cc_op);
2783 gen_op_fmov_FT0_STN(opreg);
2784 gen_op_fcomi_ST0_FT0();
2785 s->cc_op = CC_OP_EFLAGS;
2787 case 0x2a: /* fst sti */
2788 gen_op_fmov_STN_ST0(opreg);
2790 case 0x2b: /* fstp sti */
2791 gen_op_fmov_STN_ST0(opreg);
2794 case 0x2c: /* fucom st(i) */
2795 gen_op_fmov_FT0_STN(opreg);
2796 gen_op_fucom_ST0_FT0();
2798 case 0x2d: /* fucomp st(i) */
2799 gen_op_fmov_FT0_STN(opreg);
2800 gen_op_fucom_ST0_FT0();
2803 case 0x33: /* de/3 */
2805 case 1: /* fcompp */
2806 gen_op_fmov_FT0_STN(1);
2807 gen_op_fcom_ST0_FT0();
2815 case 0x3c: /* df/4 */
2818 gen_op_fnstsw_EAX();
2824 case 0x3d: /* fucomip */
2825 if (s->cc_op != CC_OP_DYNAMIC)
2826 gen_op_set_cc_op(s->cc_op);
2827 gen_op_fmov_FT0_STN(opreg);
2828 gen_op_fucomi_ST0_FT0();
2830 s->cc_op = CC_OP_EFLAGS;
2832 case 0x3e: /* fcomip */
2833 if (s->cc_op != CC_OP_DYNAMIC)
2834 gen_op_set_cc_op(s->cc_op);
2835 gen_op_fmov_FT0_STN(opreg);
2836 gen_op_fcomi_ST0_FT0();
2838 s->cc_op = CC_OP_EFLAGS;
2845 /************************/
2848 case 0xa4: /* movsS */
2853 ot = dflag ? OT_LONG : OT_WORD;
2855 if (prefixes & PREFIX_REPZ) {
2856 gen_string_ds(s, ot, gen_op_movs + 9);
2858 gen_string_ds(s, ot, gen_op_movs);
2862 case 0xaa: /* stosS */
2867 ot = dflag ? OT_LONG : OT_WORD;
2869 if (prefixes & PREFIX_REPZ) {
2870 gen_string_es(s, ot, gen_op_stos + 9);
2872 gen_string_es(s, ot, gen_op_stos);
2875 case 0xac: /* lodsS */
2880 ot = dflag ? OT_LONG : OT_WORD;
2881 if (prefixes & PREFIX_REPZ) {
2882 gen_string_ds(s, ot, gen_op_lods + 9);
2884 gen_string_ds(s, ot, gen_op_lods);
2887 case 0xae: /* scasS */
2892 ot = dflag ? OT_LONG : OT_WORD;
2893 if (prefixes & PREFIX_REPNZ) {
2894 if (s->cc_op != CC_OP_DYNAMIC)
2895 gen_op_set_cc_op(s->cc_op);
2896 gen_string_es(s, ot, gen_op_scas + 9 * 2);
2897 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2898 } else if (prefixes & PREFIX_REPZ) {
2899 if (s->cc_op != CC_OP_DYNAMIC)
2900 gen_op_set_cc_op(s->cc_op);
2901 gen_string_es(s, ot, gen_op_scas + 9);
2902 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2904 gen_string_es(s, ot, gen_op_scas);
2905 s->cc_op = CC_OP_SUBB + ot;
2909 case 0xa6: /* cmpsS */
2914 ot = dflag ? OT_LONG : OT_WORD;
2915 if (prefixes & PREFIX_REPNZ) {
2916 if (s->cc_op != CC_OP_DYNAMIC)
2917 gen_op_set_cc_op(s->cc_op);
2918 gen_string_ds(s, ot, gen_op_cmps + 9 * 2);
2919 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2920 } else if (prefixes & PREFIX_REPZ) {
2921 if (s->cc_op != CC_OP_DYNAMIC)
2922 gen_op_set_cc_op(s->cc_op);
2923 gen_string_ds(s, ot, gen_op_cmps + 9);
2924 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2926 gen_string_ds(s, ot, gen_op_cmps);
2927 s->cc_op = CC_OP_SUBB + ot;
2930 case 0x6c: /* insS */
2932 if (s->cpl > s->iopl || s->vm86) {
2933 /* NOTE: even for (E)CX = 0 the exception is raised */
2934 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2939 ot = dflag ? OT_LONG : OT_WORD;
2940 if (prefixes & PREFIX_REPZ) {
2941 gen_string_es(s, ot, gen_op_ins + 9);
2943 gen_string_es(s, ot, gen_op_ins);
2947 case 0x6e: /* outsS */
2949 if (s->cpl > s->iopl || s->vm86) {
2950 /* NOTE: even for (E)CX = 0 the exception is raised */
2951 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2956 ot = dflag ? OT_LONG : OT_WORD;
2957 if (prefixes & PREFIX_REPZ) {
2958 gen_string_ds(s, ot, gen_op_outs + 9);
2960 gen_string_ds(s, ot, gen_op_outs);
2965 /************************/
2969 if (s->cpl > s->iopl || s->vm86) {
2970 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2975 ot = dflag ? OT_LONG : OT_WORD;
2976 val = ldub(s->pc++);
2977 gen_op_movl_T0_im(val);
2979 gen_op_mov_reg_T1[ot][R_EAX]();
2984 if (s->cpl > s->iopl || s->vm86) {
2985 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2990 ot = dflag ? OT_LONG : OT_WORD;
2991 val = ldub(s->pc++);
2992 gen_op_movl_T0_im(val);
2993 gen_op_mov_TN_reg[ot][1][R_EAX]();
2999 if (s->cpl > s->iopl || s->vm86) {
3000 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3005 ot = dflag ? OT_LONG : OT_WORD;
3006 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
3008 gen_op_mov_reg_T1[ot][R_EAX]();
3013 if (s->cpl > s->iopl || s->vm86) {
3014 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3019 ot = dflag ? OT_LONG : OT_WORD;
3020 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
3021 gen_op_mov_TN_reg[ot][1][R_EAX]();
3026 /************************/
3028 case 0xc2: /* ret im */
3032 gen_stack_update(s, val + (2 << s->dflag));
3034 gen_op_andl_T0_ffff();
3038 case 0xc3: /* ret */
3042 gen_op_andl_T0_ffff();
3046 case 0xca: /* lret im */
3052 gen_op_ld_T0_A0[1 + s->dflag]();
3054 gen_op_andl_T0_ffff();
3055 /* NOTE: keeping EIP updated is not a problem in case of exception */
3059 gen_op_addl_A0_im(2 << s->dflag);
3060 gen_op_ld_T0_A0[1 + s->dflag]();
3061 gen_movl_seg_T0(s, R_CS, pc_start - s->cs_base);
3062 /* add stack offset */
3063 gen_stack_update(s, val + (4 << s->dflag));
3066 case 0xcb: /* lret */
3069 case 0xcf: /* iret */
3070 if (s->vm86 && s->iopl != 3) {
3071 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3073 if (s->cc_op != CC_OP_DYNAMIC)
3074 gen_op_set_cc_op(s->cc_op);
3075 gen_op_jmp_im(pc_start - s->cs_base);
3076 gen_op_iret_protected(s->dflag);
3077 s->cc_op = CC_OP_EFLAGS;
3081 case 0xe8: /* call im */
3083 unsigned int next_eip;
3084 ot = dflag ? OT_LONG : OT_WORD;
3085 val = insn_get(s, ot);
3086 next_eip = s->pc - s->cs_base;
3090 gen_op_movl_T0_im(next_eip);
3095 case 0x9a: /* lcall im */
3097 unsigned int selector, offset;
3098 /* XXX: not restartable */
3100 ot = dflag ? OT_LONG : OT_WORD;
3101 offset = insn_get(s, ot);
3102 selector = insn_get(s, OT_WORD);
3104 /* push return segment + offset */
3105 gen_op_movl_T0_seg(R_CS);
3107 next_eip = s->pc - s->cs_base;
3108 gen_op_movl_T0_im(next_eip);
3111 /* change cs and pc */
3112 gen_op_movl_T0_im(selector);
3113 gen_movl_seg_T0(s, R_CS, pc_start - s->cs_base);
3114 gen_op_jmp_im((unsigned long)offset);
3118 case 0xe9: /* jmp */
3119 ot = dflag ? OT_LONG : OT_WORD;
3120 val = insn_get(s, ot);
3121 val += s->pc - s->cs_base;
3126 case 0xea: /* ljmp im */
3128 unsigned int selector, offset;
3130 ot = dflag ? OT_LONG : OT_WORD;
3131 offset = insn_get(s, ot);
3132 selector = insn_get(s, OT_WORD);
3134 /* change cs and pc */
3135 gen_op_movl_T0_im(selector);
3137 /* we compute EIP to handle the exception case */
3138 gen_op_jmp_im(pc_start - s->cs_base);
3139 gen_op_movl_T1_im(offset);
3140 gen_op_ljmp_T0_T1();
3142 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3143 gen_op_jmp_im((unsigned long)offset);
3148 case 0xeb: /* jmp Jb */
3149 val = (int8_t)insn_get(s, OT_BYTE);
3150 val += s->pc - s->cs_base;
3155 case 0x70 ... 0x7f: /* jcc Jb */
3156 val = (int8_t)insn_get(s, OT_BYTE);
3158 case 0x180 ... 0x18f: /* jcc Jv */
3160 val = insn_get(s, OT_LONG);
3162 val = (int16_t)insn_get(s, OT_WORD);
3165 next_eip = s->pc - s->cs_base;
3169 gen_jcc(s, b, val, next_eip);
3172 case 0x190 ... 0x19f: /* setcc Gv */
3173 modrm = ldub(s->pc++);
3175 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
3177 case 0x140 ... 0x14f: /* cmov Gv, Ev */
3178 ot = dflag ? OT_LONG : OT_WORD;
3179 modrm = ldub(s->pc++);
3180 reg = (modrm >> 3) & 7;
3181 mod = (modrm >> 6) & 3;
3184 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3185 gen_op_ld_T1_A0[ot]();
3188 gen_op_mov_TN_reg[ot][1][rm]();
3190 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
3193 /************************/
3195 case 0x9c: /* pushf */
3196 if (s->vm86 && s->iopl != 3) {
3197 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3199 if (s->cc_op != CC_OP_DYNAMIC)
3200 gen_op_set_cc_op(s->cc_op);
3201 gen_op_movl_T0_eflags();
3205 case 0x9d: /* popf */
3206 if (s->vm86 && s->iopl != 3) {
3207 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3212 gen_op_movl_eflags_T0_cpl0();
3214 gen_op_movw_eflags_T0_cpl0();
3218 gen_op_movl_eflags_T0();
3220 gen_op_movw_eflags_T0();
3224 s->cc_op = CC_OP_EFLAGS;
3225 s->is_jmp = 2; /* abort translation because TF flag may change */
3228 case 0x9e: /* sahf */
3229 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
3230 if (s->cc_op != CC_OP_DYNAMIC)
3231 gen_op_set_cc_op(s->cc_op);
3232 gen_op_movb_eflags_T0();
3233 s->cc_op = CC_OP_EFLAGS;
3235 case 0x9f: /* lahf */
3236 if (s->cc_op != CC_OP_DYNAMIC)
3237 gen_op_set_cc_op(s->cc_op);
3238 gen_op_movl_T0_eflags();
3239 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
3241 case 0xf5: /* cmc */
3242 if (s->cc_op != CC_OP_DYNAMIC)
3243 gen_op_set_cc_op(s->cc_op);
3245 s->cc_op = CC_OP_EFLAGS;
3247 case 0xf8: /* clc */
3248 if (s->cc_op != CC_OP_DYNAMIC)
3249 gen_op_set_cc_op(s->cc_op);
3251 s->cc_op = CC_OP_EFLAGS;
3253 case 0xf9: /* stc */
3254 if (s->cc_op != CC_OP_DYNAMIC)
3255 gen_op_set_cc_op(s->cc_op);
3257 s->cc_op = CC_OP_EFLAGS;
3259 case 0xfc: /* cld */
3262 case 0xfd: /* std */
3266 /************************/
3267 /* bit operations */
3268 case 0x1ba: /* bt/bts/btr/btc Gv, im */
3269 ot = dflag ? OT_LONG : OT_WORD;
3270 modrm = ldub(s->pc++);
3271 op = (modrm >> 3) & 7;
3272 mod = (modrm >> 6) & 3;
3275 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3276 gen_op_ld_T0_A0[ot]();
3278 gen_op_mov_TN_reg[ot][0][rm]();
3281 val = ldub(s->pc++);
3282 gen_op_movl_T1_im(val);
3286 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
3287 s->cc_op = CC_OP_SARB + ot;
3290 gen_op_st_T0_A0[ot]();
3292 gen_op_mov_reg_T0[ot][rm]();
3293 gen_op_update_bt_cc();
3296 case 0x1a3: /* bt Gv, Ev */
3299 case 0x1ab: /* bts */
3302 case 0x1b3: /* btr */
3305 case 0x1bb: /* btc */
3308 ot = dflag ? OT_LONG : OT_WORD;
3309 modrm = ldub(s->pc++);
3310 reg = (modrm >> 3) & 7;
3311 mod = (modrm >> 6) & 3;
3313 gen_op_mov_TN_reg[OT_LONG][1][reg]();
3315 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3316 /* specific case: we need to add a displacement */
3318 gen_op_add_bitw_A0_T1();
3320 gen_op_add_bitl_A0_T1();
3321 gen_op_ld_T0_A0[ot]();
3323 gen_op_mov_TN_reg[ot][0][rm]();
3325 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
3326 s->cc_op = CC_OP_SARB + ot;
3329 gen_op_st_T0_A0[ot]();
3331 gen_op_mov_reg_T0[ot][rm]();
3332 gen_op_update_bt_cc();
3335 case 0x1bc: /* bsf */
3336 case 0x1bd: /* bsr */
3337 ot = dflag ? OT_LONG : OT_WORD;
3338 modrm = ldub(s->pc++);
3339 reg = (modrm >> 3) & 7;
3340 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3341 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
3342 /* NOTE: we always write back the result. Intel doc says it is
3343 undefined if T0 == 0 */
3344 gen_op_mov_reg_T0[ot][reg]();
3345 s->cc_op = CC_OP_LOGICB + ot;
3347 /************************/
3349 case 0x27: /* daa */
3350 if (s->cc_op != CC_OP_DYNAMIC)
3351 gen_op_set_cc_op(s->cc_op);
3353 s->cc_op = CC_OP_EFLAGS;
3355 case 0x2f: /* das */
3356 if (s->cc_op != CC_OP_DYNAMIC)
3357 gen_op_set_cc_op(s->cc_op);
3359 s->cc_op = CC_OP_EFLAGS;
3361 case 0x37: /* aaa */
3362 if (s->cc_op != CC_OP_DYNAMIC)
3363 gen_op_set_cc_op(s->cc_op);
3365 s->cc_op = CC_OP_EFLAGS;
3367 case 0x3f: /* aas */
3368 if (s->cc_op != CC_OP_DYNAMIC)
3369 gen_op_set_cc_op(s->cc_op);
3371 s->cc_op = CC_OP_EFLAGS;
3373 case 0xd4: /* aam */
3374 val = ldub(s->pc++);
3376 s->cc_op = CC_OP_LOGICB;
3378 case 0xd5: /* aad */
3379 val = ldub(s->pc++);
3381 s->cc_op = CC_OP_LOGICB;
3383 /************************/
3385 case 0x90: /* nop */
3387 case 0x9b: /* fwait */
3389 case 0xcc: /* int3 */
3390 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
3392 case 0xcd: /* int N */
3393 val = ldub(s->pc++);
3394 /* XXX: add error code for vm86 GPF */
3396 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
3398 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3400 case 0xce: /* into */
3401 if (s->cc_op != CC_OP_DYNAMIC)
3402 gen_op_set_cc_op(s->cc_op);
3403 gen_op_into(s->pc - s->cs_base);
3405 case 0xfa: /* cli */
3407 if (s->cpl <= s->iopl) {
3410 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3416 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3420 case 0xfb: /* sti */
3422 if (s->cpl <= s->iopl) {
3424 s->is_jmp = 2; /* give a chance to handle pending irqs */
3426 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3431 s->is_jmp = 2; /* give a chance to handle pending irqs */
3433 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3437 case 0x62: /* bound */
3438 ot = dflag ? OT_LONG : OT_WORD;
3439 modrm = ldub(s->pc++);
3440 reg = (modrm >> 3) & 7;
3441 mod = (modrm >> 6) & 3;
3444 gen_op_mov_reg_T0[ot][reg]();
3445 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3447 gen_op_boundw(pc_start - s->cs_base);
3449 gen_op_boundl(pc_start - s->cs_base);
3451 case 0x1c8 ... 0x1cf: /* bswap reg */
3453 gen_op_mov_TN_reg[OT_LONG][0][reg]();
3455 gen_op_mov_reg_T0[OT_LONG][reg]();
3457 case 0xd6: /* salc */
3458 if (s->cc_op != CC_OP_DYNAMIC)
3459 gen_op_set_cc_op(s->cc_op);
3462 case 0xe0: /* loopnz */
3463 case 0xe1: /* loopz */
3464 if (s->cc_op != CC_OP_DYNAMIC)
3465 gen_op_set_cc_op(s->cc_op);
3467 case 0xe2: /* loop */
3468 case 0xe3: /* jecxz */
3469 val = (int8_t)insn_get(s, OT_BYTE);
3470 next_eip = s->pc - s->cs_base;
3474 gen_op_loop[s->aflag][b & 3](val, next_eip);
3477 case 0x130: /* wrmsr */
3478 case 0x132: /* rdmsr */
3480 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3488 case 0x131: /* rdtsc */
3491 case 0x1a2: /* cpuid */
3494 case 0xf4: /* hlt */
3496 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3498 if (s->cc_op != CC_OP_DYNAMIC)
3499 gen_op_set_cc_op(s->cc_op);
3500 gen_op_jmp_im(s->pc - s->cs_base);
3506 modrm = ldub(s->pc++);
3507 mod = (modrm >> 6) & 3;
3508 op = (modrm >> 3) & 7;
3511 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
3515 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3519 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3521 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3522 gen_op_jmp_im(pc_start - s->cs_base);
3527 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
3531 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3535 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3537 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3538 gen_op_jmp_im(pc_start - s->cs_base);
3549 modrm = ldub(s->pc++);
3550 mod = (modrm >> 6) & 3;
3551 op = (modrm >> 3) & 7;
3557 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3559 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
3561 gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
3563 gen_op_addl_A0_im(2);
3565 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.base));
3567 gen_op_movl_T0_env(offsetof(CPUX86State,idt.base));
3569 gen_op_andl_T0_im(0xffffff);
3577 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3579 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3580 gen_op_lduw_T1_A0();
3581 gen_op_addl_A0_im(2);
3584 gen_op_andl_T0_im(0xffffff);
3586 gen_op_movl_env_T0(offsetof(CPUX86State,gdt.base));
3587 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
3589 gen_op_movl_env_T0(offsetof(CPUX86State,idt.base));
3590 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
3595 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
3596 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
3600 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3602 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3606 case 7: /* invlpg */
3608 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3612 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3620 case 0x102: /* lar */
3621 case 0x103: /* lsl */
3624 ot = dflag ? OT_LONG : OT_WORD;
3625 modrm = ldub(s->pc++);
3626 reg = (modrm >> 3) & 7;
3627 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3628 gen_op_mov_TN_reg[ot][1][reg]();
3629 if (s->cc_op != CC_OP_DYNAMIC)
3630 gen_op_set_cc_op(s->cc_op);
3635 s->cc_op = CC_OP_EFLAGS;
3636 gen_op_mov_reg_T1[ot][reg]();
3639 modrm = ldub(s->pc++);
3640 mod = (modrm >> 6) & 3;
3641 op = (modrm >> 3) & 7;
3643 case 0: /* prefetchnta */
3644 case 1: /* prefetcht0 */
3645 case 2: /* prefetcht1 */
3646 case 3: /* prefetcht2 */
3649 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3650 /* nothing more to do */
3656 case 0x120: /* mov reg, crN */
3657 case 0x122: /* mov crN, reg */
3659 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3661 modrm = ldub(s->pc++);
3662 if ((modrm & 0xc0) != 0xc0)
3665 reg = (modrm >> 3) & 7;
3672 gen_op_mov_TN_reg[OT_LONG][0][rm]();
3673 gen_op_movl_crN_T0(reg);
3676 gen_op_movl_T0_env(offsetof(CPUX86State,cr[reg]));
3677 gen_op_mov_reg_T0[OT_LONG][rm]();
3685 case 0x121: /* mov reg, drN */
3686 case 0x123: /* mov drN, reg */
3688 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3690 modrm = ldub(s->pc++);
3691 if ((modrm & 0xc0) != 0xc0)
3694 reg = (modrm >> 3) & 7;
3695 /* XXX: do it dynamically with CR4.DE bit */
3696 if (reg == 4 || reg == 5)
3699 gen_op_mov_TN_reg[OT_LONG][0][rm]();
3700 gen_op_movl_drN_T0(reg);
3703 gen_op_movl_T0_env(offsetof(CPUX86State,dr[reg]));
3704 gen_op_mov_reg_T0[OT_LONG][rm]();
3708 case 0x106: /* clts */
3710 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3718 /* lock generation */
3719 if (s->prefix & PREFIX_LOCK)
3723 /* XXX: ensure that no lock was generated */
3727 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
3728 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
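/* Note (editor's addition): CC_O/CC_S/CC_Z/CC_A/CC_P/CC_C are the usual
   EFLAGS condition-code bits; the tables below record, per generated
   micro-op, which of those bits the op reads and which it writes. */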
3730 /* flags read by an operation */
3731 static uint16_t opc_read_flags[NB_OPS] = {
3732 [INDEX_op_aas] = CC_A,
3733 [INDEX_op_aaa] = CC_A,
3734 [INDEX_op_das] = CC_A | CC_C,
3735 [INDEX_op_daa] = CC_A | CC_C,
3737 [INDEX_op_adcb_T0_T1_cc] = CC_C,
3738 [INDEX_op_adcw_T0_T1_cc] = CC_C,
3739 [INDEX_op_adcl_T0_T1_cc] = CC_C,
3740 [INDEX_op_sbbb_T0_T1_cc] = CC_C,
3741 [INDEX_op_sbbw_T0_T1_cc] = CC_C,
3742 [INDEX_op_sbbl_T0_T1_cc] = CC_C,
3744 [INDEX_op_adcb_mem_T0_T1_cc] = CC_C,
3745 [INDEX_op_adcw_mem_T0_T1_cc] = CC_C,
3746 [INDEX_op_adcl_mem_T0_T1_cc] = CC_C,
3747 [INDEX_op_sbbb_mem_T0_T1_cc] = CC_C,
3748 [INDEX_op_sbbw_mem_T0_T1_cc] = CC_C,
3749 [INDEX_op_sbbl_mem_T0_T1_cc] = CC_C,
3751 /* subtle: due to the incl/decl implementation, C is used */
3752 [INDEX_op_update_inc_cc] = CC_C,
3754 [INDEX_op_into] = CC_O,
3756 [INDEX_op_jb_subb] = CC_C,
3757 [INDEX_op_jb_subw] = CC_C,
3758 [INDEX_op_jb_subl] = CC_C,
3760 [INDEX_op_jz_subb] = CC_Z,
3761 [INDEX_op_jz_subw] = CC_Z,
3762 [INDEX_op_jz_subl] = CC_Z,
3764 [INDEX_op_jbe_subb] = CC_Z | CC_C,
3765 [INDEX_op_jbe_subw] = CC_Z | CC_C,
3766 [INDEX_op_jbe_subl] = CC_Z | CC_C,
3768 [INDEX_op_js_subb] = CC_S,
3769 [INDEX_op_js_subw] = CC_S,
3770 [INDEX_op_js_subl] = CC_S,
3772 [INDEX_op_jl_subb] = CC_O | CC_S,
3773 [INDEX_op_jl_subw] = CC_O | CC_S,
3774 [INDEX_op_jl_subl] = CC_O | CC_S,
3776 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
3777 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
3778 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
3780 [INDEX_op_loopnzw] = CC_Z,
3781 [INDEX_op_loopnzl] = CC_Z,
3782 [INDEX_op_loopzw] = CC_Z,
3783 [INDEX_op_loopzl] = CC_Z,
3785 [INDEX_op_seto_T0_cc] = CC_O,
3786 [INDEX_op_setb_T0_cc] = CC_C,
3787 [INDEX_op_setz_T0_cc] = CC_Z,
3788 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
3789 [INDEX_op_sets_T0_cc] = CC_S,
3790 [INDEX_op_setp_T0_cc] = CC_P,
3791 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
3792 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
3794 [INDEX_op_setb_T0_subb] = CC_C,
3795 [INDEX_op_setb_T0_subw] = CC_C,
3796 [INDEX_op_setb_T0_subl] = CC_C,
3798 [INDEX_op_setz_T0_subb] = CC_Z,
3799 [INDEX_op_setz_T0_subw] = CC_Z,
3800 [INDEX_op_setz_T0_subl] = CC_Z,
3802 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
3803 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
3804 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
3806 [INDEX_op_sets_T0_subb] = CC_S,
3807 [INDEX_op_sets_T0_subw] = CC_S,
3808 [INDEX_op_sets_T0_subl] = CC_S,
3810 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
3811 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
3812 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
3814 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
3815 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
3816 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
3818 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
3819 [INDEX_op_cmc] = CC_C,
3820 [INDEX_op_salc] = CC_C,
3822 [INDEX_op_rclb_T0_T1_cc] = CC_C,
3823 [INDEX_op_rclw_T0_T1_cc] = CC_C,
3824 [INDEX_op_rcll_T0_T1_cc] = CC_C,
3825 [INDEX_op_rcrb_T0_T1_cc] = CC_C,
3826 [INDEX_op_rcrw_T0_T1_cc] = CC_C,
3827 [INDEX_op_rcrl_T0_T1_cc] = CC_C,
3829 [INDEX_op_rclb_mem_T0_T1_cc] = CC_C,
3830 [INDEX_op_rclw_mem_T0_T1_cc] = CC_C,
3831 [INDEX_op_rcll_mem_T0_T1_cc] = CC_C,
3832 [INDEX_op_rcrb_mem_T0_T1_cc] = CC_C,
3833 [INDEX_op_rcrw_mem_T0_T1_cc] = CC_C,
3834 [INDEX_op_rcrl_mem_T0_T1_cc] = CC_C,
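/* Note: this table and opc_write_flags below are complementary.  The
   adc/sbb ops above, for instance, read only CC_C (the incoming carry),
   while the write table marks them as producing the full CC_OSZAPC
   set. */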
3837 /* flags written by an operation */
3838 static uint16_t opc_write_flags[NB_OPS] = {
3839 [INDEX_op_update2_cc] = CC_OSZAPC,
3840 [INDEX_op_update1_cc] = CC_OSZAPC,
3841 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
3842 [INDEX_op_update_neg_cc] = CC_OSZAPC,
3843 /* subtle: due to the incl/decl implementation, C is used */
3844 [INDEX_op_update_inc_cc] = CC_OSZAPC,
3845 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
3847 [INDEX_op_adcb_T0_T1_cc] = CC_OSZAPC,
3848 [INDEX_op_adcw_T0_T1_cc] = CC_OSZAPC,
3849 [INDEX_op_adcl_T0_T1_cc] = CC_OSZAPC,
3850 [INDEX_op_sbbb_T0_T1_cc] = CC_OSZAPC,
3851 [INDEX_op_sbbw_T0_T1_cc] = CC_OSZAPC,
3852 [INDEX_op_sbbl_T0_T1_cc] = CC_OSZAPC,
3854 [INDEX_op_adcb_mem_T0_T1_cc] = CC_OSZAPC,
3855 [INDEX_op_adcw_mem_T0_T1_cc] = CC_OSZAPC,
3856 [INDEX_op_adcl_mem_T0_T1_cc] = CC_OSZAPC,
3857 [INDEX_op_sbbb_mem_T0_T1_cc] = CC_OSZAPC,
3858 [INDEX_op_sbbw_mem_T0_T1_cc] = CC_OSZAPC,
3859 [INDEX_op_sbbl_mem_T0_T1_cc] = CC_OSZAPC,
3861 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
3862 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
3863 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
3864 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
3865 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
3866 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
3867 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
3868 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
3871 [INDEX_op_aam] = CC_OSZAPC,
3872 [INDEX_op_aad] = CC_OSZAPC,
3873 [INDEX_op_aas] = CC_OSZAPC,
3874 [INDEX_op_aaa] = CC_OSZAPC,
3875 [INDEX_op_das] = CC_OSZAPC,
3876 [INDEX_op_daa] = CC_OSZAPC,
3878 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
3879 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
3880 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
3881 [INDEX_op_clc] = CC_C,
3882 [INDEX_op_stc] = CC_C,
3883 [INDEX_op_cmc] = CC_C,
3885 [INDEX_op_rolb_T0_T1_cc] = CC_O | CC_C,
3886 [INDEX_op_rolw_T0_T1_cc] = CC_O | CC_C,
3887 [INDEX_op_roll_T0_T1_cc] = CC_O | CC_C,
3888 [INDEX_op_rorb_T0_T1_cc] = CC_O | CC_C,
3889 [INDEX_op_rorw_T0_T1_cc] = CC_O | CC_C,
3890 [INDEX_op_rorl_T0_T1_cc] = CC_O | CC_C,
3892 [INDEX_op_rclb_T0_T1_cc] = CC_O | CC_C,
3893 [INDEX_op_rclw_T0_T1_cc] = CC_O | CC_C,
3894 [INDEX_op_rcll_T0_T1_cc] = CC_O | CC_C,
3895 [INDEX_op_rcrb_T0_T1_cc] = CC_O | CC_C,
3896 [INDEX_op_rcrw_T0_T1_cc] = CC_O | CC_C,
3897 [INDEX_op_rcrl_T0_T1_cc] = CC_O | CC_C,
3899 [INDEX_op_shlb_T0_T1_cc] = CC_OSZAPC,
3900 [INDEX_op_shlw_T0_T1_cc] = CC_OSZAPC,
3901 [INDEX_op_shll_T0_T1_cc] = CC_OSZAPC,
3903 [INDEX_op_shrb_T0_T1_cc] = CC_OSZAPC,
3904 [INDEX_op_shrw_T0_T1_cc] = CC_OSZAPC,
3905 [INDEX_op_shrl_T0_T1_cc] = CC_OSZAPC,
3907 [INDEX_op_sarb_T0_T1_cc] = CC_OSZAPC,
3908 [INDEX_op_sarw_T0_T1_cc] = CC_OSZAPC,
3909 [INDEX_op_sarl_T0_T1_cc] = CC_OSZAPC,
3911 [INDEX_op_shldw_T0_T1_ECX_cc] = CC_OSZAPC,
3912 [INDEX_op_shldl_T0_T1_ECX_cc] = CC_OSZAPC,
3913 [INDEX_op_shldw_T0_T1_im_cc] = CC_OSZAPC,
3914 [INDEX_op_shldl_T0_T1_im_cc] = CC_OSZAPC,
3916 [INDEX_op_shrdw_T0_T1_ECX_cc] = CC_OSZAPC,
3917 [INDEX_op_shrdl_T0_T1_ECX_cc] = CC_OSZAPC,
3918 [INDEX_op_shrdw_T0_T1_im_cc] = CC_OSZAPC,
3919 [INDEX_op_shrdl_T0_T1_im_cc] = CC_OSZAPC,
3921 [INDEX_op_rolb_mem_T0_T1_cc] = CC_O | CC_C,
3922 [INDEX_op_rolw_mem_T0_T1_cc] = CC_O | CC_C,
3923 [INDEX_op_roll_mem_T0_T1_cc] = CC_O | CC_C,
3924 [INDEX_op_rorb_mem_T0_T1_cc] = CC_O | CC_C,
3925 [INDEX_op_rorw_mem_T0_T1_cc] = CC_O | CC_C,
3926 [INDEX_op_rorl_mem_T0_T1_cc] = CC_O | CC_C,
3928 [INDEX_op_rclb_mem_T0_T1_cc] = CC_O | CC_C,
3929 [INDEX_op_rclw_mem_T0_T1_cc] = CC_O | CC_C,
3930 [INDEX_op_rcll_mem_T0_T1_cc] = CC_O | CC_C,
3931 [INDEX_op_rcrb_mem_T0_T1_cc] = CC_O | CC_C,
3932 [INDEX_op_rcrw_mem_T0_T1_cc] = CC_O | CC_C,
3933 [INDEX_op_rcrl_mem_T0_T1_cc] = CC_O | CC_C,
3935 [INDEX_op_shlb_mem_T0_T1_cc] = CC_OSZAPC,
3936 [INDEX_op_shlw_mem_T0_T1_cc] = CC_OSZAPC,
3937 [INDEX_op_shll_mem_T0_T1_cc] = CC_OSZAPC,
3939 [INDEX_op_shrb_mem_T0_T1_cc] = CC_OSZAPC,
3940 [INDEX_op_shrw_mem_T0_T1_cc] = CC_OSZAPC,
3941 [INDEX_op_shrl_mem_T0_T1_cc] = CC_OSZAPC,
3943 [INDEX_op_sarb_mem_T0_T1_cc] = CC_OSZAPC,
3944 [INDEX_op_sarw_mem_T0_T1_cc] = CC_OSZAPC,
3945 [INDEX_op_sarl_mem_T0_T1_cc] = CC_OSZAPC,
3947 [INDEX_op_shldw_mem_T0_T1_ECX_cc] = CC_OSZAPC,
3948 [INDEX_op_shldl_mem_T0_T1_ECX_cc] = CC_OSZAPC,
3949 [INDEX_op_shldw_mem_T0_T1_im_cc] = CC_OSZAPC,
3950 [INDEX_op_shldl_mem_T0_T1_im_cc] = CC_OSZAPC,
3952 [INDEX_op_shrdw_mem_T0_T1_ECX_cc] = CC_OSZAPC,
3953 [INDEX_op_shrdl_mem_T0_T1_ECX_cc] = CC_OSZAPC,
3954 [INDEX_op_shrdw_mem_T0_T1_im_cc] = CC_OSZAPC,
3955 [INDEX_op_shrdl_mem_T0_T1_im_cc] = CC_OSZAPC,
3957 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
3958 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
3959 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
3960 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
3961 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
3962 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
3963 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
3964 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
3966 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
3967 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
3968 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
3969 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
3972 #define STRINGOP(x) \
3973 [INDEX_op_ ## x ## b_fast] = CC_OSZAPC, \
3974 [INDEX_op_ ## x ## w_fast] = CC_OSZAPC, \
3975 [INDEX_op_ ## x ## l_fast] = CC_OSZAPC, \
3976 [INDEX_op_ ## x ## b_a32] = CC_OSZAPC, \
3977 [INDEX_op_ ## x ## w_a32] = CC_OSZAPC, \
3978 [INDEX_op_ ## x ## l_a32] = CC_OSZAPC, \
3979 [INDEX_op_ ## x ## b_a16] = CC_OSZAPC, \
3980 [INDEX_op_ ## x ## w_a16] = CC_OSZAPC, \
3981 [INDEX_op_ ## x ## l_a16] = CC_OSZAPC,
3985 STRINGOP(repnz_scas)
3988 STRINGOP(repnz_cmps)
3990 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
3991 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
3992 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
3994 [INDEX_op_cmpxchgb_mem_T0_T1_EAX_cc] = CC_OSZAPC,
3995 [INDEX_op_cmpxchgw_mem_T0_T1_EAX_cc] = CC_OSZAPC,
3996 [INDEX_op_cmpxchgl_mem_T0_T1_EAX_cc] = CC_OSZAPC,
3998 [INDEX_op_cmpxchg8b] = CC_Z,
3999 [INDEX_op_lar] = CC_Z,
4000 [INDEX_op_lsl] = CC_Z,
4001 [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
4002 [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
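#if 0
/* For reference only (not compiled): each STRINGOP(x) invocation above
   expands to nine designated initializers, one per byte/word/long
   variant of the fast, a32 and a16 forms.  STRINGOP(repnz_scas), for
   instance, expands to: */
    [INDEX_op_repnz_scasb_fast] = CC_OSZAPC,
    [INDEX_op_repnz_scasw_fast] = CC_OSZAPC,
    [INDEX_op_repnz_scasl_fast] = CC_OSZAPC,
    [INDEX_op_repnz_scasb_a32] = CC_OSZAPC,
    [INDEX_op_repnz_scasw_a32] = CC_OSZAPC,
    [INDEX_op_repnz_scasl_a32] = CC_OSZAPC,
    [INDEX_op_repnz_scasb_a16] = CC_OSZAPC,
    [INDEX_op_repnz_scasw_a16] = CC_OSZAPC,
    [INDEX_op_repnz_scasl_a16] = CC_OSZAPC,
#endif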
4005 /* simpler form of an operation if no flags need to be generated */
4006 static uint16_t opc_simpler[NB_OPS] = {
4007 [INDEX_op_update2_cc] = INDEX_op_nop,
4008 [INDEX_op_update1_cc] = INDEX_op_nop,
4009 [INDEX_op_update_neg_cc] = INDEX_op_nop,
4011 /* broken: CC_OP logic must be rewritten */
4012 [INDEX_op_update_inc_cc] = INDEX_op_nop,
4014 [INDEX_op_rolb_T0_T1_cc] = INDEX_op_rolb_T0_T1,
4015 [INDEX_op_rolw_T0_T1_cc] = INDEX_op_rolw_T0_T1,
4016 [INDEX_op_roll_T0_T1_cc] = INDEX_op_roll_T0_T1,
4018 [INDEX_op_rorb_T0_T1_cc] = INDEX_op_rorb_T0_T1,
4019 [INDEX_op_rorw_T0_T1_cc] = INDEX_op_rorw_T0_T1,
4020 [INDEX_op_rorl_T0_T1_cc] = INDEX_op_rorl_T0_T1,
4022 [INDEX_op_rolb_mem_T0_T1_cc] = INDEX_op_rolb_mem_T0_T1,
4023 [INDEX_op_rolw_mem_T0_T1_cc] = INDEX_op_rolw_mem_T0_T1,
4024 [INDEX_op_roll_mem_T0_T1_cc] = INDEX_op_roll_mem_T0_T1,
4026 [INDEX_op_rorb_mem_T0_T1_cc] = INDEX_op_rorb_mem_T0_T1,
4027 [INDEX_op_rorw_mem_T0_T1_cc] = INDEX_op_rorw_mem_T0_T1,
4028 [INDEX_op_rorl_mem_T0_T1_cc] = INDEX_op_rorl_mem_T0_T1,
4030 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
4031 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
4032 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
4034 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
4035 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
4036 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
4038 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
4039 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
4040 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
4043 static void optimize_flags_init(void)
4046 /* put default values in arrays */
4047 for(i = 0; i < NB_OPS; i++) {
4048 if (opc_simpler[i] == 0)
/* CPU flags computation optimization: we move backward through the
   generated code to see which flags are actually needed. An operation is
   replaced by a simpler form when none of the flags it writes are used. */
4056 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
4059 int live_flags, write_flags, op;
4061 opc_ptr = opc_buf + opc_buf_len;
/* live_flags contains the flags needed by the next instructions
   in the code. At the end of the block, we conservatively consider
   that all the flags are needed. */
4065 live_flags = CC_OSZAPC;
4066 while (opc_ptr > opc_buf) {
4068 /* if none of the flags written by the instruction is used,
4069 then we can try to find a simpler instruction */
4070 write_flags = opc_write_flags[op];
4071 if ((live_flags & write_flags) == 0) {
4072 *opc_ptr = opc_simpler[op];
4074 /* compute the live flags before the instruction */
4075 live_flags &= ~write_flags;
4076 live_flags |= opc_read_flags[op];
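#if 0
/* Hedged illustration only (not compiled): the effect of the backward
   liveness pass on a hand-built opcode buffer.  It assumes
   optimize_flags_init() has already run (cpu_x86_init() calls it), so
   opc_simpler[] maps every unlisted op to itself. */
static void optimize_flags_example(void)
{
    uint16_t buf[3];

    buf[0] = INDEX_op_update1_cc;     /* writes CC_OSZAPC */
    buf[1] = INDEX_op_update2_cc;     /* also writes CC_OSZAPC, so the
                                         flags from buf[0] are dead */
    buf[2] = INDEX_op_movl_T0_eflags; /* reads CC_OSZAPC */

    optimize_flags(buf, 3);

    /* afterwards buf[0] == INDEX_op_nop (its flag results were never
       read before being overwritten); buf[1] is kept because buf[2]
       reads the flags it writes; buf[2] maps to itself in opc_simpler[]
       and is therefore unchanged. */
}
#endif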
4080 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
4081 basic block 'tb'. If search_pc is TRUE, also generate PC
4082 information for each intermediate instruction. */
4083 static inline int gen_intermediate_code_internal(TranslationBlock *tb, int search_pc)
4085 DisasContext dc1, *dc = &dc1;
4087 uint16_t *gen_opc_end;
4093 /* generate intermediate code */
4094 pc_start = (uint8_t *)tb->pc;
4095 cs_base = (uint8_t *)tb->cs_base;
4098 dc->code32 = (flags >> GEN_FLAG_CODE32_SHIFT) & 1;
4099 dc->ss32 = (flags >> GEN_FLAG_SS32_SHIFT) & 1;
4100 dc->addseg = (flags >> GEN_FLAG_ADDSEG_SHIFT) & 1;
4101 dc->f_st = (flags >> GEN_FLAG_ST_SHIFT) & 7;
4102 dc->vm86 = (flags >> GEN_FLAG_VM_SHIFT) & 1;
4103 dc->cpl = (flags >> GEN_FLAG_CPL_SHIFT) & 3;
4104 dc->iopl = (flags >> GEN_FLAG_IOPL_SHIFT) & 3;
4105 dc->tf = (flags >> GEN_FLAG_TF_SHIFT) & 1;
4106 dc->cc_op = CC_OP_DYNAMIC;
4107 dc->cs_base = cs_base;
4109 dc->popl_esp_hack = 0;
4111 gen_opc_ptr = gen_opc_buf;
4112 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4113 gen_opparam_ptr = gen_opparam_buf;
4115 dc->is_jmp = DISAS_NEXT;
4120 j = gen_opc_ptr - gen_opc_buf;
4124 gen_opc_instr_start[lj++] = 0;
4125 gen_opc_pc[lj] = (uint32_t)pc_ptr;
4126 gen_opc_cc_op[lj] = dc->cc_op;
4127 gen_opc_instr_start[lj] = 1;
4130 ret = disas_insn(dc, pc_ptr);
/* we generate an illegal instruction exception only if it is
   the first instruction; otherwise we simply stop generating
   code just before it */
4135 if (pc_ptr == pc_start)
4140 pc_ptr = (void *)ret;
/* in single-step mode, we generate only one instruction and
   then generate an exception */
4145 } while (!dc->is_jmp && gen_opc_ptr < gen_opc_end &&
4146 (pc_ptr - pc_start) < (TARGET_PAGE_SIZE - 32));
4147 if (!dc->tf && dc->is_jmp == DISAS_NEXT) {
4148 gen_jmp(dc, ret - (unsigned long)dc->cs_base);
/* we must store the eflags state if it has not already been done */
4152 if (dc->is_jmp != DISAS_TB_JUMP) {
4153 if (dc->cc_op != CC_OP_DYNAMIC)
4154 gen_op_set_cc_op(dc->cc_op);
4155 if (dc->is_jmp != DISAS_JUMP) {
/* we add an additional jmp to update the simulated PC */
4157 gen_op_jmp_im(ret - (unsigned long)dc->cs_base);
4161 gen_op_raise_exception(EXCP01_SSTP);
4163 if (dc->is_jmp != DISAS_TB_JUMP) {
4164 /* indicate that the hash table must be used to find the next TB */
4167 *gen_opc_ptr = INDEX_op_end;
/* make sure the last values are filled as well */
4170 j = gen_opc_ptr - gen_opc_buf;
4173 gen_opc_instr_start[lj++] = 0;
4178 fprintf(logfile, "----------------\n");
4179 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
4180 disas(logfile, pc_start, pc_ptr - pc_start, 0, !dc->code32);
4181 fprintf(logfile, "\n");
4183 fprintf(logfile, "OP:\n");
4184 dump_ops(gen_opc_buf, gen_opparam_buf);
4185 fprintf(logfile, "\n");
4189 /* optimize flag computations */
4190 optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
4194 fprintf(logfile, "AFTER FLAGS OPT:\n");
4195 dump_ops(gen_opc_buf, gen_opparam_buf);
4196 fprintf(logfile, "\n");
4200 tb->size = pc_ptr - pc_start;
4204 int gen_intermediate_code(TranslationBlock *tb)
4206 return gen_intermediate_code_internal(tb, 0);
4209 int gen_intermediate_code_pc(TranslationBlock *tb)
4211 return gen_intermediate_code_internal(tb, 1);
4214 CPUX86State *cpu_x86_init(void)
4222 env = malloc(sizeof(CPUX86State));
4225 memset(env, 0, sizeof(CPUX86State));
4226 /* basic FPU init */
for(i = 0; i < 8; i++)
/* flags setup: we enable IRQs by default, as in user mode */
4231 env->eflags = 0x2 | IF_MASK;
4233 /* init various static tables */
4236 optimize_flags_init();
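#if 0
/* Hedged usage sketch (not compiled, not taken from this file): a
   minimal caller of cpu_x86_init().  'entry_point' and 'stack_top' are
   hypothetical values supplied by a loader. */
static CPUX86State *start_vcpu_example(uint32_t entry_point, uint32_t stack_top)
{
    CPUX86State *env;

    env = cpu_x86_init();
    if (!env)
        return NULL;
    env->eip = entry_point;       /* where guest execution will start */
    env->regs[R_ESP] = stack_top; /* initial guest stack pointer */
    /* eflags already has IF set by cpu_x86_init(), as noted above */
    return env;
}
#endif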
4241 void cpu_x86_close(CPUX86State *env)
4246 /***********************************************************/
4248 /* XXX: add PGE support */
/* called when cr3 or the PG bit is modified */
4251 static int last_pg_state = -1;
4254 uint8_t *phys_ram_base;
4256 void cpu_x86_update_cr0(CPUX86State *env)
4262 printf("CR0 update: CR0=0x%08x\n", env->cr[0]);
4264 pg_state = env->cr[0] & CR0_PG_MASK;
4265 if (pg_state != last_pg_state) {
4267 /* we map the physical memory at address 0 */
4269 map_addr = mmap((void *)0, phys_ram_size, PROT_WRITE | PROT_READ,
4270 MAP_SHARED | MAP_FIXED, phys_ram_fd, 0);
4271 if (map_addr == MAP_FAILED) {
4273 "Could not map physical memory at host address 0x%08x\n",
4277 page_set_flags(0, phys_ram_size,
4278 PAGE_VALID | PAGE_READ | PAGE_WRITE | PAGE_EXEC);
4280 /* we unmap the physical memory */
4281 munmap((void *)0, phys_ram_size);
4282 page_set_flags(0, phys_ram_size, 0);
4284 last_pg_state = pg_state;
4288 void cpu_x86_update_cr3(CPUX86State *env)
4290 if (env->cr[0] & CR0_PG_MASK) {
4291 #if defined(DEBUG_MMU)
4292 printf("CR3 update: CR3=%08x\n", env->cr[3]);
4298 void cpu_x86_init_mmu(CPUX86State *env)
4301 cpu_x86_update_cr0(env);
4304 /* XXX: also flush 4MB pages */
4305 void cpu_x86_flush_tlb(CPUX86State *env, uint32_t addr)
4308 unsigned long virt_addr;
4310 flags = page_get_flags(addr);
4311 if (flags & PAGE_VALID) {
4312 virt_addr = addr & ~0xfff;
4313 munmap((void *)virt_addr, 4096);
4314 page_set_flags(virt_addr, virt_addr + 4096, 0);
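/* note: since the page is now unmapped on the host, the next guest
   access to it should fault and be re-resolved (see
   cpu_x86_handle_mmu_fault() below) */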
4319 -1 = cannot handle fault
4320 0 = nothing more to do
4321 1 = generate PF fault
4323 int cpu_x86_handle_mmu_fault(CPUX86State *env, uint32_t addr, int is_write)
4325 uint8_t *pde_ptr, *pte_ptr;
4326 uint32_t pde, pte, virt_addr;
4327 int cpl, error_code, is_dirty, is_user, prot, page_size;
4330 cpl = env->segs[R_CS].selector & 3;
4331 is_user = (cpl == 3);
4334 printf("MMU fault: addr=0x%08x w=%d u=%d eip=%08x\n",
4335 addr, is_write, is_user, env->eip);
4338 if (env->user_mode_only) {
4339 /* user mode only emulation */
4344 if (!(env->cr[0] & CR0_PG_MASK))
4347 /* page directory entry */
4348 pde_ptr = phys_ram_base + ((env->cr[3] & ~0xfff) + ((addr >> 20) & ~3));
4350 if (!(pde & PG_PRESENT_MASK)) {
4355 if (!(pde & PG_USER_MASK))
4356 goto do_fault_protect;
4357 if (is_write && !(pde & PG_RW_MASK))
4358 goto do_fault_protect;
4360 if ((env->cr[0] & CR0_WP_MASK) && (pde & PG_USER_MASK) &&
4361 is_write && !(pde & PG_RW_MASK))
4362 goto do_fault_protect;
/* if the PSE bit is set, we use a 4MB page */
4365 if ((pde & PG_PSE_MASK) && (env->cr[4] & CR4_PSE_MASK)) {
4366 is_dirty = is_write && !(pde & PG_DIRTY_MASK);
4367 if (!(pde & PG_ACCESSED_MASK)) {
4368 pde |= PG_ACCESSED_MASK;
4370 pde |= PG_DIRTY_MASK;
4374 pte = pde & ~0x003ff000; /* align to 4MB */
4375 page_size = 4096 * 1024;
4376 virt_addr = addr & ~0x003fffff;
4378 if (!(pde & PG_ACCESSED_MASK)) {
4379 pde |= PG_ACCESSED_MASK;
/* page table entry */
4384 pte_ptr = phys_ram_base + ((pde & ~0xfff) + ((addr >> 10) & 0xffc));
4386 if (!(pte & PG_PRESENT_MASK)) {
4391 if (!(pte & PG_USER_MASK))
4392 goto do_fault_protect;
4393 if (is_write && !(pte & PG_RW_MASK))
4394 goto do_fault_protect;
4396 if ((env->cr[0] & CR0_WP_MASK) && (pte & PG_USER_MASK) &&
4397 is_write && !(pte & PG_RW_MASK))
4398 goto do_fault_protect;
4400 is_dirty = is_write && !(pte & PG_DIRTY_MASK);
4401 if (!(pte & PG_ACCESSED_MASK) || is_dirty) {
4402 pte |= PG_ACCESSED_MASK;
4404 pte |= PG_DIRTY_MASK;
4408 virt_addr = addr & ~0xfff;
4410 /* the page can be put in the TLB */
4413 if (pte & PG_RW_MASK)
4416 if (!(env->cr[0] & CR0_WP_MASK) || !(pte & PG_USER_MASK) ||
4420 map_addr = mmap((void *)virt_addr, page_size, prot,
4421 MAP_SHARED | MAP_FIXED, phys_ram_fd, pte & ~0xfff);
4422 if (map_addr == MAP_FAILED) {
"mmap failed when mapping physical address 0x%08x to virtual address 0x%08x\n",
4425 pte & ~0xfff, virt_addr);
4428 page_set_flags(virt_addr, virt_addr + page_size,
4429 PAGE_VALID | PAGE_EXEC | prot);
printf("mmapping 0x%08x to virt 0x%08x pse=%d\n",
4432 pte & ~0xfff, virt_addr, (page_size != 4096));
4436 error_code = PG_ERROR_P_MASK;
4439 env->error_code = (is_write << PG_ERROR_W_BIT) | error_code;
4441 env->error_code |= PG_ERROR_U_MASK;
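#if 0
/* Hedged usage sketch (not compiled, not taken from this file): how a
   host fault handler might act on the return value documented above.
   raise_guest_page_fault() is a hypothetical helper standing in for
   whatever actually delivers the #PF to the guest. */
static void handle_host_fault_example(CPUX86State *env, uint32_t addr, int is_write)
{
    int ret;

    ret = cpu_x86_handle_mmu_fault(env, addr, is_write);
    if (ret == 0) {
        /* the page is now mapped: simply restart the faulting access */
    } else if (ret == 1) {
        /* env->error_code has been filled in; deliver a guest #PF */
        raise_guest_page_fault(env, addr); /* hypothetical helper */
    } else {
        /* ret == -1: the MMU emulation cannot resolve this fault */
        abort();
    }
}
#endif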
4445 /***********************************************************/
4448 static const char *cc_op_str[] = {
4481 void cpu_x86_dump_state(CPUX86State *env, FILE *f, int flags)
4484 char cc_op_name[32];
4486 eflags = env->eflags;
4487 fprintf(f, "EAX=%08x EBX=%08x ECX=%08x EDX=%08x\n"
4488 "ESI=%08x EDI=%08x EBP=%08x ESP=%08x\n"
4489 "EIP=%08x EFL=%08x [%c%c%c%c%c%c%c]\n",
4490 env->regs[R_EAX], env->regs[R_EBX], env->regs[R_ECX], env->regs[R_EDX],
4491 env->regs[R_ESI], env->regs[R_EDI], env->regs[R_EBP], env->regs[R_ESP],
4493 eflags & DF_MASK ? 'D' : '-',
4494 eflags & CC_O ? 'O' : '-',
4495 eflags & CC_S ? 'S' : '-',
4496 eflags & CC_Z ? 'Z' : '-',
4497 eflags & CC_A ? 'A' : '-',
4498 eflags & CC_P ? 'P' : '-',
4499 eflags & CC_C ? 'C' : '-');
4500 fprintf(f, "CS=%04x SS=%04x DS=%04x ES=%04x FS=%04x GS=%04x\n",
4501 env->segs[R_CS].selector,
4502 env->segs[R_SS].selector,
4503 env->segs[R_DS].selector,
4504 env->segs[R_ES].selector,
4505 env->segs[R_FS].selector,
4506 env->segs[R_GS].selector);
4507 if (flags & X86_DUMP_CCOP) {
4508 if ((unsigned)env->cc_op < CC_OP_NB)
4509 strcpy(cc_op_name, cc_op_str[env->cc_op]);
4511 snprintf(cc_op_name, sizeof(cc_op_name), "[%d]", env->cc_op);
4512 fprintf(f, "CCS=%08x CCD=%08x CCO=%-8s\n",
4513 env->cc_src, env->cc_dst, cc_op_name);
4515 if (flags & X86_DUMP_FPU) {
4516 fprintf(f, "ST0=%f ST1=%f ST2=%f ST3=%f\n",
4517 (double)env->fpregs[0],
4518 (double)env->fpregs[1],
4519 (double)env->fpregs[2],
4520 (double)env->fpregs[3]);
4521 fprintf(f, "ST4=%f ST5=%f ST6=%f ST7=%f\n",
4522 (double)env->fpregs[4],
4523 (double)env->fpregs[5],
(double)env->fpregs[6],
(double)env->fpregs[7]);