5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
28 Optional alignment check
44 #define DYNAMIC_PC 1 /* dynamic pc value */
45 #define JUMP_PC 2 /* dynamic pc value which takes only two values
46 according to jump_pc[T2] */
48 typedef struct DisasContext {
49 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
50 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
51 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
55 struct TranslationBlock *tb;
59 const unsigned char *name;
60 target_ulong iu_version;
65 static uint16_t *gen_opc_ptr;
66 static uint32_t *gen_opparam_ptr;
71 #define DEF(s,n,copy_size) INDEX_op_ ## s,
79 // This function uses non-native bit order
80 #define GET_FIELD(X, FROM, TO) \
81 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
83 // This function uses the order in the manuals, i.e. bit 0 is 2^0
84 #define GET_FIELD_SP(X, FROM, TO) \
85 GET_FIELD(X, 31 - (TO), 31 - (FROM))
87 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
88 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), 32 - ((b) - (a) + 1))
91 #define DFPREG(r) (((r & 1) << 6) | (r & 0x1e))
96 #ifdef USE_DIRECT_JUMP
99 #define TBPARAM(x) (long)(x)
/*
 * Sign-extend an integer field to a full (32-bit) int.
 * NOTE(review): lines 103-104 are missing from this extract.  The visible
 * body shifts by 'len' directly, yet GET_FIELDs passes the field *width*
 * as 'len', so an intervening "len = 32 - len;" step appears to have been
 * dropped — confirm against the complete source.  Also note that
 * left-shifting a negative signed int is undefined behavior in ISO C;
 * this code relies on the compiler implementing arithmetic shifts.
 */
102 static int sign_extend(int x, int len)
105 return (x << len) >> len;
108 #define IS_IMM (insn & (1<<13))
110 static void disas_sparc_insn(DisasContext * dc);
112 static GenOpFunc *gen_op_movl_TN_reg[2][32] = {
183 static GenOpFunc *gen_op_movl_reg_TN[3][32] = {
288 static GenOpFunc1 *gen_op_movl_TN_im[3] = {
294 // Sign extending version
295 static GenOpFunc1 * const gen_op_movl_TN_sim[3] = {
301 #ifdef TARGET_SPARC64
302 #define GEN32(func, NAME) \
303 static GenOpFunc *NAME ## _table [64] = { \
304 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
305 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
306 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
307 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
308 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
309 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
310 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
311 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
312 NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
313 NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
314 NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
315 NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
317 static inline void func(int n) \
319 NAME ## _table[n](); \
322 #define GEN32(func, NAME) \
323 static GenOpFunc *NAME ## _table [32] = { \
324 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
325 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
326 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
327 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
328 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
329 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
330 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
331 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
333 static inline void func(int n) \
335 NAME ## _table[n](); \
339 /* floating point registers moves */
340 GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
341 GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
342 GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
343 GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);
345 GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
346 GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
347 GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
348 GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);
350 #ifdef TARGET_SPARC64
351 // 'a' versions allowed to user depending on asi
352 #if defined(CONFIG_USER_ONLY)
353 #define supervisor(dc) 0
354 #define gen_op_ldst(name) gen_op_##name##_raw()
355 #define OP_LD_TABLE(width) \
356 static void gen_op_##width##a(int insn, int is_ld, int size, int sign) \
361 offset = GET_FIELD(insn, 25, 31); \
363 gen_op_ld_asi_reg(offset, size, sign); \
365 gen_op_st_asi_reg(offset, size, sign); \
368 asi = GET_FIELD(insn, 19, 26); \
370 case 0x80: /* Primary address space */ \
371 gen_op_##width##_raw(); \
373 case 0x82: /* Primary address space, non-faulting load */ \
374 gen_op_##width##_raw(); \
382 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
383 #define OP_LD_TABLE(width) \
384 static GenOpFunc *gen_op_##width[] = { \
385 &gen_op_##width##_user, \
386 &gen_op_##width##_kernel, \
389 static void gen_op_##width##a(int insn, int is_ld, int size, int sign) \
394 offset = GET_FIELD(insn, 25, 31); \
396 gen_op_ld_asi_reg(offset, size, sign); \
398 gen_op_st_asi_reg(offset, size, sign); \
401 asi = GET_FIELD(insn, 19, 26); \
403 gen_op_ld_asi(asi, size, sign); \
405 gen_op_st_asi(asi, size, sign); \
408 #define supervisor(dc) (dc->mem_idx == 1)
411 #if defined(CONFIG_USER_ONLY)
412 #define gen_op_ldst(name) gen_op_##name##_raw()
413 #define OP_LD_TABLE(width)
414 #define supervisor(dc) 0
416 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
417 #define OP_LD_TABLE(width) \
418 static GenOpFunc *gen_op_##width[] = { \
419 &gen_op_##width##_user, \
420 &gen_op_##width##_kernel, \
423 static void gen_op_##width##a(int insn, int is_ld, int size, int sign) \
427 asi = GET_FIELD(insn, 19, 26); \
429 case 10: /* User data access */ \
430 gen_op_##width##_user(); \
432 case 11: /* Supervisor data access */ \
433 gen_op_##width##_kernel(); \
435 case 0x20 ... 0x2f: /* MMU passthrough */ \
437 gen_op_ld_asi(asi, size, sign); \
439 gen_op_st_asi(asi, size, sign); \
443 gen_op_ld_asi(asi, size, sign); \
445 gen_op_st_asi(asi, size, sign); \
450 #define supervisor(dc) (dc->mem_idx == 1)
471 #ifdef TARGET_SPARC64
/* Emit code loading the 32-bit immediate 'imm' into T-register 'reg'
   (0 = T0, 1 = T1) via the per-register micro-op dispatch table. */
479 static inline void gen_movl_imm_TN(int reg, uint32_t imm)
481 gen_op_movl_TN_im[reg](imm);
/* Convenience wrapper: load immediate 'val' into T1. */
484 static inline void gen_movl_imm_T1(uint32_t val)
486 gen_movl_imm_TN(1, val);
/* Convenience wrapper: load immediate 'val' into T0. */
489 static inline void gen_movl_imm_T0(uint32_t val)
491 gen_movl_imm_TN(0, val);
/* Emit code loading the signed immediate 'imm' into T-register 'reg',
   using the sign-extending micro-op table (gen_op_movl_TN_sim). */
494 static inline void gen_movl_simm_TN(int reg, int32_t imm)
496 gen_op_movl_TN_sim[reg](imm);
/* Convenience wrapper: load signed immediate 'val' into T1. */
499 static inline void gen_movl_simm_T1(int32_t val)
501 gen_movl_simm_TN(1, val);
/* Convenience wrapper: load signed immediate 'val' into T0. */
504 static inline void gen_movl_simm_T0(int32_t val)
506 gen_movl_simm_TN(0, val);
/* Emit code loading SPARC register 'reg' into T-register 't'.
   The fall-back to a constant 0 appears to be the %g0 (reg == 0)
   case, which always reads as zero — the guarding if/else lines are
   missing from this extract, confirm against the complete source. */
509 static inline void gen_movl_reg_TN(int reg, int t)
512 gen_op_movl_reg_TN[t][reg] ();
514 gen_movl_imm_TN(t, 0);
/* Convenience wrapper: load SPARC register 'reg' into T0. */
517 static inline void gen_movl_reg_T0(int reg)
519 gen_movl_reg_TN(reg, 0);
/* Convenience wrapper: load SPARC register 'reg' into T1. */
522 static inline void gen_movl_reg_T1(int reg)
524 gen_movl_reg_TN(reg, 1);
/* Convenience wrapper: load SPARC register 'reg' into T2
   (T2 is also used as the condition value for generated branches). */
527 static inline void gen_movl_reg_T2(int reg)
529 gen_movl_reg_TN(reg, 2);
/* Emit code storing T-register 't' back into SPARC register 'reg'.
   NOTE(review): a guard for reg == %g0 (writes discarded) is likely
   present in the missing lines — confirm against the complete source. */
532 static inline void gen_movl_TN_reg(int reg, int t)
535 gen_op_movl_TN_reg[t][reg] ();
/* Convenience wrapper: store T0 into SPARC register 'reg'. */
538 static inline void gen_movl_T0_reg(int reg)
540 gen_movl_TN_reg(reg, 0);
/* Convenience wrapper: store T1 into SPARC register 'reg'. */
543 static inline void gen_movl_T1_reg(int reg)
545 gen_movl_TN_reg(reg, 1);
/*
 * Emit code setting the emulated PC to the constant 'pc'.
 * On SPARC64, a 32-bit immediate form is used when the value fits in
 * 32 bits, otherwise the 64-bit value is passed as high/low halves.
 * NOTE(review): the 32-bit branch body and the non-SPARC64 path are
 * missing from this extract.
 */
548 static inline void gen_jmp_im(target_ulong pc)
550 #ifdef TARGET_SPARC64
551 if (pc == (uint32_t)pc) {
554 gen_op_jmp_im64(pc >> 32, pc);
/*
 * Emit code setting the emulated NPC (next PC) to the constant 'npc'.
 * SPARC64 uses the short 32-bit form when the value fits, else the
 * 64-bit split-immediate form; other targets always use the 32-bit form.
 */
561 static inline void gen_movl_npc_im(target_ulong npc)
563 #ifdef TARGET_SPARC64
564 if (npc == (uint32_t)npc) {
565 gen_op_movl_npc_im(npc);
567 gen_op_movq_npc_im64(npc >> 32, npc);
570 gen_op_movl_npc_im(npc);
/*
 * Emit the epilogue jump to (pc, npc), chaining to the next
 * TranslationBlock directly when both addresses lie on the same
 * guest page as this TB (direct-jump patching via gen_op_goto_tbN /
 * TBPARAM); otherwise fall back to storing npc and exiting without
 * chaining.  'tb_num' selects jump slot 0 or 1 of the TB.
 * NOTE(review): several lines (tb assignment, the tb_num dispatch
 * if/else, gen_op_movl_T0_0 and the exit op) are missing from this
 * extract.
 */
574 static inline void gen_goto_tb(DisasContext *s, int tb_num,
575 target_ulong pc, target_ulong npc)
577 TranslationBlock *tb;
580 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
581 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
582 /* jump to same page: we can use a direct jump */
584 gen_op_goto_tb0(TBPARAM(tb));
586 gen_op_goto_tb1(TBPARAM(tb));
588 gen_movl_npc_im(npc);
589 gen_op_movl_T0_im((long)tb + tb_num);
592 /* jump to another page: currently not optimized */
594 gen_movl_npc_im(npc);
/*
 * Emit a two-way exit conditioned on T2: when T2 is non-zero fall
 * through to (pc1, pc1+4); when T2 is zero jump over to label l1 and
 * exit to (pc2, pc2+4).  Used for conditional branches whose delay
 * slot is not annulled.
 */
600 static inline void gen_branch2(DisasContext *dc, long tb, target_ulong pc1, target_ulong pc2)
604 l1 = gen_new_label();
606 gen_op_jz_T2_label(l1);
608 gen_goto_tb(dc, 0, pc1, pc1 + 4);
611 gen_goto_tb(dc, 1, pc2, pc2 + 4);
/*
 * Emit a two-way exit for an annulled conditional branch.  Callers pass
 * pc1 = branch target and pc2 = current npc.  When T2 is non-zero
 * (branch taken) exit to (pc2, pc1) so the delay slot executes before
 * the target; when T2 is zero the delay slot is annulled and control
 * exits to (pc2+4, pc2+8).
 */
614 static inline void gen_branch_a(DisasContext *dc, long tb, target_ulong pc1, target_ulong pc2)
618 l1 = gen_new_label();
620 gen_op_jz_T2_label(l1);
622 gen_goto_tb(dc, 0, pc2, pc1);
625 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Emit an unconditional exit to the static (pc, npc) pair. */
628 static inline void gen_branch(DisasContext *dc, long tb, target_ulong pc, target_ulong npc)
630 gen_goto_tb(dc, 0, pc, npc);
/*
 * Emit a runtime selection of NPC based on T2: T2 non-zero stores
 * npc1, T2 zero stores npc2 (jump over the first store via labels
 * l1/l2).  This converts the symbolic JUMP_PC state into a concrete
 * npc register value.
 */
633 static inline void gen_generic_branch(DisasContext *dc, target_ulong npc1, target_ulong npc2)
637 l1 = gen_new_label();
638 l2 = gen_new_label();
639 gen_op_jz_T2_label(l1);
641 gen_movl_npc_im(npc1);
642 gen_op_jmp_label(l2);
645 gen_movl_npc_im(npc2);
649 /* call this function before using T2 as it may have been set for a jump */
/* If npc is in the two-valued JUMP_PC state, materialize the pending
   conditional npc selection now (gen_generic_branch consumes T2), then
   downgrade the tracked npc to DYNAMIC_PC so T2 is free for reuse. */
650 static inline void flush_T2(DisasContext * dc)
652 if (dc->npc == JUMP_PC) {
653 gen_generic_branch(dc, dc->jump_pc[0], dc->jump_pc[1]);
654 dc->npc = DYNAMIC_PC;
/*
 * Flush the translator's symbolic npc state into the CPU npc register:
 * JUMP_PC  -> emit the T2-conditioned selection (gen_generic_branch);
 * static   -> emit an immediate store of the known value;
 * DYNAMIC_PC -> already up to date in the CPU state, emit nothing.
 */
658 static inline void save_npc(DisasContext * dc)
660 if (dc->npc == JUMP_PC) {
661 gen_generic_branch(dc, dc->jump_pc[0], dc->jump_pc[1]);
662 dc->npc = DYNAMIC_PC;
663 } else if (dc->npc != DYNAMIC_PC) {
664 gen_movl_npc_im(dc->npc);
/* Flush all cached translation state (pc and, via save_npc, npc) back
   to the CPU structure before an exception or TB exit.
   NOTE(review): the body is not visible in this extract. */
668 static inline void save_state(DisasContext * dc)
/*
 * Advance PC to NPC (delay-slot semantics), handling each symbolic
 * npc state: JUMP_PC first materializes the conditional npc via
 * gen_generic_branch; DYNAMIC_PC copies the runtime npc register.
 * NOTE(review): the emitted-op lines inside each branch and the
 * static-npc else case are missing from this extract.
 */
674 static inline void gen_mov_pc_npc(DisasContext * dc)
676 if (dc->npc == JUMP_PC) {
677 gen_generic_branch(dc, dc->jump_pc[0], dc->jump_pc[1]);
680 } else if (dc->npc == DYNAMIC_PC) {
688 static GenOpFunc * const gen_cond[2][16] = {
708 #ifdef TARGET_SPARC64
729 static GenOpFunc * const gen_fcond[4][16] = {
748 #ifdef TARGET_SPARC64
751 gen_op_eval_fbne_fcc1,
752 gen_op_eval_fblg_fcc1,
753 gen_op_eval_fbul_fcc1,
754 gen_op_eval_fbl_fcc1,
755 gen_op_eval_fbug_fcc1,
756 gen_op_eval_fbg_fcc1,
757 gen_op_eval_fbu_fcc1,
759 gen_op_eval_fbe_fcc1,
760 gen_op_eval_fbue_fcc1,
761 gen_op_eval_fbge_fcc1,
762 gen_op_eval_fbuge_fcc1,
763 gen_op_eval_fble_fcc1,
764 gen_op_eval_fbule_fcc1,
765 gen_op_eval_fbo_fcc1,
769 gen_op_eval_fbne_fcc2,
770 gen_op_eval_fblg_fcc2,
771 gen_op_eval_fbul_fcc2,
772 gen_op_eval_fbl_fcc2,
773 gen_op_eval_fbug_fcc2,
774 gen_op_eval_fbg_fcc2,
775 gen_op_eval_fbu_fcc2,
777 gen_op_eval_fbe_fcc2,
778 gen_op_eval_fbue_fcc2,
779 gen_op_eval_fbge_fcc2,
780 gen_op_eval_fbuge_fcc2,
781 gen_op_eval_fble_fcc2,
782 gen_op_eval_fbule_fcc2,
783 gen_op_eval_fbo_fcc2,
787 gen_op_eval_fbne_fcc3,
788 gen_op_eval_fblg_fcc3,
789 gen_op_eval_fbul_fcc3,
790 gen_op_eval_fbl_fcc3,
791 gen_op_eval_fbug_fcc3,
792 gen_op_eval_fbg_fcc3,
793 gen_op_eval_fbu_fcc3,
795 gen_op_eval_fbe_fcc3,
796 gen_op_eval_fbue_fcc3,
797 gen_op_eval_fbge_fcc3,
798 gen_op_eval_fbuge_fcc3,
799 gen_op_eval_fble_fcc3,
800 gen_op_eval_fbule_fcc3,
801 gen_op_eval_fbo_fcc3,
808 #ifdef TARGET_SPARC64
809 static void gen_cond_reg(int cond)
835 /* XXX: potentially incorrect if dynamic npc */
/*
 * Translate an integer conditional branch (Bicc / V9 BPcc).
 * 'offset' is the pre-scaled, sign-extended displacement, 'cc' selects
 * icc (0) or xcc (1) for the gen_cond dispatch table.  cond == 0
 * (visible as the "unconditional not taken" path) and cond == 0x8
 * ("always") are handled statically, adjusting dc->pc/dc->npc and —
 * with the annul bit 'a' (insn bit 29) — skipping the delay slot.
 * A true conditional evaluates the condition into T2, then either
 * emits an annulled two-way exit (gen_branch_a) or records the two
 * possible next PCs in jump_pc[] and enters the JUMP_PC state.
 * NOTE(review): the if/else skeleton around these paths is missing
 * from this extract.
 */
836 static void do_branch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
838 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
839 target_ulong target = dc->pc + offset;
842 /* unconditional not taken */
844 dc->pc = dc->npc + 4;
845 dc->npc = dc->pc + 4;
848 dc->npc = dc->pc + 4;
850 } else if (cond == 0x8) {
851 /* unconditional taken */
854 dc->npc = dc->pc + 4;
861 gen_cond[cc][cond]();
863 gen_branch_a(dc, (long)dc->tb, target, dc->npc);
867 dc->jump_pc[0] = target;
868 dc->jump_pc[1] = dc->npc + 4;
874 /* XXX: potentially incorrect if dynamic npc */
/*
 * Translate a floating-point conditional branch (FBfcc / V9 FBPfcc).
 * Identical structure to do_branch, but the condition is evaluated
 * through gen_fcond indexed by 'cc' (%fcc0..%fcc3 on SPARC64).
 * NOTE(review): the if/else skeleton around these paths is missing
 * from this extract.
 */
875 static void do_fbranch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
877 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
878 target_ulong target = dc->pc + offset;
881 /* unconditional not taken */
883 dc->pc = dc->npc + 4;
884 dc->npc = dc->pc + 4;
887 dc->npc = dc->pc + 4;
889 } else if (cond == 0x8) {
890 /* unconditional taken */
893 dc->npc = dc->pc + 4;
900 gen_fcond[cc][cond]();
902 gen_branch_a(dc, (long)dc->tb, target, dc->npc);
906 dc->jump_pc[0] = target;
907 dc->jump_pc[1] = dc->npc + 4;
913 #ifdef TARGET_SPARC64
914 /* XXX: potentially incorrect if dynamic npc */
/*
 * Translate a V9 branch-on-register-contents (BPr).  The caller has
 * already loaded rs1 into T0; the condition (bits 25-27) is evaluated
 * on the register value (presumably via gen_cond_reg — the call is in
 * the missing lines).  As in do_branch, the annulled form exits via
 * gen_branch_a, otherwise jump_pc[] is filled for the JUMP_PC state.
 */
915 static void do_branch_reg(DisasContext * dc, int32_t offset, uint32_t insn)
917 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
918 target_ulong target = dc->pc + offset;
923 gen_branch_a(dc, (long)dc->tb, target, dc->npc);
927 dc->jump_pc[0] = target;
928 dc->jump_pc[1] = dc->npc + 4;
933 static GenOpFunc * const gen_fcmps[4] = {
940 static GenOpFunc * const gen_fcmpd[4] = {
/*
 * If the FPU is disabled (system emulation only), emit an fp-disabled
 * trap (TT_NFPU_INSN) and report it to the caller so translation of
 * the FP instruction can be abandoned.  In user-only builds the FPU
 * is always available.  NOTE(review): the save_state call, return
 * statements and #endif are in the missing lines of this extract.
 */
948 static int gen_trap_ifnofpu(DisasContext * dc)
950 #if !defined(CONFIG_USER_ONLY)
951 if (!dc->fpu_enabled) {
953 gen_op_exception(TT_NFPU_INSN);
961 /* before an instruction, dc->pc must be static */
962 static void disas_sparc_insn(DisasContext * dc)
964 unsigned int insn, opc, rs1, rs2, rd;
966 insn = ldl_code(dc->pc);
967 opc = GET_FIELD(insn, 0, 1);
969 rd = GET_FIELD(insn, 2, 6);
971 case 0: /* branches/sethi */
973 unsigned int xop = GET_FIELD(insn, 7, 9);
976 #ifdef TARGET_SPARC64
977 case 0x1: /* V9 BPcc */
981 target = GET_FIELD_SP(insn, 0, 18);
982 target = sign_extend(target, 18);
984 cc = GET_FIELD_SP(insn, 20, 21);
986 do_branch(dc, target, insn, 0);
988 do_branch(dc, target, insn, 1);
993 case 0x3: /* V9 BPr */
995 target = GET_FIELD_SP(insn, 0, 13) |
996 (GET_FIELD_SP(insn, 20, 21) << 14);
997 target = sign_extend(target, 16);
999 rs1 = GET_FIELD(insn, 13, 17);
1000 gen_movl_reg_T0(rs1);
1001 do_branch_reg(dc, target, insn);
1004 case 0x5: /* V9 FBPcc */
1006 int cc = GET_FIELD_SP(insn, 20, 21);
1007 if (gen_trap_ifnofpu(dc))
1009 target = GET_FIELD_SP(insn, 0, 18);
1010 target = sign_extend(target, 19);
1012 do_fbranch(dc, target, insn, cc);
1016 case 0x7: /* CBN+x */
1021 case 0x2: /* BN+x */
1023 target = GET_FIELD(insn, 10, 31);
1024 target = sign_extend(target, 22);
1026 do_branch(dc, target, insn, 0);
1029 case 0x6: /* FBN+x */
1031 if (gen_trap_ifnofpu(dc))
1033 target = GET_FIELD(insn, 10, 31);
1034 target = sign_extend(target, 22);
1036 do_fbranch(dc, target, insn, 0);
1039 case 0x4: /* SETHI */
1044 uint32_t value = GET_FIELD(insn, 10, 31);
1045 gen_movl_imm_T0(value << 10);
1046 gen_movl_T0_reg(rd);
1051 case 0x0: /* UNIMPL */
1060 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1062 #ifdef TARGET_SPARC64
1063 if (dc->pc == (uint32_t)dc->pc) {
1064 gen_op_movl_T0_im(dc->pc);
1066 gen_op_movq_T0_im64(dc->pc >> 32, dc->pc);
1069 gen_op_movl_T0_im(dc->pc);
1071 gen_movl_T0_reg(15);
1077 case 2: /* FPU & Logical Operations */
1079 unsigned int xop = GET_FIELD(insn, 7, 12);
1080 if (xop == 0x3a) { /* generate trap */
1083 rs1 = GET_FIELD(insn, 13, 17);
1084 gen_movl_reg_T0(rs1);
1086 rs2 = GET_FIELD(insn, 25, 31);
1090 gen_movl_simm_T1(rs2);
1096 rs2 = GET_FIELD(insn, 27, 31);
1100 gen_movl_reg_T1(rs2);
1106 cond = GET_FIELD(insn, 3, 6);
1110 } else if (cond != 0) {
1111 #ifdef TARGET_SPARC64
1113 int cc = GET_FIELD_SP(insn, 11, 12);
1117 gen_cond[0][cond]();
1119 gen_cond[1][cond]();
1125 gen_cond[0][cond]();
1134 } else if (xop == 0x28) {
1135 rs1 = GET_FIELD(insn, 13, 17);
1138 #ifndef TARGET_SPARC64
1139 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1140 manual, rdy on the microSPARC
1142 case 0x0f: /* stbar in the SPARCv8 manual,
1143 rdy on the microSPARC II */
1144 case 0x10 ... 0x1f: /* implementation-dependent in the
1145 SPARCv8 manual, rdy on the
1148 gen_op_movtl_T0_env(offsetof(CPUSPARCState, y));
1149 gen_movl_T0_reg(rd);
1151 #ifdef TARGET_SPARC64
1152 case 0x2: /* V9 rdccr */
1154 gen_movl_T0_reg(rd);
1156 case 0x3: /* V9 rdasi */
1157 gen_op_movl_T0_env(offsetof(CPUSPARCState, asi));
1158 gen_movl_T0_reg(rd);
1160 case 0x4: /* V9 rdtick */
1162 gen_movl_T0_reg(rd);
1164 case 0x5: /* V9 rdpc */
1165 if (dc->pc == (uint32_t)dc->pc) {
1166 gen_op_movl_T0_im(dc->pc);
1168 gen_op_movq_T0_im64(dc->pc >> 32, dc->pc);
1170 gen_movl_T0_reg(rd);
1172 case 0x6: /* V9 rdfprs */
1173 gen_op_movl_T0_env(offsetof(CPUSPARCState, fprs));
1174 gen_movl_T0_reg(rd);
1176 case 0xf: /* V9 membar */
1177 break; /* no effect */
1178 case 0x13: /* Graphics Status */
1179 if (gen_trap_ifnofpu(dc))
1181 gen_op_movtl_T0_env(offsetof(CPUSPARCState, gsr));
1182 gen_movl_T0_reg(rd);
1184 case 0x17: /* Tick compare */
1185 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tick_cmpr));
1186 gen_movl_T0_reg(rd);
1188 case 0x18: /* System tick */
1189 gen_op_rdtick(); // XXX
1190 gen_movl_T0_reg(rd);
1192 case 0x19: /* System tick compare */
1193 gen_op_movtl_T0_env(offsetof(CPUSPARCState, stick_cmpr));
1194 gen_movl_T0_reg(rd);
1196 case 0x10: /* Performance Control */
1197 case 0x11: /* Performance Instrumentation Counter */
1198 case 0x12: /* Dispatch Control */
1199 case 0x14: /* Softint set, WO */
1200 case 0x15: /* Softint clear, WO */
1201 case 0x16: /* Softint write */
1206 #if !defined(CONFIG_USER_ONLY)
1207 #ifndef TARGET_SPARC64
1208 } else if (xop == 0x29) { /* rdpsr / V9 unimp */
1209 if (!supervisor(dc))
1212 gen_movl_T0_reg(rd);
1215 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
1216 if (!supervisor(dc))
1218 #ifdef TARGET_SPARC64
1219 rs1 = GET_FIELD(insn, 13, 17);
1237 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
1243 gen_op_movl_T0_env(offsetof(CPUSPARCState, tl));
1246 gen_op_movl_T0_env(offsetof(CPUSPARCState, psrpil));
1252 gen_op_movl_T0_env(offsetof(CPUSPARCState, cansave));
1254 case 11: // canrestore
1255 gen_op_movl_T0_env(offsetof(CPUSPARCState, canrestore));
1257 case 12: // cleanwin
1258 gen_op_movl_T0_env(offsetof(CPUSPARCState, cleanwin));
1260 case 13: // otherwin
1261 gen_op_movl_T0_env(offsetof(CPUSPARCState, otherwin));
1264 gen_op_movl_T0_env(offsetof(CPUSPARCState, wstate));
1267 gen_op_movtl_T0_env(offsetof(CPUSPARCState, version));
1274 gen_op_movl_T0_env(offsetof(CPUSPARCState, wim));
1276 gen_movl_T0_reg(rd);
1278 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
1279 #ifdef TARGET_SPARC64
1282 if (!supervisor(dc))
1284 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
1285 gen_movl_T0_reg(rd);
1289 } else if (xop == 0x34) { /* FPU Operations */
1290 if (gen_trap_ifnofpu(dc))
1292 rs1 = GET_FIELD(insn, 13, 17);
1293 rs2 = GET_FIELD(insn, 27, 31);
1294 xop = GET_FIELD(insn, 18, 26);
1296 case 0x1: /* fmovs */
1297 gen_op_load_fpr_FT0(rs2);
1298 gen_op_store_FT0_fpr(rd);
1300 case 0x5: /* fnegs */
1301 gen_op_load_fpr_FT1(rs2);
1303 gen_op_store_FT0_fpr(rd);
1305 case 0x9: /* fabss */
1306 gen_op_load_fpr_FT1(rs2);
1308 gen_op_store_FT0_fpr(rd);
1310 case 0x29: /* fsqrts */
1311 gen_op_load_fpr_FT1(rs2);
1313 gen_op_store_FT0_fpr(rd);
1315 case 0x2a: /* fsqrtd */
1316 gen_op_load_fpr_DT1(DFPREG(rs2));
1318 gen_op_store_DT0_fpr(DFPREG(rd));
1320 case 0x2b: /* fsqrtq */
1323 gen_op_load_fpr_FT0(rs1);
1324 gen_op_load_fpr_FT1(rs2);
1326 gen_op_store_FT0_fpr(rd);
1329 gen_op_load_fpr_DT0(DFPREG(rs1));
1330 gen_op_load_fpr_DT1(DFPREG(rs2));
1332 gen_op_store_DT0_fpr(DFPREG(rd));
1334 case 0x43: /* faddq */
1337 gen_op_load_fpr_FT0(rs1);
1338 gen_op_load_fpr_FT1(rs2);
1340 gen_op_store_FT0_fpr(rd);
1343 gen_op_load_fpr_DT0(DFPREG(rs1));
1344 gen_op_load_fpr_DT1(DFPREG(rs2));
1346 gen_op_store_DT0_fpr(DFPREG(rd));
1348 case 0x47: /* fsubq */
1351 gen_op_load_fpr_FT0(rs1);
1352 gen_op_load_fpr_FT1(rs2);
1354 gen_op_store_FT0_fpr(rd);
1357 gen_op_load_fpr_DT0(DFPREG(rs1));
1358 gen_op_load_fpr_DT1(DFPREG(rs2));
1360 gen_op_store_DT0_fpr(rd);
1362 case 0x4b: /* fmulq */
1365 gen_op_load_fpr_FT0(rs1);
1366 gen_op_load_fpr_FT1(rs2);
1368 gen_op_store_FT0_fpr(rd);
1371 gen_op_load_fpr_DT0(DFPREG(rs1));
1372 gen_op_load_fpr_DT1(DFPREG(rs2));
1374 gen_op_store_DT0_fpr(DFPREG(rd));
1376 case 0x4f: /* fdivq */
1379 gen_op_load_fpr_FT0(rs1);
1380 gen_op_load_fpr_FT1(rs2);
1382 gen_op_store_DT0_fpr(DFPREG(rd));
1384 case 0x6e: /* fdmulq */
1387 gen_op_load_fpr_FT1(rs2);
1389 gen_op_store_FT0_fpr(rd);
1392 gen_op_load_fpr_DT1(DFPREG(rs2));
1394 gen_op_store_FT0_fpr(rd);
1396 case 0xc7: /* fqtos */
1399 gen_op_load_fpr_FT1(rs2);
1401 gen_op_store_DT0_fpr(DFPREG(rd));
1404 gen_op_load_fpr_FT1(rs2);
1406 gen_op_store_DT0_fpr(DFPREG(rd));
1408 case 0xcb: /* fqtod */
1410 case 0xcc: /* fitoq */
1412 case 0xcd: /* fstoq */
1414 case 0xce: /* fdtoq */
1417 gen_op_load_fpr_FT1(rs2);
1419 gen_op_store_FT0_fpr(rd);
1422 gen_op_load_fpr_DT1(rs2);
1424 gen_op_store_FT0_fpr(rd);
1426 case 0xd3: /* fqtoi */
1428 #ifdef TARGET_SPARC64
1429 case 0x2: /* V9 fmovd */
1430 gen_op_load_fpr_DT0(DFPREG(rs2));
1431 gen_op_store_DT0_fpr(DFPREG(rd));
1433 case 0x6: /* V9 fnegd */
1434 gen_op_load_fpr_DT1(DFPREG(rs2));
1436 gen_op_store_DT0_fpr(DFPREG(rd));
1438 case 0xa: /* V9 fabsd */
1439 gen_op_load_fpr_DT1(DFPREG(rs2));
1441 gen_op_store_DT0_fpr(DFPREG(rd));
1443 case 0x81: /* V9 fstox */
1444 gen_op_load_fpr_FT1(rs2);
1446 gen_op_store_DT0_fpr(DFPREG(rd));
1448 case 0x82: /* V9 fdtox */
1449 gen_op_load_fpr_DT1(DFPREG(rs2));
1451 gen_op_store_DT0_fpr(DFPREG(rd));
1453 case 0x84: /* V9 fxtos */
1454 gen_op_load_fpr_DT1(DFPREG(rs2));
1456 gen_op_store_FT0_fpr(rd);
1458 case 0x88: /* V9 fxtod */
1459 gen_op_load_fpr_DT1(DFPREG(rs2));
1461 gen_op_store_DT0_fpr(DFPREG(rd));
1463 case 0x3: /* V9 fmovq */
1464 case 0x7: /* V9 fnegq */
1465 case 0xb: /* V9 fabsq */
1466 case 0x83: /* V9 fqtox */
1467 case 0x8c: /* V9 fxtoq */
1473 } else if (xop == 0x35) { /* FPU Operations */
1474 #ifdef TARGET_SPARC64
1477 if (gen_trap_ifnofpu(dc))
1479 rs1 = GET_FIELD(insn, 13, 17);
1480 rs2 = GET_FIELD(insn, 27, 31);
1481 xop = GET_FIELD(insn, 18, 26);
1482 #ifdef TARGET_SPARC64
1483 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
1484 cond = GET_FIELD_SP(insn, 14, 17);
1485 gen_op_load_fpr_FT0(rd);
1486 gen_op_load_fpr_FT1(rs2);
1487 rs1 = GET_FIELD(insn, 13, 17);
1488 gen_movl_reg_T0(rs1);
1492 gen_op_store_FT0_fpr(rd);
1494 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
1495 cond = GET_FIELD_SP(insn, 14, 17);
1496 gen_op_load_fpr_DT0(rd);
1497 gen_op_load_fpr_DT1(rs2);
1499 rs1 = GET_FIELD(insn, 13, 17);
1500 gen_movl_reg_T0(rs1);
1503 gen_op_store_DT0_fpr(rd);
1505 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
1510 #ifdef TARGET_SPARC64
1511 case 0x001: /* V9 fmovscc %fcc0 */
1512 cond = GET_FIELD_SP(insn, 14, 17);
1513 gen_op_load_fpr_FT0(rd);
1514 gen_op_load_fpr_FT1(rs2);
1516 gen_fcond[0][cond]();
1518 gen_op_store_FT0_fpr(rd);
1520 case 0x002: /* V9 fmovdcc %fcc0 */
1521 cond = GET_FIELD_SP(insn, 14, 17);
1522 gen_op_load_fpr_DT0(rd);
1523 gen_op_load_fpr_DT1(rs2);
1525 gen_fcond[0][cond]();
1527 gen_op_store_DT0_fpr(rd);
1529 case 0x003: /* V9 fmovqcc %fcc0 */
1531 case 0x041: /* V9 fmovscc %fcc1 */
1532 cond = GET_FIELD_SP(insn, 14, 17);
1533 gen_op_load_fpr_FT0(rd);
1534 gen_op_load_fpr_FT1(rs2);
1536 gen_fcond[1][cond]();
1538 gen_op_store_FT0_fpr(rd);
1540 case 0x042: /* V9 fmovdcc %fcc1 */
1541 cond = GET_FIELD_SP(insn, 14, 17);
1542 gen_op_load_fpr_DT0(rd);
1543 gen_op_load_fpr_DT1(rs2);
1545 gen_fcond[1][cond]();
1547 gen_op_store_DT0_fpr(rd);
1549 case 0x043: /* V9 fmovqcc %fcc1 */
1551 case 0x081: /* V9 fmovscc %fcc2 */
1552 cond = GET_FIELD_SP(insn, 14, 17);
1553 gen_op_load_fpr_FT0(rd);
1554 gen_op_load_fpr_FT1(rs2);
1556 gen_fcond[2][cond]();
1558 gen_op_store_FT0_fpr(rd);
1560 case 0x082: /* V9 fmovdcc %fcc2 */
1561 cond = GET_FIELD_SP(insn, 14, 17);
1562 gen_op_load_fpr_DT0(rd);
1563 gen_op_load_fpr_DT1(rs2);
1565 gen_fcond[2][cond]();
1567 gen_op_store_DT0_fpr(rd);
1569 case 0x083: /* V9 fmovqcc %fcc2 */
1571 case 0x0c1: /* V9 fmovscc %fcc3 */
1572 cond = GET_FIELD_SP(insn, 14, 17);
1573 gen_op_load_fpr_FT0(rd);
1574 gen_op_load_fpr_FT1(rs2);
1576 gen_fcond[3][cond]();
1578 gen_op_store_FT0_fpr(rd);
1580 case 0x0c2: /* V9 fmovdcc %fcc3 */
1581 cond = GET_FIELD_SP(insn, 14, 17);
1582 gen_op_load_fpr_DT0(rd);
1583 gen_op_load_fpr_DT1(rs2);
1585 gen_fcond[3][cond]();
1587 gen_op_store_DT0_fpr(rd);
1589 case 0x0c3: /* V9 fmovqcc %fcc3 */
1591 case 0x101: /* V9 fmovscc %icc */
1592 cond = GET_FIELD_SP(insn, 14, 17);
1593 gen_op_load_fpr_FT0(rd);
1594 gen_op_load_fpr_FT1(rs2);
1596 gen_cond[0][cond]();
1598 gen_op_store_FT0_fpr(rd);
1600 case 0x102: /* V9 fmovdcc %icc */
1601 cond = GET_FIELD_SP(insn, 14, 17);
1602 gen_op_load_fpr_DT0(rd);
1603 gen_op_load_fpr_DT1(rs2);
1605 gen_cond[0][cond]();
1607 gen_op_store_DT0_fpr(rd);
1609 case 0x103: /* V9 fmovqcc %icc */
1611 case 0x181: /* V9 fmovscc %xcc */
1612 cond = GET_FIELD_SP(insn, 14, 17);
1613 gen_op_load_fpr_FT0(rd);
1614 gen_op_load_fpr_FT1(rs2);
1616 gen_cond[1][cond]();
1618 gen_op_store_FT0_fpr(rd);
1620 case 0x182: /* V9 fmovdcc %xcc */
1621 cond = GET_FIELD_SP(insn, 14, 17);
1622 gen_op_load_fpr_DT0(rd);
1623 gen_op_load_fpr_DT1(rs2);
1625 gen_cond[1][cond]();
1627 gen_op_store_DT0_fpr(rd);
1629 case 0x183: /* V9 fmovqcc %xcc */
1632 case 0x51: /* V9 %fcc */
1633 gen_op_load_fpr_FT0(rs1);
1634 gen_op_load_fpr_FT1(rs2);
1635 #ifdef TARGET_SPARC64
1636 gen_fcmps[rd & 3]();
1641 case 0x52: /* V9 %fcc */
1642 gen_op_load_fpr_DT0(DFPREG(rs1));
1643 gen_op_load_fpr_DT1(DFPREG(rs2));
1644 #ifdef TARGET_SPARC64
1645 gen_fcmpd[rd & 3]();
1650 case 0x53: /* fcmpq */
1652 case 0x55: /* fcmpes, V9 %fcc */
1653 gen_op_load_fpr_FT0(rs1);
1654 gen_op_load_fpr_FT1(rs2);
1655 #ifdef TARGET_SPARC64
1656 gen_fcmps[rd & 3]();
1658 gen_op_fcmps(); /* XXX should trap if qNaN or sNaN */
1661 case 0x56: /* fcmped, V9 %fcc */
1662 gen_op_load_fpr_DT0(DFPREG(rs1));
1663 gen_op_load_fpr_DT1(DFPREG(rs2));
1664 #ifdef TARGET_SPARC64
1665 gen_fcmpd[rd & 3]();
1667 gen_op_fcmpd(); /* XXX should trap if qNaN or sNaN */
1670 case 0x57: /* fcmpeq */
1676 } else if (xop == 0x2) {
1679 rs1 = GET_FIELD(insn, 13, 17);
1681 // or %g0, x, y -> mov T1, x; mov y, T1
1682 if (IS_IMM) { /* immediate */
1683 rs2 = GET_FIELDs(insn, 19, 31);
1684 gen_movl_simm_T1(rs2);
1685 } else { /* register */
1686 rs2 = GET_FIELD(insn, 27, 31);
1687 gen_movl_reg_T1(rs2);
1689 gen_movl_T1_reg(rd);
1691 gen_movl_reg_T0(rs1);
1692 if (IS_IMM) { /* immediate */
1693 // or x, #0, y -> mov T1, x; mov y, T1
1694 rs2 = GET_FIELDs(insn, 19, 31);
1696 gen_movl_simm_T1(rs2);
1699 } else { /* register */
1700 // or x, %g0, y -> mov T1, x; mov y, T1
1701 rs2 = GET_FIELD(insn, 27, 31);
1703 gen_movl_reg_T1(rs2);
1707 gen_movl_T0_reg(rd);
1710 #ifdef TARGET_SPARC64
1711 } else if (xop == 0x25) { /* sll, V9 sllx */
1712 rs1 = GET_FIELD(insn, 13, 17);
1713 gen_movl_reg_T0(rs1);
1714 if (IS_IMM) { /* immediate */
1715 rs2 = GET_FIELDs(insn, 20, 31);
1716 gen_movl_simm_T1(rs2);
1717 } else { /* register */
1718 rs2 = GET_FIELD(insn, 27, 31);
1719 gen_movl_reg_T1(rs2);
1721 if (insn & (1 << 12))
1725 gen_movl_T0_reg(rd);
1726 } else if (xop == 0x26) { /* srl, V9 srlx */
1727 rs1 = GET_FIELD(insn, 13, 17);
1728 gen_movl_reg_T0(rs1);
1729 if (IS_IMM) { /* immediate */
1730 rs2 = GET_FIELDs(insn, 20, 31);
1731 gen_movl_simm_T1(rs2);
1732 } else { /* register */
1733 rs2 = GET_FIELD(insn, 27, 31);
1734 gen_movl_reg_T1(rs2);
1736 if (insn & (1 << 12))
1740 gen_movl_T0_reg(rd);
1741 } else if (xop == 0x27) { /* sra, V9 srax */
1742 rs1 = GET_FIELD(insn, 13, 17);
1743 gen_movl_reg_T0(rs1);
1744 if (IS_IMM) { /* immediate */
1745 rs2 = GET_FIELDs(insn, 20, 31);
1746 gen_movl_simm_T1(rs2);
1747 } else { /* register */
1748 rs2 = GET_FIELD(insn, 27, 31);
1749 gen_movl_reg_T1(rs2);
1751 if (insn & (1 << 12))
1755 gen_movl_T0_reg(rd);
1757 } else if (xop < 0x36) {
1758 rs1 = GET_FIELD(insn, 13, 17);
1759 gen_movl_reg_T0(rs1);
1760 if (IS_IMM) { /* immediate */
1761 rs2 = GET_FIELDs(insn, 19, 31);
1762 gen_movl_simm_T1(rs2);
1763 } else { /* register */
1764 rs2 = GET_FIELD(insn, 27, 31);
1765 gen_movl_reg_T1(rs2);
1768 switch (xop & ~0x10) {
1771 gen_op_add_T1_T0_cc();
1778 gen_op_logic_T0_cc();
1783 gen_op_logic_T0_cc();
1788 gen_op_logic_T0_cc();
1792 gen_op_sub_T1_T0_cc();
1797 gen_op_andn_T1_T0();
1799 gen_op_logic_T0_cc();
1804 gen_op_logic_T0_cc();
1807 gen_op_xnor_T1_T0();
1809 gen_op_logic_T0_cc();
1813 gen_op_addx_T1_T0_cc();
1815 gen_op_addx_T1_T0();
1817 #ifdef TARGET_SPARC64
1818 case 0x9: /* V9 mulx */
1819 gen_op_mulx_T1_T0();
1823 gen_op_umul_T1_T0();
1825 gen_op_logic_T0_cc();
1828 gen_op_smul_T1_T0();
1830 gen_op_logic_T0_cc();
1834 gen_op_subx_T1_T0_cc();
1836 gen_op_subx_T1_T0();
1838 #ifdef TARGET_SPARC64
1839 case 0xd: /* V9 udivx */
1840 gen_op_udivx_T1_T0();
1844 gen_op_udiv_T1_T0();
1849 gen_op_sdiv_T1_T0();
1856 gen_movl_T0_reg(rd);
1859 case 0x20: /* taddcc */
1860 gen_op_tadd_T1_T0_cc();
1861 gen_movl_T0_reg(rd);
1863 case 0x21: /* tsubcc */
1864 gen_op_tsub_T1_T0_cc();
1865 gen_movl_T0_reg(rd);
1867 case 0x22: /* taddcctv */
1868 gen_op_tadd_T1_T0_ccTV();
1869 gen_movl_T0_reg(rd);
1871 case 0x23: /* tsubcctv */
1872 gen_op_tsub_T1_T0_ccTV();
1873 gen_movl_T0_reg(rd);
1875 case 0x24: /* mulscc */
1876 gen_op_mulscc_T1_T0();
1877 gen_movl_T0_reg(rd);
1879 #ifndef TARGET_SPARC64
1880 case 0x25: /* sll */
1882 gen_movl_T0_reg(rd);
1884 case 0x26: /* srl */
1886 gen_movl_T0_reg(rd);
1888 case 0x27: /* sra */
1890 gen_movl_T0_reg(rd);
1898 gen_op_movtl_env_T0(offsetof(CPUSPARCState, y));
1900 #ifndef TARGET_SPARC64
1901 case 0x01 ... 0x0f: /* undefined in the
1905 case 0x10 ... 0x1f: /* implementation-dependent
1911 case 0x2: /* V9 wrccr */
1914 case 0x3: /* V9 wrasi */
1915 gen_op_movl_env_T0(offsetof(CPUSPARCState, asi));
1917 case 0x6: /* V9 wrfprs */
1918 gen_op_movl_env_T0(offsetof(CPUSPARCState, fprs));
1920 case 0xf: /* V9 sir, nop if user */
1921 #if !defined(CONFIG_USER_ONLY)
1926 case 0x13: /* Graphics Status */
1927 if (gen_trap_ifnofpu(dc))
1929 gen_op_movtl_env_T0(offsetof(CPUSPARCState, gsr));
1931 case 0x17: /* Tick compare */
1932 #if !defined(CONFIG_USER_ONLY)
1933 if (!supervisor(dc))
1936 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tick_cmpr));
1938 case 0x18: /* System tick */
1939 #if !defined(CONFIG_USER_ONLY)
1940 if (!supervisor(dc))
1943 gen_op_movtl_env_T0(offsetof(CPUSPARCState, stick_cmpr));
1945 case 0x19: /* System tick compare */
1946 #if !defined(CONFIG_USER_ONLY)
1947 if (!supervisor(dc))
1950 gen_op_movtl_env_T0(offsetof(CPUSPARCState, stick_cmpr));
1953 case 0x10: /* Performance Control */
1954 case 0x11: /* Performance Instrumentation Counter */
1955 case 0x12: /* Dispatch Control */
1956 case 0x14: /* Softint set */
1957 case 0x15: /* Softint clear */
1958 case 0x16: /* Softint write */
1965 #if !defined(CONFIG_USER_ONLY)
1966 case 0x31: /* wrpsr, V9 saved, restored */
1968 if (!supervisor(dc))
1970 #ifdef TARGET_SPARC64
1992 case 0x32: /* wrwim, V9 wrpr */
1994 if (!supervisor(dc))
1997 #ifdef TARGET_SPARC64
2015 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
2026 gen_op_movl_env_T0(offsetof(CPUSPARCState, tl));
2029 gen_op_movl_env_T0(offsetof(CPUSPARCState, psrpil));
2035 gen_op_movl_env_T0(offsetof(CPUSPARCState, cansave));
2037 case 11: // canrestore
2038 gen_op_movl_env_T0(offsetof(CPUSPARCState, canrestore));
2040 case 12: // cleanwin
2041 gen_op_movl_env_T0(offsetof(CPUSPARCState, cleanwin));
2043 case 13: // otherwin
2044 gen_op_movl_env_T0(offsetof(CPUSPARCState, otherwin));
2047 gen_op_movl_env_T0(offsetof(CPUSPARCState, wstate));
2057 #ifndef TARGET_SPARC64
2058 case 0x33: /* wrtbr, V9 unimp */
2060 if (!supervisor(dc))
2063 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
2068 #ifdef TARGET_SPARC64
2069 case 0x2c: /* V9 movcc */
2071 int cc = GET_FIELD_SP(insn, 11, 12);
2072 int cond = GET_FIELD_SP(insn, 14, 17);
2073 if (IS_IMM) { /* immediate */
2074 rs2 = GET_FIELD_SPs(insn, 0, 10);
2075 gen_movl_simm_T1(rs2);
2078 rs2 = GET_FIELD_SP(insn, 0, 4);
2079 gen_movl_reg_T1(rs2);
2081 gen_movl_reg_T0(rd);
2083 if (insn & (1 << 18)) {
2085 gen_cond[0][cond]();
2087 gen_cond[1][cond]();
2091 gen_fcond[cc][cond]();
2094 gen_movl_T0_reg(rd);
2097 case 0x2d: /* V9 sdivx */
2098 gen_op_sdivx_T1_T0();
2099 gen_movl_T0_reg(rd);
2101 case 0x2e: /* V9 popc */
2103 if (IS_IMM) { /* immediate */
2104 rs2 = GET_FIELD_SPs(insn, 0, 12);
2105 gen_movl_simm_T1(rs2);
2106 // XXX optimize: popc(constant)
2109 rs2 = GET_FIELD_SP(insn, 0, 4);
2110 gen_movl_reg_T1(rs2);
2113 gen_movl_T0_reg(rd);
2115 case 0x2f: /* V9 movr */
2117 int cond = GET_FIELD_SP(insn, 10, 12);
2118 rs1 = GET_FIELD(insn, 13, 17);
2120 gen_movl_reg_T0(rs1);
2122 if (IS_IMM) { /* immediate */
2123 rs2 = GET_FIELD_SPs(insn, 0, 10);
2124 gen_movl_simm_T1(rs2);
2127 rs2 = GET_FIELD_SP(insn, 0, 4);
2128 gen_movl_reg_T1(rs2);
2130 gen_movl_reg_T0(rd);
2132 gen_movl_T0_reg(rd);
2135 case 0x36: /* UltraSparc shutdown, VIS */
2137 int opf = GET_FIELD_SP(insn, 5, 13);
2138 rs1 = GET_FIELD(insn, 13, 17);
2139 rs2 = GET_FIELD(insn, 27, 31);
2142 case 0x018: /* VIS I alignaddr */
2143 if (gen_trap_ifnofpu(dc))
2145 gen_movl_reg_T0(rs1);
2146 gen_movl_reg_T1(rs2);
2148 gen_movl_T0_reg(rd);
2150 case 0x01a: /* VIS I alignaddrl */
2151 if (gen_trap_ifnofpu(dc))
2155 case 0x048: /* VIS I faligndata */
2156 if (gen_trap_ifnofpu(dc))
2158 gen_op_load_fpr_DT0(rs1);
2159 gen_op_load_fpr_DT1(rs2);
2160 gen_op_faligndata();
2161 gen_op_store_DT0_fpr(rd);
2173 } else if (xop == 0x36 || xop == 0x37) { /* CPop1 & CPop2,
2176 #ifdef TARGET_SPARC64
2181 #ifdef TARGET_SPARC64
2182 } else if (xop == 0x39) { /* V9 return */
2183 rs1 = GET_FIELD(insn, 13, 17);
2184 gen_movl_reg_T0(rs1);
2185 if (IS_IMM) { /* immediate */
2186 rs2 = GET_FIELDs(insn, 19, 31);
2190 gen_movl_simm_T1(rs2);
2195 } else { /* register */
2196 rs2 = GET_FIELD(insn, 27, 31);
2200 gen_movl_reg_T1(rs2);
2208 gen_op_movl_npc_T0();
2209 dc->npc = DYNAMIC_PC;
2213 rs1 = GET_FIELD(insn, 13, 17);
2214 gen_movl_reg_T0(rs1);
2215 if (IS_IMM) { /* immediate */
2216 rs2 = GET_FIELDs(insn, 19, 31);
2220 gen_movl_simm_T1(rs2);
2225 } else { /* register */
2226 rs2 = GET_FIELD(insn, 27, 31);
2230 gen_movl_reg_T1(rs2);
2237 case 0x38: /* jmpl */
2240 #ifdef TARGET_SPARC64
2241 if (dc->pc == (uint32_t)dc->pc) {
2242 gen_op_movl_T1_im(dc->pc);
2244 gen_op_movq_T1_im64(dc->pc >> 32, dc->pc);
2247 gen_op_movl_T1_im(dc->pc);
2249 gen_movl_T1_reg(rd);
2252 gen_op_movl_npc_T0();
2253 dc->npc = DYNAMIC_PC;
2256 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
2257 case 0x39: /* rett, V9 return */
2259 if (!supervisor(dc))
2262 gen_op_movl_npc_T0();
2263 dc->npc = DYNAMIC_PC;
2268 case 0x3b: /* flush */
2271 case 0x3c: /* save */
2274 gen_movl_T0_reg(rd);
2276 case 0x3d: /* restore */
2279 gen_movl_T0_reg(rd);
2281 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
2282 case 0x3e: /* V9 done/retry */
2286 if (!supervisor(dc))
2288 dc->npc = DYNAMIC_PC;
2289 dc->pc = DYNAMIC_PC;
2293 if (!supervisor(dc))
2295 dc->npc = DYNAMIC_PC;
2296 dc->pc = DYNAMIC_PC;
2312 case 3: /* load/store instructions */
2314 unsigned int xop = GET_FIELD(insn, 7, 12);
2315 rs1 = GET_FIELD(insn, 13, 17);
2316 gen_movl_reg_T0(rs1);
2317 if (IS_IMM) { /* immediate */
2318 rs2 = GET_FIELDs(insn, 19, 31);
2322 gen_movl_simm_T1(rs2);
2327 } else { /* register */
2328 rs2 = GET_FIELD(insn, 27, 31);
2332 gen_movl_reg_T1(rs2);
2338 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) || \
2339 (xop > 0x17 && xop <= 0x1d ) || \
2340 (xop > 0x2c && xop <= 0x33) || xop == 0x1f) {
2342 case 0x0: /* load word */
2345 case 0x1: /* load unsigned byte */
2348 case 0x2: /* load unsigned halfword */
2351 case 0x3: /* load double word */
2355 gen_movl_T0_reg(rd + 1);
2357 case 0x9: /* load signed byte */
2360 case 0xa: /* load signed halfword */
2363 case 0xd: /* ldstub -- XXX: should be atomically */
2364 gen_op_ldst(ldstub);
2366 case 0x0f: /* swap register with memory. Also atomically */
2367 gen_movl_reg_T1(rd);
2370 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2371 case 0x10: /* load word alternate */
2372 #ifndef TARGET_SPARC64
2375 if (!supervisor(dc))
2378 gen_op_lda(insn, 1, 4, 0);
2380 case 0x11: /* load unsigned byte alternate */
2381 #ifndef TARGET_SPARC64
2384 if (!supervisor(dc))
2387 gen_op_lduba(insn, 1, 1, 0);
2389 case 0x12: /* load unsigned halfword alternate */
2390 #ifndef TARGET_SPARC64
2393 if (!supervisor(dc))
2396 gen_op_lduha(insn, 1, 2, 0);
2398 case 0x13: /* load double word alternate */
2399 #ifndef TARGET_SPARC64
2402 if (!supervisor(dc))
2407 gen_op_ldda(insn, 1, 8, 0);
2408 gen_movl_T0_reg(rd + 1);
2410 case 0x19: /* load signed byte alternate */
2411 #ifndef TARGET_SPARC64
2414 if (!supervisor(dc))
2417 gen_op_ldsba(insn, 1, 1, 1);
2419 case 0x1a: /* load signed halfword alternate */
2420 #ifndef TARGET_SPARC64
2423 if (!supervisor(dc))
2426 gen_op_ldsha(insn, 1, 2 ,1);
2428 case 0x1d: /* ldstuba -- XXX: should be atomically */
2429 #ifndef TARGET_SPARC64
2432 if (!supervisor(dc))
2435 gen_op_ldstuba(insn, 1, 1, 0);
2437 case 0x1f: /* swap reg with alt. memory. Also atomically */
2438 #ifndef TARGET_SPARC64
2441 if (!supervisor(dc))
2444 gen_movl_reg_T1(rd);
2445 gen_op_swapa(insn, 1, 4, 0);
2448 #ifndef TARGET_SPARC64
2449 case 0x30: /* ldc */
2450 case 0x31: /* ldcsr */
2451 case 0x33: /* lddc */
2453 /* avoid warnings */
2454 (void) &gen_op_stfa;
2455 (void) &gen_op_stdfa;
2456 (void) &gen_op_ldfa;
2457 (void) &gen_op_lddfa;
2459 #if !defined(CONFIG_USER_ONLY)
2461 (void) &gen_op_casx;
2465 #ifdef TARGET_SPARC64
2466 case 0x08: /* V9 ldsw */
2469 case 0x0b: /* V9 ldx */
2472 case 0x18: /* V9 ldswa */
2473 gen_op_ldswa(insn, 1, 4, 1);
2475 case 0x1b: /* V9 ldxa */
2476 gen_op_ldxa(insn, 1, 8, 0);
2478 case 0x2d: /* V9 prefetch, no effect */
2480 case 0x30: /* V9 ldfa */
2481 gen_op_ldfa(insn, 1, 8, 0); // XXX
2483 case 0x33: /* V9 lddfa */
2484 gen_op_lddfa(insn, 1, 8, 0); // XXX
2487 case 0x3d: /* V9 prefetcha, no effect */
2489 case 0x32: /* V9 ldqfa */
2495 gen_movl_T1_reg(rd);
2496 #ifdef TARGET_SPARC64
2499 } else if (xop >= 0x20 && xop < 0x24) {
2500 if (gen_trap_ifnofpu(dc))
2503 case 0x20: /* load fpreg */
2505 gen_op_store_FT0_fpr(rd);
2507 case 0x21: /* load fsr */
2511 case 0x22: /* load quad fpreg */
2513 case 0x23: /* load double fpreg */
2515 gen_op_store_DT0_fpr(DFPREG(rd));
2520 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
2521 xop == 0xe || xop == 0x1e) {
2522 gen_movl_reg_T1(rd);
2537 gen_movl_reg_T2(rd + 1);
2540 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2542 #ifndef TARGET_SPARC64
2545 if (!supervisor(dc))
2548 gen_op_sta(insn, 0, 4, 0);
2551 #ifndef TARGET_SPARC64
2554 if (!supervisor(dc))
2557 gen_op_stba(insn, 0, 1, 0);
2560 #ifndef TARGET_SPARC64
2563 if (!supervisor(dc))
2566 gen_op_stha(insn, 0, 2, 0);
2569 #ifndef TARGET_SPARC64
2572 if (!supervisor(dc))
2578 gen_movl_reg_T2(rd + 1);
2579 gen_op_stda(insn, 0, 8, 0);
2582 #ifdef TARGET_SPARC64
2583 case 0x0e: /* V9 stx */
2586 case 0x1e: /* V9 stxa */
2587 gen_op_stxa(insn, 0, 8, 0); // XXX
2593 } else if (xop > 0x23 && xop < 0x28) {
2594 if (gen_trap_ifnofpu(dc))
2598 gen_op_load_fpr_FT0(rd);
2601 case 0x25: /* stfsr, V9 stxfsr */
2605 case 0x26: /* stdfq */
2608 gen_op_load_fpr_DT0(DFPREG(rd));
2614 } else if (xop > 0x33 && xop < 0x3f) {
2616 #ifdef TARGET_SPARC64
2617 case 0x34: /* V9 stfa */
2618 gen_op_stfa(insn, 0, 0, 0); // XXX
2620 case 0x37: /* V9 stdfa */
2621 gen_op_stdfa(insn, 0, 0, 0); // XXX
2623 case 0x3c: /* V9 casa */
2624 gen_op_casa(insn, 0, 4, 0); // XXX
2626 case 0x3e: /* V9 casxa */
2627 gen_op_casxa(insn, 0, 8, 0); // XXX
2629 case 0x36: /* V9 stqfa */
2632 case 0x34: /* stc */
2633 case 0x35: /* stcsr */
2634 case 0x36: /* stdcq */
2635 case 0x37: /* stdc */
2647 /* default case for non jump instructions */
2648 if (dc->npc == DYNAMIC_PC) {
2649 dc->pc = DYNAMIC_PC;
2651 } else if (dc->npc == JUMP_PC) {
2652 /* we can do a static jump */
2653 gen_branch2(dc, (long)dc->tb, dc->jump_pc[0], dc->jump_pc[1]);
2657 dc->npc = dc->npc + 4;
2663 gen_op_exception(TT_ILL_INSN);
2666 #if !defined(CONFIG_USER_ONLY)
2669 gen_op_exception(TT_PRIV_INSN);
2675 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
2678 #ifndef TARGET_SPARC64
2681 gen_op_exception(TT_NCP_INSN);
/*
 * Translate a block of SPARC guest code into micro-ops.
 * When 'spc' is non-zero the caller is recovering a guest PC from a host
 * PC (the gen_opc_* side tables are filled in); otherwise this is a
 * normal translation pass.
 * NOTE(review): this listing is non-contiguous (embedded source line
 * numbers jump), so some statements of this function are not visible here.
 */
2687 static inline int gen_intermediate_code_internal(TranslationBlock * tb,
2688 int spc, CPUSPARCState *env)
2690 target_ulong pc_start, last_pc;
2691 uint16_t *gen_opc_end;
2692 DisasContext dc1, *dc = &dc1;
2695 memset(dc, 0, sizeof(DisasContext));
/* npc seed comes from cs_base: an integer PC, DYNAMIC_PC or JUMP_PC */
2700 dc->npc = (target_ulong) tb->cs_base;
2701 #if defined(CONFIG_USER_ONLY)
/* user mode: the FPU is always available */
2703 dc->fpu_enabled = 1;
/* system emulation: memory index reflects supervisor bit of the PSR */
2705 dc->mem_idx = ((env->psrs) != 0);
2706 #ifdef TARGET_SPARC64
/* V9: FPU usable only when both PSTATE.PEF and FPRS.FEF are set */
2707 dc->fpu_enabled = (((env->pstate & PS_PEF) != 0) && ((env->fprs & FPRS_FEF) != 0));
2709 dc->fpu_enabled = ((env->psref) != 0);
/* reset the micro-op output buffers for this translation */
2712 gen_opc_ptr = gen_opc_buf;
2713 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
2714 gen_opparam_ptr = gen_opparam_buf;
/* stop translation at a debugger breakpoint, unless it is the first insn */
2718 if (env->nb_breakpoints > 0) {
2719 for(j = 0; j < env->nb_breakpoints; j++) {
2720 if (env->breakpoints[j] == dc->pc) {
2721 if (dc->pc != pc_start)
2733 fprintf(logfile, "Search PC...\n");
/* record per-insn PC/NPC so a host PC can be mapped back to a guest PC */
2734 j = gen_opc_ptr - gen_opc_buf;
2738 gen_opc_instr_start[lj++] = 0;
2739 gen_opc_pc[lj] = dc->pc;
2740 gen_opc_npc[lj] = dc->npc;
2741 gen_opc_instr_start[lj] = 1;
/* decode and translate one guest instruction */
2745 disas_sparc_insn(dc);
2749 /* if the next PC is different, we abort now */
2750 if (dc->pc != (last_pc + 4))
2752 /* if we reach a page boundary, we stop generation so that the
2753 PC of a TT_TFAULT exception is always in the right page */
2754 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
2756 /* if single step mode, we generate only one instruction and
2757 generate an exception */
2758 if (env->singlestep_enabled) {
/* loop until the op buffer is nearly full or the page budget is spent */
2764 } while ((gen_opc_ptr < gen_opc_end) &&
2765 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));
2769 if (dc->pc != DYNAMIC_PC &&
2770 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
2771 /* static PC and NPC: we can use direct chaining */
2772 gen_branch(dc, (long)tb, dc->pc, dc->npc);
2774 if (dc->pc != DYNAMIC_PC)
/* terminate the micro-op stream */
2781 *gen_opc_ptr = INDEX_op_end;
2783 j = gen_opc_ptr - gen_opc_buf;
2786 gen_opc_instr_start[lj++] = 0;
/* export the two possible targets of a pending conditional jump */
2793 gen_opc_jump_pc[0] = dc->jump_pc[0];
2794 gen_opc_jump_pc[1] = dc->jump_pc[1];
2796 tb->size = last_pc + 4 - pc_start;
/* optional logging of guest assembly and generated micro-ops */
2799 if (loglevel & CPU_LOG_TB_IN_ASM) {
2800 fprintf(logfile, "--------------\n");
2801 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2802 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
2803 fprintf(logfile, "\n");
2804 if (loglevel & CPU_LOG_TB_OP) {
2805 fprintf(logfile, "OP:\n");
2806 dump_ops(gen_opc_buf, gen_opparam_buf);
2807 fprintf(logfile, "\n");
/* Public entry point: normal translation pass (spc == 0). */
2814 int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
2816 return gen_intermediate_code_internal(tb, 0, env);
/* Public entry point: PC-search pass (spc == 1) used to map a host PC
   back to the guest PC/NPC via the gen_opc_* side tables. */
2819 int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
2821 return gen_intermediate_code_internal(tb, 1, env);
2824 extern int ram_size;
/*
 * Reset the CPU state to its power-on values: clear everything, then
 * re-initialize the fields that must be non-zero.
 * NOTE(review): listing is non-contiguous; #else/#endif lines of the
 * conditional blocks below are not visible here.
 */
2826 void cpu_reset(CPUSPARCState *env)
2828 memset(env, 0, sizeof(*env));
/* point the window register pointer at the current window (16 regs each) */
2832 env->regwptr = env->regbase + (env->cwp * 16);
2833 #if defined(CONFIG_USER_ONLY)
2834 env->user_mode_only = 1;
2835 #ifdef TARGET_SPARC64
/* V9 window-management state starts with all windows clean/savable */
2836 env->cleanwin = NWINDOWS - 1;
2837 env->cansave = NWINDOWS - 1;
/* %g1 = RAM size — presumably a firmware/boot convention; TODO confirm */
2842 env->gregs[1] = ram_size;
2843 #ifdef TARGET_SPARC64
/* start in privileged mode on V9 */
2844 env->pstate = PS_PRIV;
/* reset PC values — presumably PROM/firmware entry addresses; confirm */
2845 env->pc = 0x1fff0000000ULL;
2847 env->pc = 0xffd00000;
2849 env->npc = env->pc + 4;
/* Allocate and return a zero-initialized CPU state structure.
   NOTE(review): the remainder of the initialization (NULL check,
   reset, return) is not visible in this non-contiguous listing. */
2853 CPUSPARCState *cpu_sparc_init(void)
2857 env = qemu_mallocz(sizeof(CPUSPARCState));
/* Table of supported CPU models.  Each entry supplies the values the
   guest reads back from its IU/FPU/MMU version registers. */
2865 static const sparc_def_t sparc_defs[] = {
2866 #ifdef TARGET_SPARC64
2868 .name = "TI UltraSparc II",
/* V9 VER register layout: manuf<<48 | impl<<32 | mask<<24 | maxtl<<8 | maxwin */
2869 .iu_version = ((0x17ULL << 48) | (0x11ULL << 32) | (0 << 24)
2870 | (MAXTL << 8) | (NWINDOWS - 1)),
2871 .fpu_version = 0x00000000,
2876 .name = "Fujitsu MB86904",
2877 .iu_version = 0x04 << 24, /* Impl 0, ver 4 */
2878 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
2879 .mmu_version = 0x04 << 24, /* Impl 0, ver 4 */
2882 /* XXX: Replace with real values */
2883 .name = "TI SuperSparc II",
2884 .iu_version = 0x40000000,
2885 .fpu_version = 0x00000000,
2886 .mmu_version = 0x00000000,
/* Case-insensitive lookup of a CPU model by name in sparc_defs[].
   On a match, stores a pointer to the entry through 'def'.
   NOTE(review): return statements are not visible in this listing;
   presumably 0 on success and negative on failure — confirm. */
2891 int sparc_find_by_name(const unsigned char *name, const sparc_def_t **def)
2898 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
2899 if (strcasecmp(name, sparc_defs[i].name) == 0) {
2900 *def = &sparc_defs[i];
/* Print one line per supported CPU model (name plus IU/FPU/MMU version
   register values) through the caller-supplied fprintf-like callback. */
2909 void sparc_cpu_list (FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
2913 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
2914 (*cpu_fprintf)(f, "Sparc %16s IU " TARGET_FMT_lx " FPU %08x MMU %08x\n",
2916 sparc_defs[i].iu_version,
2917 sparc_defs[i].fpu_version,
2918 sparc_defs[i].mmu_version);
/* Apply a CPU model definition to a CPU state: copy the model's version
   register values into env.  The MMU control register only exists on
   pre-V9 targets, hence the conditional. */
2922 int cpu_sparc_register (CPUSPARCState *env, const sparc_def_t *def)
2924 env->version = def->iu_version;
2925 env->fsr = def->fpu_version;
2926 #if !defined(TARGET_SPARC64)
2927 env->mmuregs[0] = def->mmu_version;
/* Expand to the flag character 'b' when PSR mask 'a' is set in the
   caller's local 'env->psr', '-' otherwise.  Both arguments are
   parenthesized so compound expressions (e.g. FLAG_A | FLAG_B) bind
   correctly against the '&' and '?:' operators in the expansion. */
2932 #define GET_FLAG(a,b) ((env->psr & (a)) ? (b) : '-')
/*
 * Dump the CPU register state (PC/NPC, globals, current window,
 * floating-point registers, and the PSR or V9 control state) through a
 * caller-supplied fprintf-like callback.
 * NOTE(review): listing is non-contiguous; some loop headers and
 * closing braces are not visible here.
 */
2934 void cpu_dump_state(CPUState *env, FILE *f,
2935 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
2940 cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
2941 cpu_fprintf(f, "General Registers:\n");
/* i + '0' yields the register digit; valid since i stays below 10 */
2942 for (i = 0; i < 4; i++)
2943 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
2944 cpu_fprintf(f, "\n");
2946 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
2947 cpu_fprintf(f, "\nCurrent Register Window:\n");
/* x selects the out/local/in banks of the current window */
2948 for (x = 0; x < 3; x++) {
2949 for (i = 0; i < 4; i++)
2950 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
2951 (x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
2952 env->regwptr[i + x * 8]);
2953 cpu_fprintf(f, "\n");
2955 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
2956 (x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
2957 env->regwptr[i + x * 8]);
2958 cpu_fprintf(f, "\n");
2960 cpu_fprintf(f, "\nFloating Point Registers:\n");
2961 for (i = 0; i < 32; i++) {
2963 cpu_fprintf(f, "%%f%02d:", i);
/* NOTE(review): "%016lf" zero-pads a double to width 16 — unusual
   format; confirm it renders as intended on all hosts */
2964 cpu_fprintf(f, " %016lf", env->fpr[i]);
2966 cpu_fprintf(f, "\n");
2968 #ifdef TARGET_SPARC64
2969 cpu_fprintf(f, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d\n",
2970 env->pstate, GET_CCR(env), env->asi, env->tl);
2971 cpu_fprintf(f, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
2972 env->cansave, env->canrestore, env->otherwin, env->wstate,
/* cwp is printed inverted relative to the stored value */
2973 env->cleanwin, NWINDOWS - 1 - env->cwp);
2975 cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
2976 GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
2977 GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
2978 env->psrs?'S':'-', env->psrps?'P':'-',
2979 env->psret?'E':'-', env->wim);
2981 cpu_fprintf(f, "fsr: 0x%08x\n", GET_FSR32(env));
2984 #if defined(CONFIG_USER_ONLY)
/* User-mode emulation: addresses are untranslated, so the body
   (not visible in this listing) presumably returns 'addr' unchanged. */
2985 target_ulong cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
/* MMU translation helper defined elsewhere; 0 return means success. */
2991 extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
2992 int *access_index, target_ulong address, int rw,
/* Debugger address translation: map a guest virtual address to a
   physical address without raising faults.  Tries rw == 2 first, then
   rw == 0 — presumably instruction fetch then data read; confirm
   against get_physical_address(). */
2995 target_ulong cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
2997 target_phys_addr_t phys_addr;
2998 int prot, access_index;
3000 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2, 0) != 0)
3001 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 0, 0) != 0)
/* Runtime helper for the SPARC 'flush' instruction: invalidate any
   translated code covering the flushed doubleword (8 bytes) so it is
   retranslated after self-modifying writes. */
3007 void helper_flush(target_ulong addr)
3010 tb_invalidate_page_range(addr, addr + 8);