/*
   SPARC translation

   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"

#include "cpu.h"
#include "disas/disas.h"
#include "exec/helper-proto.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-gen.h"

#include "trace-tcg.h"
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_env cpu_env;
static TCGv_ptr cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

#include "exec/gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    TCGv ttl[5];
    int n_t32;
    int n_ttl;
} DisasContext;

typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
// This macro uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x, a, b) sign_extend(GET_FIELD(x, a, b), (b) - (a) + 1)
#define GET_FIELD_SPs(x, a, b) sign_extend(GET_FIELD_SP(x, a, b), ((b) - (a) + 1))
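/* Example: GET_FIELD(insn, 2, 6) reads the five bits at big-endian
   positions 2..6, i.e. (insn >> (31 - 6)) & 0x1f, which is host bits
   29..25 -- the rd field of most instruction formats.  GET_FIELD_SP
   instead counts from bit 0 = 2^0, as the architecture manuals do. */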
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
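/* SPARC V9 encodes the double/quad registers %f32..%f62 by reusing bit 0
   of the 5-bit register field as the high bit of the register number;
   DFPREG/QFPREG undo that encoding and mask off the alignment bits. */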
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
#define IS_IMM (insn & (1 << 13))
static inline TCGv_i32 get_temp_i32(DisasContext *dc)
{
    TCGv_i32 t;
    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
    return t;
}

static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
/* floating point registers moves */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = get_temp_i32(dc);
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        return ret;
    }
#endif
}
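/* Each TCGv_i64 in cpu_fpr[] holds one even/odd pair of single-precision
   registers: the even register lives in bits 63..32 and the odd one in
   bits 31..0, matching the architectural register-file layout. */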
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}

static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src / 2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src / 2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst / 2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif
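/* dc->mem_idx doubles as the privilege level of the translation:
   user-mode emulation always runs unprivileged, while softmmu derives
   supervisor/hypervisor status from the MMU index chosen at translation
   time. */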
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc)) {
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
    }
#endif
}
static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg > 0) {
        assert(reg < 32);
        return cpu_regs[reg];
    } else {
        /* Reads of %g0 always yield zero.  */
        TCGv t = get_temp_tl(dc);
        tcg_gen_movi_tl(t, 0);
        return t;
    }
}

static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        assert(reg < 32);
        tcg_gen_mov_tl(cpu_regs[reg], v);
    }
}

static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
{
    if (reg > 0) {
        assert(reg < 32);
        return cpu_regs[reg];
    } else {
        /* Writes to %g0 are discarded into a temporary.  */
        return get_temp_tl(dc);
    }
}
static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
                               target_ulong npc)
{
    if (unlikely(s->singlestep)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (pc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK) &&
           (npc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK);
#else
    return true;
#endif
}

static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((uintptr_t)s->tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
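/* Returning (uintptr_t)tb + tb_num from a TB lets the execution loop patch
   goto_tb slot tb_num of this block, chaining the two blocks directly;
   exit_tb(0) means "no chaining, look the next TB up in the hash table". */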
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
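/* For a 32-bit unsigned addition the carry-out is exactly (dst < src):
   e.g. 0xffffffff + 1 wraps to 0, and 0 < 0xffffffff detects the carry.
   The same setcond trick recovers the borrow of a subtraction from
   (src1 < src2) in gen_sub32_carry32() below. */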
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the sub that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(t0, cpu_y, 1);
    tcg_gen_andi_tl(t0, t0, 0x7fffffff);
    tcg_gen_or_tl(t0, t0, r_temp);
    tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
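/* MULScc is one step of the V8 multiply-step algorithm: the previous N^V
   condition is shifted into the partial product, the low bit of %y decides
   whether the multiplicand is added, and %y shifts right one bit per step,
   so a sequence of 32 MULScc instructions performs a 32x32 multiply. */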
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
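/* UMUL/SMUL produce the full 64-bit product: the high 32 bits land in %y
   and the destination register receives the low 32 bits (the whole 64-bit
   value when the target register is 64 bits wide). */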
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
// 1:
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0:
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_temp_free(t0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->is_br = 1;
}
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}
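/* dc->npc tracks the delay-slot successor in one of three forms: a known
   constant, DYNAMIC_PC (the value lives only in cpu_npc), or JUMP_PC, where
   the real npc is still one of dc->jump_pc[0/1] depending on cpu_cond.  The
   helpers below collapse JUMP_PC back to a dynamic value before the
   condition register can be clobbered or the npc observed. */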
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}

static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}

static void gen_exception(DisasContext *dc, int which)
{
    TCGv_i32 t;

    save_state(dc);
    t = tcg_const_i32(which);
    gen_helper_raise_exception(cpu_env, t);
    tcg_temp_free_i32(t);
    dc->is_br = 1;
}
static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7: /* overflow */
        case 15: /* !overflow */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}

static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
#endif
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
#endif
#ifdef TARGET_SPARC64
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}
#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}

static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
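/* On V9, an alternate-space access with i == 1 takes its ASI from the
   current %asi register (only known at run time), while i == 0 encodes
   the ASI as an immediate in bits 12..5 of the instruction. */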
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 t64 = tcg_temp_new_i64();

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}
static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}

static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
                                TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
    TCGv_i32 r_asi = gen_get_asi(insn, addr);

    gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 t64 = tcg_temp_new_i64();

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(t64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val, t64;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    t64 = tcg_temp_new_i64();
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, src);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv t;
    TCGv_i64 t64;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    t64 = tcg_temp_new_i64();
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    t = gen_dest_gpr(dc, rd | 1);
    tcg_gen_trunc_i64_tl(t, t64);
    gen_store_gpr(dc, rd | 1, t);

    tcg_gen_shri_i64(t64, t64, 32);
    tcg_gen_trunc_i64_tl(hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd, hi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}

#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
                               TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
#ifdef TARGET_SPARC64
    TCGv_i32 r_asi = gen_get_asi(insn, addr);
#else
    TCGv_i32 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
#endif

    gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}

static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static TCGv get_src1(DisasContext *dc, unsigned int insn)
{
    unsigned int rs1 = GET_FIELD(insn, 13, 17);
    return gen_load_gpr(dc, rs1);
}

static TCGv get_src2(DisasContext *dc, unsigned int insn)
{
    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        TCGv t = get_temp_tl(dc);
        tcg_gen_movi_tl(t, simm);
        return t;
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        return gen_load_gpr(dc, rs2);
    }
}
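/* Format 3 instructions select their second operand with bit 13 (IS_IMM):
   i == 1 takes the sign-extended simm13 field, i == 0 takes register rs2,
   e.g. "add %g1, 4, %g2" versus "add %g1, %g3, %g2". */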
#ifdef TARGET_SPARC64
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the latter.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
                        gen_load_fpr_D(dc, rs),
                        gen_load_fpr_D(dc, rd));
    gen_store_fpr_D(dc, rd, dst);
}

static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(qd);
}
#ifndef CONFIG_USER_ONLY
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */

    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;

    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;

    default:
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    }

    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
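/* Worked example: edge8 (left) with a source address whose low bits are 5
   computes index = 5 << 3 = 40, so (TABL >> 40) & 0xff = 0xe0 -- the mask
   covering the bytes from offset 5 to the end of the aligned 8-byte group. */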
static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
{
    TCGv tmp = tcg_temp_new();

    tcg_gen_add_tl(tmp, s1, s2);
    tcg_gen_andi_tl(dst, tmp, -8);
    if (left) {
        tcg_gen_neg_tl(tmp, tmp);
    }
    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);

    tcg_temp_free(tmp);
}
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);

    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(shift);
}
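/* Together, alignaddr/faligndata implement unaligned 8-byte accesses:
   alignaddr rounds the address down to 8 bytes and latches the misalignment
   in GSR.align, then each faligndata concatenates two adjacent aligned
   doublewords and extracts the 8 bytes starting at that offset. */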
#endif

#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
{
    unsigned int opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2;
    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
    TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
    target_long simm;

    opc = GET_FIELD(insn, 0, 1);
    rd = GET_FIELD(insn, 2, 6);

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(dc, insn);
                    do_branch_reg(dc, target, insn, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                /* Special-case %g0 because that's the canonical nop.  */
                if (rd) {
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv t = gen_dest_gpr(dc, rd);
                    tcg_gen_movi_tl(t, value << 10);
                    gen_store_gpr(dc, rd, t);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /* CALL */
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv o7 = gen_dest_gpr(dc, 15);

            tcg_gen_movi_tl(o7, dc->pc);
            gen_store_gpr(dc, 15, o7);

            target += dc->pc;
#ifdef TARGET_SPARC64
            if (unlikely(AM_CHECK(dc))) {
                target &= 0xffffffffULL;
            }
#endif
            gen_mov_pc_npc(dc);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            TCGv cpu_dst = get_temp_tl(dc);
            TCGv cpu_tmp0;

            if (xop == 0x3a) {  /* generate trap */
                int cond = GET_FIELD(insn, 3, 6);
                TCGv_i32 trap;
                TCGLabel *l1 = NULL;
                int mask;

                if (cond == 0) {
                    /* Trap never.  */
                    break;
                }

                save_state(dc);

                if (cond != 8) {
                    /* Conditional trap.  */
                    DisasCompare cmp;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);
                    if (cc == 0) {
                        gen_compare(&cmp, 0, cond, dc);
                    } else if (cc == 2) {
                        gen_compare(&cmp, 1, cond, dc);
                    } else {
                        goto illegal_insn;
                    }
#else
                    gen_compare(&cmp, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
                                      cmp.c1, cmp.c2, l1);
                    free_compare(&cmp);
                }

                mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
                        ? UA2005_HTRAP_MASK : V8_TRAP_MASK);

                /* Don't use the normal temporaries, as they may well have
                   gone out of scope with the branch above.  While we're
                   doing that we might as well pre-truncate to 32-bit.  */
                trap = tcg_temp_new_i32();

                rs1 = GET_FIELD_SP(insn, 14, 18);
                if (IS_IMM) {
                    rs2 = GET_FIELD_SP(insn, 0, 6);
                    if (rs1 == 0) {
                        tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
                        /* Signal that the trap value is fully constant.  */
                        mask = 0;
                    } else {
                        TCGv t1 = gen_load_gpr(dc, rs1);
                        tcg_gen_trunc_tl_i32(trap, t1);
                        tcg_gen_addi_i32(trap, trap, rs2);
                    }
                } else {
                    TCGv t1, t2;
                    rs2 = GET_FIELD_SP(insn, 0, 4);
                    t1 = gen_load_gpr(dc, rs1);
                    t2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(t1, t1, t2);
                    tcg_gen_trunc_tl_i32(trap, t1);
                }
                if (mask != 0) {
                    tcg_gen_andi_i32(trap, trap, mask);
                    tcg_gen_addi_i32(trap, trap, TT_TRAP);
                }

                gen_helper_raise_exception(cpu_env, trap);
                tcg_temp_free_i32(trap);

                if (cond == 8) {
                    /* An unconditional trap ends the TB.  */
                    dc->is_br = 1;
                    goto jmp_insn;
                } else {
                    /* A conditional trap falls through to the next insn.  */
                    gen_set_label(l1);
                    break;
                }
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    /* Read Asr17 */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv t = gen_dest_gpr(dc, rd);
                        /* Read Asr17 for a Leon3 monoprocessor */
                        tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
                        gen_store_gpr(dc, rd, t);
                        break;
                    }
#endif
                    gen_store_gpr(dc, rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    update_psr(dc);
                    gen_helper_rdccr(cpu_dst, cpu_env);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;
                        TCGv_i32 r_const;

                        r_tickptr = tcg_temp_new_ptr();
                        r_const = tcg_const_i32(dc->mem_idx);
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUSPARCState, tick));
                        gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
                                                  r_const);
                        tcg_temp_free_ptr(r_tickptr);
                        tcg_temp_free_i32(r_const);
                        gen_store_gpr(dc, rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv t = gen_dest_gpr(dc, rd);
                        if (unlikely(AM_CHECK(dc))) {
                            tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
                        } else {
                            tcg_gen_movi_tl(t, dc->pc);
                        }
                        gen_store_gpr(dc, rd, t);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ld32s_tl(cpu_dst, cpu_env,
                                     offsetof(CPUSPARCState, softint));
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_store_gpr(dc, rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;
                        TCGv_i32 r_const;

                        r_tickptr = tcg_temp_new_ptr();
                        r_const = tcg_const_i32(dc->mem_idx);
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUSPARCState, stick));
                        gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
                                                  r_const);
                        tcg_temp_free_ptr(r_tickptr);
                        tcg_temp_free_i32(r_const);
                        gen_store_gpr(dc, rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_store_gpr(dc, rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc)) {
                    goto priv_insn;
                }
                update_psr(dc);
                gen_helper_rdpsr(cpu_dst, cpu_env);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_store_gpr(dc, rd, cpu_dst);
                break;
2830 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2831 if (!supervisor(dc)) {
2834 cpu_tmp0 = get_temp_tl(dc);
2835 #ifdef TARGET_SPARC64
2836 rs1 = GET_FIELD(insn, 13, 17);
2842 r_tsptr = tcg_temp_new_ptr();
2843 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2844 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2845 offsetof(trap_state, tpc));
2846 tcg_temp_free_ptr(r_tsptr);
2853 r_tsptr = tcg_temp_new_ptr();
2854 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2855 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2856 offsetof(trap_state, tnpc));
2857 tcg_temp_free_ptr(r_tsptr);
2864 r_tsptr = tcg_temp_new_ptr();
2865 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2866 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2867 offsetof(trap_state, tstate));
2868 tcg_temp_free_ptr(r_tsptr);
2873 TCGv_ptr r_tsptr = tcg_temp_new_ptr();
2875 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2876 tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
2877 offsetof(trap_state, tt));
2878 tcg_temp_free_ptr(r_tsptr);
2886 r_tickptr = tcg_temp_new_ptr();
2887 r_const = tcg_const_i32(dc->mem_idx);
2888 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2889 offsetof(CPUSPARCState, tick));
2890 gen_helper_tick_get_count(cpu_tmp0, cpu_env,
2891 r_tickptr, r_const);
2892 tcg_temp_free_ptr(r_tickptr);
2893 tcg_temp_free_i32(r_const);
2897 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2900 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2901 offsetof(CPUSPARCState, pstate));
2904 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2905 offsetof(CPUSPARCState, tl));
2908 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2909 offsetof(CPUSPARCState, psrpil));
2912 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2915 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2916 offsetof(CPUSPARCState, cansave));
2918 case 11: // canrestore
2919 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2920 offsetof(CPUSPARCState, canrestore));
2922 case 12: // cleanwin
2923 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2924 offsetof(CPUSPARCState, cleanwin));
2926 case 13: // otherwin
2927 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2928 offsetof(CPUSPARCState, otherwin));
2931 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2932 offsetof(CPUSPARCState, wstate));
2934 case 16: // UA2005 gl
2935 CHECK_IU_FEATURE(dc, GL);
2936 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2937 offsetof(CPUSPARCState, gl));
2939 case 26: // UA2005 strand status
2940 CHECK_IU_FEATURE(dc, HYPV);
2941 if (!hypervisor(dc))
2943 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2946 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2953 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2955 gen_store_gpr(dc, rd, cpu_tmp0);
2957 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2958 #ifdef TARGET_SPARC64
2960 gen_helper_flushw(cpu_env);
2962 if (!supervisor(dc))
2964 gen_store_gpr(dc, rd, cpu_tbr);
2968 } else if (xop == 0x34) { /* FPU Operations */
2969 if (gen_trap_ifnofpu(dc)) {
2972 gen_op_clear_ieee_excp_and_FTT();
2973 rs1 = GET_FIELD(insn, 13, 17);
2974 rs2 = GET_FIELD(insn, 27, 31);
2975 xop = GET_FIELD(insn, 18, 26);
2978 case 0x1: /* fmovs */
2979 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2980 gen_store_fpr_F(dc, rd, cpu_src1_32);
2982 case 0x5: /* fnegs */
2983 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2985 case 0x9: /* fabss */
2986 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2988 case 0x29: /* fsqrts */
2989 CHECK_FPU_FEATURE(dc, FSQRT);
2990 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2992 case 0x2a: /* fsqrtd */
2993 CHECK_FPU_FEATURE(dc, FSQRT);
2994 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2996 case 0x2b: /* fsqrtq */
2997 CHECK_FPU_FEATURE(dc, FLOAT128);
2998 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3000 case 0x41: /* fadds */
3001 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3003 case 0x42: /* faddd */
3004 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3006 case 0x43: /* faddq */
3007 CHECK_FPU_FEATURE(dc, FLOAT128);
3008 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3010 case 0x45: /* fsubs */
3011 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3013 case 0x46: /* fsubd */
3014 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3016 case 0x47: /* fsubq */
3017 CHECK_FPU_FEATURE(dc, FLOAT128);
3018 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3020 case 0x49: /* fmuls */
3021 CHECK_FPU_FEATURE(dc, FMUL);
3022 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3024 case 0x4a: /* fmuld */
3025 CHECK_FPU_FEATURE(dc, FMUL);
3026 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3028 case 0x4b: /* fmulq */
3029 CHECK_FPU_FEATURE(dc, FLOAT128);
3030 CHECK_FPU_FEATURE(dc, FMUL);
3031 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3033 case 0x4d: /* fdivs */
3034 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3036 case 0x4e: /* fdivd */
3037 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3039 case 0x4f: /* fdivq */
3040 CHECK_FPU_FEATURE(dc, FLOAT128);
3041 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3043 case 0x69: /* fsmuld */
3044 CHECK_FPU_FEATURE(dc, FSMULD);
3045 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3047 case 0x6e: /* fdmulq */
3048 CHECK_FPU_FEATURE(dc, FLOAT128);
3049 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3051 case 0xc4: /* fitos */
3052 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3054 case 0xc6: /* fdtos */
3055 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3057 case 0xc7: /* fqtos */
3058 CHECK_FPU_FEATURE(dc, FLOAT128);
3059 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3061 case 0xc8: /* fitod */
3062 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3064 case 0xc9: /* fstod */
3065 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3067 case 0xcb: /* fqtod */
3068 CHECK_FPU_FEATURE(dc, FLOAT128);
3069 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3071 case 0xcc: /* fitoq */
3072 CHECK_FPU_FEATURE(dc, FLOAT128);
3073 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3075 case 0xcd: /* fstoq */
3076 CHECK_FPU_FEATURE(dc, FLOAT128);
3077 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3079 case 0xce: /* fdtoq */
3080 CHECK_FPU_FEATURE(dc, FLOAT128);
3081 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3083 case 0xd1: /* fstoi */
3084 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3086 case 0xd2: /* fdtoi */
3087 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3089 case 0xd3: /* fqtoi */
3090 CHECK_FPU_FEATURE(dc, FLOAT128);
3091 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3093 #ifdef TARGET_SPARC64
3094 case 0x2: /* V9 fmovd */
3095 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3096 gen_store_fpr_D(dc, rd, cpu_src1_64);
3098 case 0x3: /* V9 fmovq */
3099 CHECK_FPU_FEATURE(dc, FLOAT128);
3100 gen_move_Q(rd, rs2);
3102 case 0x6: /* V9 fnegd */
3103 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3105 case 0x7: /* V9 fnegq */
3106 CHECK_FPU_FEATURE(dc, FLOAT128);
3107 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3109 case 0xa: /* V9 fabsd */
3110 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3112 case 0xb: /* V9 fabsq */
3113 CHECK_FPU_FEATURE(dc, FLOAT128);
3114 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3116 case 0x81: /* V9 fstox */
3117 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3119 case 0x82: /* V9 fdtox */
3120 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3122 case 0x83: /* V9 fqtox */
3123 CHECK_FPU_FEATURE(dc, FLOAT128);
3124 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3126 case 0x84: /* V9 fxtos */
3127 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3129 case 0x88: /* V9 fxtod */
3130 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3132 case 0x8c: /* V9 fxtoq */
3133 CHECK_FPU_FEATURE(dc, FLOAT128);
3134 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3140 } else if (xop == 0x35) { /* FPU Operations */
3141 #ifdef TARGET_SPARC64
3144 if (gen_trap_ifnofpu(dc)) {
3147 gen_op_clear_ieee_excp_and_FTT();
3148 rs1 = GET_FIELD(insn, 13, 17);
3149 rs2 = GET_FIELD(insn, 27, 31);
3150 xop = GET_FIELD(insn, 18, 26);
3153 #ifdef TARGET_SPARC64
3154 #define FMOVR(sz) \
3155 do { \
3156 DisasCompare cmp; \
3157 cond = GET_FIELD_SP(insn, 10, 12); \
3158 cpu_src1 = get_src1(dc, insn); \
3159 gen_compare_reg(&cmp, cond, cpu_src1); \
3160 gen_fmov##sz(dc, &cmp, rd, rs2); \
3161 free_compare(&cmp); \
3162 } while (0)
3164 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3165 FMOVR(s);
3166 break;
3167 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3168 FMOVR(d);
3169 break;
3170 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3171 CHECK_FPU_FEATURE(dc, FLOAT128);
3172 FMOVR(q);
3173 break;
3174 }
3175 #undef FMOVR
3177 switch (xop) {
3178 #ifdef TARGET_SPARC64
3179 #define FMOVCC(fcc, sz) \
3180 do { \
3181 DisasCompare cmp; \
3182 cond = GET_FIELD_SP(insn, 14, 17); \
3183 gen_fcompare(&cmp, fcc, cond); \
3184 gen_fmov##sz(dc, &cmp, rd, rs2); \
3185 free_compare(&cmp); \
3186 } while (0)
3188 case 0x001: /* V9 fmovscc %fcc0 */
3191 case 0x002: /* V9 fmovdcc %fcc0 */
3194 case 0x003: /* V9 fmovqcc %fcc0 */
3195 CHECK_FPU_FEATURE(dc, FLOAT128);
3198 case 0x041: /* V9 fmovscc %fcc1 */
3201 case 0x042: /* V9 fmovdcc %fcc1 */
3204 case 0x043: /* V9 fmovqcc %fcc1 */
3205 CHECK_FPU_FEATURE(dc, FLOAT128);
3208 case 0x081: /* V9 fmovscc %fcc2 */
3211 case 0x082: /* V9 fmovdcc %fcc2 */
3214 case 0x083: /* V9 fmovqcc %fcc2 */
3215 CHECK_FPU_FEATURE(dc, FLOAT128);
3218 case 0x0c1: /* V9 fmovscc %fcc3 */
3221 case 0x0c2: /* V9 fmovdcc %fcc3 */
3224 case 0x0c3: /* V9 fmovqcc %fcc3 */
3225 CHECK_FPU_FEATURE(dc, FLOAT128);
3229 #define FMOVCC(xcc, sz) \
3230 do { \
3231 DisasCompare cmp; \
3232 cond = GET_FIELD_SP(insn, 14, 17); \
3233 gen_compare(&cmp, xcc, cond, dc); \
3234 gen_fmov##sz(dc, &cmp, rd, rs2); \
3235 free_compare(&cmp); \
3236 } while (0)
3238 case 0x101: /* V9 fmovscc %icc */
3241 case 0x102: /* V9 fmovdcc %icc */
3244 case 0x103: /* V9 fmovqcc %icc */
3245 CHECK_FPU_FEATURE(dc, FLOAT128);
3248 case 0x181: /* V9 fmovscc %xcc */
3251 case 0x182: /* V9 fmovdcc %xcc */
3254 case 0x183: /* V9 fmovqcc %xcc */
3255 CHECK_FPU_FEATURE(dc, FLOAT128);
3260 case 0x51: /* fcmps, V9 %fcc */
3261 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3262 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3263 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3265 case 0x52: /* fcmpd, V9 %fcc */
3266 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3267 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3268 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3270 case 0x53: /* fcmpq, V9 %fcc */
3271 CHECK_FPU_FEATURE(dc, FLOAT128);
3272 gen_op_load_fpr_QT0(QFPREG(rs1));
3273 gen_op_load_fpr_QT1(QFPREG(rs2));
3274 gen_op_fcmpq(rd & 3);
3276 case 0x55: /* fcmpes, V9 %fcc */
3277 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3278 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3279 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3281 case 0x56: /* fcmped, V9 %fcc */
3282 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3283 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3284 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3286 case 0x57: /* fcmpeq, V9 %fcc */
3287 CHECK_FPU_FEATURE(dc, FLOAT128);
3288 gen_op_load_fpr_QT0(QFPREG(rs1));
3289 gen_op_load_fpr_QT1(QFPREG(rs2));
3290 gen_op_fcmpeq(rd & 3);
3295 } else if (xop == 0x2) {
3296 TCGv dst = gen_dest_gpr(dc, rd);
3297 rs1 = GET_FIELD(insn, 13, 17);
3299 /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3300 if (IS_IMM) { /* immediate */
3301 simm = GET_FIELDs(insn, 19, 31);
3302 tcg_gen_movi_tl(dst, simm);
3303 gen_store_gpr(dc, rd, dst);
3304 } else { /* register */
3305 rs2 = GET_FIELD(insn, 27, 31);
3307 tcg_gen_movi_tl(dst, 0);
3308 gen_store_gpr(dc, rd, dst);
3310 cpu_src2 = gen_load_gpr(dc, rs2);
3311 gen_store_gpr(dc, rd, cpu_src2);
3315 cpu_src1 = get_src1(dc, insn);
3316 if (IS_IMM) { /* immediate */
3317 simm = GET_FIELDs(insn, 19, 31);
3318 tcg_gen_ori_tl(dst, cpu_src1, simm);
3319 gen_store_gpr(dc, rd, dst);
3320 } else { /* register */
3321 rs2 = GET_FIELD(insn, 27, 31);
3323 /* mov shortcut: or x, %g0, y -> mov x, y */
3324 gen_store_gpr(dc, rd, cpu_src1);
3326 cpu_src2 = gen_load_gpr(dc, rs2);
3327 tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3328 gen_store_gpr(dc, rd, dst);
3332 #ifdef TARGET_SPARC64
3333 } else if (xop == 0x25) { /* sll, V9 sllx */
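/* Bit 12 of the instruction is the V9 X field: set means sllx with
   a 6-bit shift count, clear means the legacy 32-bit sll with a
   5-bit count, e.g. "sllx %o1, 45, %o2" vs "sll %o1, 13, %o2". */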
3334 cpu_src1 = get_src1(dc, insn);
3335 if (IS_IMM) { /* immediate */
3336 simm = GET_FIELDs(insn, 20, 31);
3337 if (insn & (1 << 12)) {
3338 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3340 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3342 } else { /* register */
3343 rs2 = GET_FIELD(insn, 27, 31);
3344 cpu_src2 = gen_load_gpr(dc, rs2);
3345 cpu_tmp0 = get_temp_tl(dc);
3346 if (insn & (1 << 12)) {
3347 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3349 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3351 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3353 gen_store_gpr(dc, rd, cpu_dst);
3354 } else if (xop == 0x26) { /* srl, V9 srlx */
3355 cpu_src1 = get_src1(dc, insn);
3356 if (IS_IMM) { /* immediate */
3357 simm = GET_FIELDs(insn, 20, 31);
3358 if (insn & (1 << 12)) {
3359 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3361 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3362 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3364 } else { /* register */
3365 rs2 = GET_FIELD(insn, 27, 31);
3366 cpu_src2 = gen_load_gpr(dc, rs2);
3367 cpu_tmp0 = get_temp_tl(dc);
3368 if (insn & (1 << 12)) {
3369 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3370 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3372 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3373 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3374 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3377 gen_store_gpr(dc, rd, cpu_dst);
3378 } else if (xop == 0x27) { /* sra, V9 srax */
3379 cpu_src1 = get_src1(dc, insn);
3380 if (IS_IMM) { /* immediate */
3381 simm = GET_FIELDs(insn, 20, 31);
3382 if (insn & (1 << 12)) {
3383 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3385 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3386 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3388 } else { /* register */
3389 rs2 = GET_FIELD(insn, 27, 31);
3390 cpu_src2 = gen_load_gpr(dc, rs2);
3391 cpu_tmp0 = get_temp_tl(dc);
3392 if (insn & (1 << 12)) {
3393 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3394 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3396 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3397 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3398 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3401 gen_store_gpr(dc, rd, cpu_dst);
3403 } else if (xop < 0x36) {
3405 cpu_src1 = get_src1(dc, insn);
3406 cpu_src2 = get_src2(dc, insn);
3407 switch (xop & ~0x10) {
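/* Bit 4 of xop selects the condition-code-setting variant of each
   ALU op (addcc vs add, andcc vs and, ...), so both forms share a
   case here and the bit is re-tested inside where it matters. */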
3410 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3411 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3412 dc->cc_op = CC_OP_ADD;
3414 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3418 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3420 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3421 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3422 dc->cc_op = CC_OP_LOGIC;
3426 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3428 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3429 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3430 dc->cc_op = CC_OP_LOGIC;
3434 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3436 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3437 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3438 dc->cc_op = CC_OP_LOGIC;
3443 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3444 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3445 dc->cc_op = CC_OP_SUB;
3447 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3450 case 0x5: /* andn */
3451 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3453 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3454 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3455 dc->cc_op = CC_OP_LOGIC;
3459 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3461 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3462 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3463 dc->cc_op = CC_OP_LOGIC;
3466 case 0x7: /* xorn */
3467 tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3469 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3470 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3471 dc->cc_op = CC_OP_LOGIC;
3474 case 0x8: /* addx, V9 addc */
3475 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3478 #ifdef TARGET_SPARC64
3479 case 0x9: /* V9 mulx */
3480 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3483 case 0xa: /* umul */
3484 CHECK_IU_FEATURE(dc, MUL);
3485 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3487 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3488 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3489 dc->cc_op = CC_OP_LOGIC;
3492 case 0xb: /* smul */
3493 CHECK_IU_FEATURE(dc, MUL);
3494 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3496 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3497 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3498 dc->cc_op = CC_OP_LOGIC;
3501 case 0xc: /* subx, V9 subc */
3502 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3505 #ifdef TARGET_SPARC64
3506 case 0xd: /* V9 udivx */
3507 gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3510 case 0xe: /* udiv */
3511 CHECK_IU_FEATURE(dc, DIV);
3513 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3515 dc->cc_op = CC_OP_DIV;
3517 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3521 case 0xf: /* sdiv */
3522 CHECK_IU_FEATURE(dc, DIV);
3524 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3526 dc->cc_op = CC_OP_DIV;
3528 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3535 gen_store_gpr(dc, rd, cpu_dst);
3537 cpu_src1 = get_src1(dc, insn);
3538 cpu_src2 = get_src2(dc, insn);
3540 case 0x20: /* taddcc */
3541 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3542 gen_store_gpr(dc, rd, cpu_dst);
3543 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3544 dc->cc_op = CC_OP_TADD;
3546 case 0x21: /* tsubcc */
3547 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3548 gen_store_gpr(dc, rd, cpu_dst);
3549 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3550 dc->cc_op = CC_OP_TSUB;
3552 case 0x22: /* taddcctv */
3553 gen_helper_taddcctv(cpu_dst, cpu_env,
3554 cpu_src1, cpu_src2);
3555 gen_store_gpr(dc, rd, cpu_dst);
3556 dc->cc_op = CC_OP_TADDTV;
3558 case 0x23: /* tsubcctv */
3559 gen_helper_tsubcctv(cpu_dst, cpu_env,
3560 cpu_src1, cpu_src2);
3561 gen_store_gpr(dc, rd, cpu_dst);
3562 dc->cc_op = CC_OP_TSUBTV;
3564 case 0x24: /* mulscc */
3566 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3567 gen_store_gpr(dc, rd, cpu_dst);
3568 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3569 dc->cc_op = CC_OP_ADD;
3571 #ifndef TARGET_SPARC64
3572 case 0x25: /* sll */
3573 if (IS_IMM) { /* immediate */
3574 simm = GET_FIELDs(insn, 20, 31);
3575 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3576 } else { /* register */
3577 cpu_tmp0 = get_temp_tl(dc);
3578 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3579 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3581 gen_store_gpr(dc, rd, cpu_dst);
3583 case 0x26: /* srl */
3584 if (IS_IMM) { /* immediate */
3585 simm = GET_FIELDs(insn, 20, 31);
3586 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3587 } else { /* register */
3588 cpu_tmp0 = get_temp_tl(dc);
3589 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3590 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3592 gen_store_gpr(dc, rd, cpu_dst);
3594 case 0x27: /* sra */
3595 if (IS_IMM) { /* immediate */
3596 simm = GET_FIELDs(insn, 20, 31);
3597 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3598 } else { /* register */
3599 cpu_tmp0 = get_temp_tl(dc);
3600 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3601 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3603 gen_store_gpr(dc, rd, cpu_dst);
3608 cpu_tmp0 = get_temp_tl(dc);
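/* The SPARC wr instructions write rs1 XOR (rs2 or simm13), which
   is why every case below starts with a tcg_gen_xor_tl. */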
3611 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3612 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3614 #ifndef TARGET_SPARC64
3615 case 0x01 ... 0x0f: /* undefined in the
3616 SPARCv8 manual, nop
3617 on the microSPARC
3618 II */
3619 case 0x10 ... 0x1f: /* implementation-dependent
3620 in the SPARCv8 manual,
3621 nop on the microSPARC
3622 II */
3623 if ((rd == 0x13) && (dc->def->features &
3624 CPU_FEATURE_POWERDOWN)) {
3625 /* LEON3 power-down */
3627 gen_helper_power_down(cpu_env);
3631 case 0x2: /* V9 wrccr */
3632 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3633 gen_helper_wrccr(cpu_env, cpu_tmp0);
3634 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3635 dc->cc_op = CC_OP_FLAGS;
3637 case 0x3: /* V9 wrasi */
3638 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3639 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
3640 tcg_gen_trunc_tl_i32(cpu_asi, cpu_tmp0);
3642 case 0x6: /* V9 wrfprs */
3643 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3644 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
3650 case 0xf: /* V9 sir, nop if user */
3651 #if !defined(CONFIG_USER_ONLY)
3652 if (supervisor(dc)) {
3657 case 0x13: /* Graphics Status */
3658 if (gen_trap_ifnofpu(dc)) {
3661 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3663 case 0x14: /* Softint set */
3664 if (!supervisor(dc))
3666 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3667 gen_helper_set_softint(cpu_env, cpu_tmp0);
3669 case 0x15: /* Softint clear */
3670 if (!supervisor(dc))
3672 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3673 gen_helper_clear_softint(cpu_env, cpu_tmp0);
3675 case 0x16: /* Softint write */
3676 if (!supervisor(dc))
3678 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3679 gen_helper_write_softint(cpu_env, cpu_tmp0);
3681 case 0x17: /* Tick compare */
3682 #if !defined(CONFIG_USER_ONLY)
3683 if (!supervisor(dc))
3689 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3691 r_tickptr = tcg_temp_new_ptr();
3692 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3693 offsetof(CPUSPARCState, tick));
3694 gen_helper_tick_set_limit(r_tickptr,
3696 tcg_temp_free_ptr(r_tickptr);
3699 case 0x18: /* System tick */
3700 #if !defined(CONFIG_USER_ONLY)
3701 if (!supervisor(dc))
3707 tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3709 r_tickptr = tcg_temp_new_ptr();
3710 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3711 offsetof(CPUSPARCState, stick));
3712 gen_helper_tick_set_count(r_tickptr,
3714 tcg_temp_free_ptr(r_tickptr);
3717 case 0x19: /* System tick compare */
3718 #if !defined(CONFIG_USER_ONLY)
3719 if (!supervisor(dc))
3725 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3727 r_tickptr = tcg_temp_new_ptr();
3728 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3729 offsetof(CPUSPARCState, stick));
3730 gen_helper_tick_set_limit(r_tickptr,
3732 tcg_temp_free_ptr(r_tickptr);
3736 case 0x10: /* Performance Control */
3737 case 0x11: /* Performance Instrumentation
3738 Counter */
3739 case 0x12: /* Dispatch Control */
3746 #if !defined(CONFIG_USER_ONLY)
3747 case 0x31: /* wrpsr, V9 saved, restored */
3749 if (!supervisor(dc))
3751 #ifdef TARGET_SPARC64
3754 gen_helper_saved(cpu_env);
3757 gen_helper_restored(cpu_env);
3759 case 2: /* UA2005 allclean */
3760 case 3: /* UA2005 otherw */
3761 case 4: /* UA2005 normalw */
3762 case 5: /* UA2005 invalw */
3768 cpu_tmp0 = get_temp_tl(dc);
3769 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3770 gen_helper_wrpsr(cpu_env, cpu_tmp0);
3771 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3772 dc->cc_op = CC_OP_FLAGS;
3780 case 0x32: /* wrwim, V9 wrpr */
3782 if (!supervisor(dc))
3784 cpu_tmp0 = get_temp_tl(dc);
3785 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3786 #ifdef TARGET_SPARC64
3792 r_tsptr = tcg_temp_new_ptr();
3793 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3794 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3795 offsetof(trap_state, tpc));
3796 tcg_temp_free_ptr(r_tsptr);
3803 r_tsptr = tcg_temp_new_ptr();
3804 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3805 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3806 offsetof(trap_state, tnpc));
3807 tcg_temp_free_ptr(r_tsptr);
3814 r_tsptr = tcg_temp_new_ptr();
3815 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3816 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3817 offsetof(trap_state,
3819 tcg_temp_free_ptr(r_tsptr);
3826 r_tsptr = tcg_temp_new_ptr();
3827 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3828 tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
3829 offsetof(trap_state, tt));
3830 tcg_temp_free_ptr(r_tsptr);
3837 r_tickptr = tcg_temp_new_ptr();
3838 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3839 offsetof(CPUSPARCState, tick));
3840 gen_helper_tick_set_count(r_tickptr,
3842 tcg_temp_free_ptr(r_tickptr);
3846 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3850 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3851 dc->npc = DYNAMIC_PC;
3855 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3856 offsetof(CPUSPARCState, tl));
3857 dc->npc = DYNAMIC_PC;
3860 gen_helper_wrpil(cpu_env, cpu_tmp0);
3863 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3866 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3867 offsetof(CPUSPARCState,
3870 case 11: // canrestore
3871 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3872 offsetof(CPUSPARCState,
3875 case 12: // cleanwin
3876 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3877 offsetof(CPUSPARCState,
3880 case 13: // otherwin
3881 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3882 offsetof(CPUSPARCState,
3886 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3887 offsetof(CPUSPARCState,
3890 case 16: // UA2005 gl
3891 CHECK_IU_FEATURE(dc, GL);
3892 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3893 offsetof(CPUSPARCState, gl));
3895 case 26: // UA2005 strand status
3896 CHECK_IU_FEATURE(dc, HYPV);
3897 if (!hypervisor(dc))
3899 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3905 tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
3906 if (dc->def->nwindows != 32) {
3907 tcg_gen_andi_tl(cpu_wim, cpu_wim,
3908 (1 << dc->def->nwindows) - 1);
3913 case 0x33: /* wrtbr, UA2005 wrhpr */
3915 #ifndef TARGET_SPARC64
3916 if (!supervisor(dc))
3918 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3920 CHECK_IU_FEATURE(dc, HYPV);
3921 if (!hypervisor(dc))
3923 cpu_tmp0 = get_temp_tl(dc);
3924 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3927 // XXX gen_op_wrhpstate();
3934 // XXX gen_op_wrhtstate();
3937 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3940 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3942 case 31: // hstick_cmpr
3946 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3947 r_tickptr = tcg_temp_new_ptr();
3948 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3949 offsetof(CPUSPARCState, hstick));
3950 gen_helper_tick_set_limit(r_tickptr,
3952 tcg_temp_free_ptr(r_tickptr);
3955 case 6: // hver readonly
3963 #ifdef TARGET_SPARC64
3964 case 0x2c: /* V9 movcc */
3966 int cc = GET_FIELD_SP(insn, 11, 12);
3967 int cond = GET_FIELD_SP(insn, 14, 17);
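/* Bit 18 of the opcode distinguishes moves on the integer condition
   codes (cc = 0 selects %icc, cc = 2 selects %xcc) from moves on
   the floating-point %fccN codes. */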
3971 if (insn & (1 << 18)) {
3973 gen_compare(&cmp, 0, cond, dc);
3974 } else if (cc == 2) {
3975 gen_compare(&cmp, 1, cond, dc);
3980 gen_fcompare(&cmp, cc, cond);
3983 /* The get_src2 above loaded the normal 13-bit
3984 immediate field, not the 11-bit field we have
3985 in movcc. But it did handle the reg case. */
3987 simm = GET_FIELD_SPs(insn, 0, 10);
3988 tcg_gen_movi_tl(cpu_src2, simm);
3991 dst = gen_load_gpr(dc, rd);
3992 tcg_gen_movcond_tl(cmp.cond, dst,
3996 gen_store_gpr(dc, rd, dst);
3999 case 0x2d: /* V9 sdivx */
4000 gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4001 gen_store_gpr(dc, rd, cpu_dst);
4003 case 0x2e: /* V9 popc */
4004 gen_helper_popc(cpu_dst, cpu_src2);
4005 gen_store_gpr(dc, rd, cpu_dst);
4007 case 0x2f: /* V9 movr */
4009 int cond = GET_FIELD_SP(insn, 10, 12);
4013 gen_compare_reg(&cmp, cond, cpu_src1);
4015 /* The get_src2 above loaded the normal 13-bit
4016 immediate field, not the 10-bit field we have
4017 in movr. But it did handle the reg case. */
4019 simm = GET_FIELD_SPs(insn, 0, 9);
4020 tcg_gen_movi_tl(cpu_src2, simm);
4023 dst = gen_load_gpr(dc, rd);
4024 tcg_gen_movcond_tl(cmp.cond, dst,
4028 gen_store_gpr(dc, rd, dst);
4036 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4037 #ifdef TARGET_SPARC64
4038 int opf = GET_FIELD_SP(insn, 5, 13);
4039 rs1 = GET_FIELD(insn, 13, 17);
4040 rs2 = GET_FIELD(insn, 27, 31);
4041 if (gen_trap_ifnofpu(dc)) {
4046 case 0x000: /* VIS I edge8cc */
4047 CHECK_FPU_FEATURE(dc, VIS1);
4048 cpu_src1 = gen_load_gpr(dc, rs1);
4049 cpu_src2 = gen_load_gpr(dc, rs2);
4050 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4051 gen_store_gpr(dc, rd, cpu_dst);
4053 case 0x001: /* VIS II edge8n */
4054 CHECK_FPU_FEATURE(dc, VIS2);
4055 cpu_src1 = gen_load_gpr(dc, rs1);
4056 cpu_src2 = gen_load_gpr(dc, rs2);
4057 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4058 gen_store_gpr(dc, rd, cpu_dst);
4060 case 0x002: /* VIS I edge8lcc */
4061 CHECK_FPU_FEATURE(dc, VIS1);
4062 cpu_src1 = gen_load_gpr(dc, rs1);
4063 cpu_src2 = gen_load_gpr(dc, rs2);
4064 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4065 gen_store_gpr(dc, rd, cpu_dst);
4067 case 0x003: /* VIS II edge8ln */
4068 CHECK_FPU_FEATURE(dc, VIS2);
4069 cpu_src1 = gen_load_gpr(dc, rs1);
4070 cpu_src2 = gen_load_gpr(dc, rs2);
4071 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4072 gen_store_gpr(dc, rd, cpu_dst);
4074 case 0x004: /* VIS I edge16cc */
4075 CHECK_FPU_FEATURE(dc, VIS1);
4076 cpu_src1 = gen_load_gpr(dc, rs1);
4077 cpu_src2 = gen_load_gpr(dc, rs2);
4078 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4079 gen_store_gpr(dc, rd, cpu_dst);
4081 case 0x005: /* VIS II edge16n */
4082 CHECK_FPU_FEATURE(dc, VIS2);
4083 cpu_src1 = gen_load_gpr(dc, rs1);
4084 cpu_src2 = gen_load_gpr(dc, rs2);
4085 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4086 gen_store_gpr(dc, rd, cpu_dst);
4088 case 0x006: /* VIS I edge16lcc */
4089 CHECK_FPU_FEATURE(dc, VIS1);
4090 cpu_src1 = gen_load_gpr(dc, rs1);
4091 cpu_src2 = gen_load_gpr(dc, rs2);
4092 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4093 gen_store_gpr(dc, rd, cpu_dst);
4095 case 0x007: /* VIS II edge16ln */
4096 CHECK_FPU_FEATURE(dc, VIS2);
4097 cpu_src1 = gen_load_gpr(dc, rs1);
4098 cpu_src2 = gen_load_gpr(dc, rs2);
4099 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4100 gen_store_gpr(dc, rd, cpu_dst);
4102 case 0x008: /* VIS I edge32cc */
4103 CHECK_FPU_FEATURE(dc, VIS1);
4104 cpu_src1 = gen_load_gpr(dc, rs1);
4105 cpu_src2 = gen_load_gpr(dc, rs2);
4106 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4107 gen_store_gpr(dc, rd, cpu_dst);
4109 case 0x009: /* VIS II edge32n */
4110 CHECK_FPU_FEATURE(dc, VIS2);
4111 cpu_src1 = gen_load_gpr(dc, rs1);
4112 cpu_src2 = gen_load_gpr(dc, rs2);
4113 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4114 gen_store_gpr(dc, rd, cpu_dst);
4116 case 0x00a: /* VIS I edge32lcc */
4117 CHECK_FPU_FEATURE(dc, VIS1);
4118 cpu_src1 = gen_load_gpr(dc, rs1);
4119 cpu_src2 = gen_load_gpr(dc, rs2);
4120 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4121 gen_store_gpr(dc, rd, cpu_dst);
4123 case 0x00b: /* VIS II edge32ln */
4124 CHECK_FPU_FEATURE(dc, VIS2);
4125 cpu_src1 = gen_load_gpr(dc, rs1);
4126 cpu_src2 = gen_load_gpr(dc, rs2);
4127 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4128 gen_store_gpr(dc, rd, cpu_dst);
4130 case 0x010: /* VIS I array8 */
4131 CHECK_FPU_FEATURE(dc, VIS1);
4132 cpu_src1 = gen_load_gpr(dc, rs1);
4133 cpu_src2 = gen_load_gpr(dc, rs2);
4134 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4135 gen_store_gpr(dc, rd, cpu_dst);
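/* array16 and array32 below produce the same blocked address as
   array8 and merely scale it by the element size (<< 1, << 2). */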
4137 case 0x012: /* VIS I array16 */
4138 CHECK_FPU_FEATURE(dc, VIS1);
4139 cpu_src1 = gen_load_gpr(dc, rs1);
4140 cpu_src2 = gen_load_gpr(dc, rs2);
4141 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4142 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4143 gen_store_gpr(dc, rd, cpu_dst);
4145 case 0x014: /* VIS I array32 */
4146 CHECK_FPU_FEATURE(dc, VIS1);
4147 cpu_src1 = gen_load_gpr(dc, rs1);
4148 cpu_src2 = gen_load_gpr(dc, rs2);
4149 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4150 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4151 gen_store_gpr(dc, rd, cpu_dst);
4153 case 0x018: /* VIS I alignaddr */
4154 CHECK_FPU_FEATURE(dc, VIS1);
4155 cpu_src1 = gen_load_gpr(dc, rs1);
4156 cpu_src2 = gen_load_gpr(dc, rs2);
4157 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4158 gen_store_gpr(dc, rd, cpu_dst);
4160 case 0x01a: /* VIS I alignaddrl */
4161 CHECK_FPU_FEATURE(dc, VIS1);
4162 cpu_src1 = gen_load_gpr(dc, rs1);
4163 cpu_src2 = gen_load_gpr(dc, rs2);
4164 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4165 gen_store_gpr(dc, rd, cpu_dst);
4167 case 0x019: /* VIS II bmask */
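/* bmask stores the sum both in rd and in GSR.mask (the upper 32
   bits of %gsr), where a subsequent bshuffle picks it up. */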
4168 CHECK_FPU_FEATURE(dc, VIS2);
4169 cpu_src1 = gen_load_gpr(dc, rs1);
4170 cpu_src2 = gen_load_gpr(dc, rs2);
4171 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4172 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4173 gen_store_gpr(dc, rd, cpu_dst);
4175 case 0x020: /* VIS I fcmple16 */
4176 CHECK_FPU_FEATURE(dc, VIS1);
4177 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4178 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4179 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4180 gen_store_gpr(dc, rd, cpu_dst);
4182 case 0x022: /* VIS I fcmpne16 */
4183 CHECK_FPU_FEATURE(dc, VIS1);
4184 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4185 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4186 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4187 gen_store_gpr(dc, rd, cpu_dst);
4189 case 0x024: /* VIS I fcmple32 */
4190 CHECK_FPU_FEATURE(dc, VIS1);
4191 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4192 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4193 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4194 gen_store_gpr(dc, rd, cpu_dst);
4196 case 0x026: /* VIS I fcmpne32 */
4197 CHECK_FPU_FEATURE(dc, VIS1);
4198 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4199 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4200 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4201 gen_store_gpr(dc, rd, cpu_dst);
4203 case 0x028: /* VIS I fcmpgt16 */
4204 CHECK_FPU_FEATURE(dc, VIS1);
4205 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4206 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4207 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4208 gen_store_gpr(dc, rd, cpu_dst);
4210 case 0x02a: /* VIS I fcmpeq16 */
4211 CHECK_FPU_FEATURE(dc, VIS1);
4212 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4213 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4214 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4215 gen_store_gpr(dc, rd, cpu_dst);
4217 case 0x02c: /* VIS I fcmpgt32 */
4218 CHECK_FPU_FEATURE(dc, VIS1);
4219 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4220 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4221 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4222 gen_store_gpr(dc, rd, cpu_dst);
4224 case 0x02e: /* VIS I fcmpeq32 */
4225 CHECK_FPU_FEATURE(dc, VIS1);
4226 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4227 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4228 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4229 gen_store_gpr(dc, rd, cpu_dst);
4231 case 0x031: /* VIS I fmul8x16 */
4232 CHECK_FPU_FEATURE(dc, VIS1);
4233 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4235 case 0x033: /* VIS I fmul8x16au */
4236 CHECK_FPU_FEATURE(dc, VIS1);
4237 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4239 case 0x035: /* VIS I fmul8x16al */
4240 CHECK_FPU_FEATURE(dc, VIS1);
4241 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4243 case 0x036: /* VIS I fmul8sux16 */
4244 CHECK_FPU_FEATURE(dc, VIS1);
4245 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4247 case 0x037: /* VIS I fmul8ulx16 */
4248 CHECK_FPU_FEATURE(dc, VIS1);
4249 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4251 case 0x038: /* VIS I fmuld8sux16 */
4252 CHECK_FPU_FEATURE(dc, VIS1);
4253 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4255 case 0x039: /* VIS I fmuld8ulx16 */
4256 CHECK_FPU_FEATURE(dc, VIS1);
4257 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4259 case 0x03a: /* VIS I fpack32 */
4260 CHECK_FPU_FEATURE(dc, VIS1);
4261 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4263 case 0x03b: /* VIS I fpack16 */
4264 CHECK_FPU_FEATURE(dc, VIS1);
4265 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4266 cpu_dst_32 = gen_dest_fpr_F(dc);
4267 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4268 gen_store_fpr_F(dc, rd, cpu_dst_32);
4270 case 0x03d: /* VIS I fpackfix */
4271 CHECK_FPU_FEATURE(dc, VIS1);
4272 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4273 cpu_dst_32 = gen_dest_fpr_F(dc);
4274 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4275 gen_store_fpr_F(dc, rd, cpu_dst_32);
4277 case 0x03e: /* VIS I pdist */
4278 CHECK_FPU_FEATURE(dc, VIS1);
4279 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4281 case 0x048: /* VIS I faligndata */
4282 CHECK_FPU_FEATURE(dc, VIS1);
4283 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4285 case 0x04b: /* VIS I fpmerge */
4286 CHECK_FPU_FEATURE(dc, VIS1);
4287 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4289 case 0x04c: /* VIS II bshuffle */
4290 CHECK_FPU_FEATURE(dc, VIS2);
4291 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4293 case 0x04d: /* VIS I fexpand */
4294 CHECK_FPU_FEATURE(dc, VIS1);
4295 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4297 case 0x050: /* VIS I fpadd16 */
4298 CHECK_FPU_FEATURE(dc, VIS1);
4299 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4301 case 0x051: /* VIS I fpadd16s */
4302 CHECK_FPU_FEATURE(dc, VIS1);
4303 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4305 case 0x052: /* VIS I fpadd32 */
4306 CHECK_FPU_FEATURE(dc, VIS1);
4307 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4309 case 0x053: /* VIS I fpadd32s */
4310 CHECK_FPU_FEATURE(dc, VIS1);
4311 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4313 case 0x054: /* VIS I fpsub16 */
4314 CHECK_FPU_FEATURE(dc, VIS1);
4315 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4317 case 0x055: /* VIS I fpsub16s */
4318 CHECK_FPU_FEATURE(dc, VIS1);
4319 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4321 case 0x056: /* VIS I fpsub32 */
4322 CHECK_FPU_FEATURE(dc, VIS1);
4323 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4325 case 0x057: /* VIS I fpsub32s */
4326 CHECK_FPU_FEATURE(dc, VIS1);
4327 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4329 case 0x060: /* VIS I fzero */
4330 CHECK_FPU_FEATURE(dc, VIS1);
4331 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4332 tcg_gen_movi_i64(cpu_dst_64, 0);
4333 gen_store_fpr_D(dc, rd, cpu_dst_64);
4335 case 0x061: /* VIS I fzeros */
4336 CHECK_FPU_FEATURE(dc, VIS1);
4337 cpu_dst_32 = gen_dest_fpr_F(dc);
4338 tcg_gen_movi_i32(cpu_dst_32, 0);
4339 gen_store_fpr_F(dc, rd, cpu_dst_32);
4341 case 0x062: /* VIS I fnor */
4342 CHECK_FPU_FEATURE(dc, VIS1);
4343 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4345 case 0x063: /* VIS I fnors */
4346 CHECK_FPU_FEATURE(dc, VIS1);
4347 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4349 case 0x064: /* VIS I fandnot2 */
4350 CHECK_FPU_FEATURE(dc, VIS1);
4351 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4353 case 0x065: /* VIS I fandnot2s */
4354 CHECK_FPU_FEATURE(dc, VIS1);
4355 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4357 case 0x066: /* VIS I fnot2 */
4358 CHECK_FPU_FEATURE(dc, VIS1);
4359 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4361 case 0x067: /* VIS I fnot2s */
4362 CHECK_FPU_FEATURE(dc, VIS1);
4363 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4365 case 0x068: /* VIS I fandnot1 */
4366 CHECK_FPU_FEATURE(dc, VIS1);
4367 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4369 case 0x069: /* VIS I fandnot1s */
4370 CHECK_FPU_FEATURE(dc, VIS1);
4371 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4373 case 0x06a: /* VIS I fnot1 */
4374 CHECK_FPU_FEATURE(dc, VIS1);
4375 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4377 case 0x06b: /* VIS I fnot1s */
4378 CHECK_FPU_FEATURE(dc, VIS1);
4379 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4381 case 0x06c: /* VIS I fxor */
4382 CHECK_FPU_FEATURE(dc, VIS1);
4383 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4385 case 0x06d: /* VIS I fxors */
4386 CHECK_FPU_FEATURE(dc, VIS1);
4387 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4389 case 0x06e: /* VIS I fnand */
4390 CHECK_FPU_FEATURE(dc, VIS1);
4391 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4393 case 0x06f: /* VIS I fnands */
4394 CHECK_FPU_FEATURE(dc, VIS1);
4395 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4397 case 0x070: /* VIS I fand */
4398 CHECK_FPU_FEATURE(dc, VIS1);
4399 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4401 case 0x071: /* VIS I fands */
4402 CHECK_FPU_FEATURE(dc, VIS1);
4403 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4405 case 0x072: /* VIS I fxnor */
4406 CHECK_FPU_FEATURE(dc, VIS1);
4407 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4409 case 0x073: /* VIS I fxnors */
4410 CHECK_FPU_FEATURE(dc, VIS1);
4411 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4413 case 0x074: /* VIS I fsrc1 */
4414 CHECK_FPU_FEATURE(dc, VIS1);
4415 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4416 gen_store_fpr_D(dc, rd, cpu_src1_64);
4418 case 0x075: /* VIS I fsrc1s */
4419 CHECK_FPU_FEATURE(dc, VIS1);
4420 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4421 gen_store_fpr_F(dc, rd, cpu_src1_32);
4423 case 0x076: /* VIS I fornot2 */
4424 CHECK_FPU_FEATURE(dc, VIS1);
4425 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4427 case 0x077: /* VIS I fornot2s */
4428 CHECK_FPU_FEATURE(dc, VIS1);
4429 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4431 case 0x078: /* VIS I fsrc2 */
4432 CHECK_FPU_FEATURE(dc, VIS1);
4433 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4434 gen_store_fpr_D(dc, rd, cpu_src1_64);
4436 case 0x079: /* VIS I fsrc2s */
4437 CHECK_FPU_FEATURE(dc, VIS1);
4438 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4439 gen_store_fpr_F(dc, rd, cpu_src1_32);
4441 case 0x07a: /* VIS I fornot1 */
4442 CHECK_FPU_FEATURE(dc, VIS1);
4443 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4445 case 0x07b: /* VIS I fornot1s */
4446 CHECK_FPU_FEATURE(dc, VIS1);
4447 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4449 case 0x07c: /* VIS I for */
4450 CHECK_FPU_FEATURE(dc, VIS1);
4451 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4453 case 0x07d: /* VIS I fors */
4454 CHECK_FPU_FEATURE(dc, VIS1);
4455 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4457 case 0x07e: /* VIS I fone */
4458 CHECK_FPU_FEATURE(dc, VIS1);
4459 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4460 tcg_gen_movi_i64(cpu_dst_64, -1);
4461 gen_store_fpr_D(dc, rd, cpu_dst_64);
4463 case 0x07f: /* VIS I fones */
4464 CHECK_FPU_FEATURE(dc, VIS1);
4465 cpu_dst_32 = gen_dest_fpr_F(dc);
4466 tcg_gen_movi_i32(cpu_dst_32, -1);
4467 gen_store_fpr_F(dc, rd, cpu_dst_32);
4469 case 0x080: /* VIS I shutdown */
4470 case 0x081: /* VIS II siam */
4479 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4480 #ifdef TARGET_SPARC64
4485 #ifdef TARGET_SPARC64
4486 } else if (xop == 0x39) { /* V9 return */
4490 cpu_src1 = get_src1(dc, insn);
4491 cpu_tmp0 = get_temp_tl(dc);
4492 if (IS_IMM) { /* immediate */
4493 simm = GET_FIELDs(insn, 19, 31);
4494 tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4495 } else { /* register */
4496 rs2 = GET_FIELD(insn, 27, 31);
4498 cpu_src2 = gen_load_gpr(dc, rs2);
4499 tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4501 tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4504 gen_helper_restore(cpu_env);
4506 r_const = tcg_const_i32(3);
4507 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4508 tcg_temp_free_i32(r_const);
4509 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4510 dc->npc = DYNAMIC_PC;
4514 cpu_src1 = get_src1(dc, insn);
4515 cpu_tmp0 = get_temp_tl(dc);
4516 if (IS_IMM) { /* immediate */
4517 simm = GET_FIELDs(insn, 19, 31);
4518 tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4519 } else { /* register */
4520 rs2 = GET_FIELD(insn, 27, 31);
4522 cpu_src2 = gen_load_gpr(dc, rs2);
4523 tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4525 tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4529 case 0x38: /* jmpl */
4534 t = gen_dest_gpr(dc, rd);
4535 tcg_gen_movi_tl(t, dc->pc);
4536 gen_store_gpr(dc, rd, t);
4538 r_const = tcg_const_i32(3);
4539 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4540 tcg_temp_free_i32(r_const);
4541 gen_address_mask(dc, cpu_tmp0);
4542 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4543 dc->npc = DYNAMIC_PC;
4546 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4547 case 0x39: /* rett, V9 return */
4551 if (!supervisor(dc))
4554 r_const = tcg_const_i32(3);
4555 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4556 tcg_temp_free_i32(r_const);
4557 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4558 dc->npc = DYNAMIC_PC;
4559 gen_helper_rett(cpu_env);
4563 case 0x3b: /* flush */
4564 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4568 case 0x3c: /* save */
4570 gen_helper_save(cpu_env);
4571 gen_store_gpr(dc, rd, cpu_tmp0);
4573 case 0x3d: /* restore */
4575 gen_helper_restore(cpu_env);
4576 gen_store_gpr(dc, rd, cpu_tmp0);
4578 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4579 case 0x3e: /* V9 done/retry */
4583 if (!supervisor(dc))
4585 dc->npc = DYNAMIC_PC;
4586 dc->pc = DYNAMIC_PC;
4587 gen_helper_done(cpu_env);
4590 if (!supervisor(dc))
4592 dc->npc = DYNAMIC_PC;
4593 dc->pc = DYNAMIC_PC;
4594 gen_helper_retry(cpu_env);
4609 case 3: /* load/store instructions */
4611 unsigned int xop = GET_FIELD(insn, 7, 12);
4612 /* ??? gen_address_mask prevents us from using a source
4613 register directly. Always generate a temporary. */
4614 TCGv cpu_addr = get_temp_tl(dc);
4616 tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
4617 if (xop == 0x3c || xop == 0x3e) {
4618 /* V9 casa/casxa : no offset */
4619 } else if (IS_IMM) { /* immediate */
4620 simm = GET_FIELDs(insn, 19, 31);
4622 tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
4624 } else { /* register */
4625 rs2 = GET_FIELD(insn, 27, 31);
4627 tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
4630 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4631 (xop > 0x17 && xop <= 0x1d ) ||
4632 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
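/* This opcode-range test selects the integer loads plus the
   load-like ldstub/swap forms; stores and FP accesses are handled
   by the later branches. */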
4633 TCGv cpu_val = gen_dest_gpr(dc, rd);
4636 case 0x0: /* ld, V9 lduw, load unsigned word */
4637 gen_address_mask(dc, cpu_addr);
4638 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4640 case 0x1: /* ldub, load unsigned byte */
4641 gen_address_mask(dc, cpu_addr);
4642 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4644 case 0x2: /* lduh, load unsigned halfword */
4645 gen_address_mask(dc, cpu_addr);
4646 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4648 case 0x3: /* ldd, load double word */
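/* ldd fills an even/odd register pair from a single 64-bit access:
   the word at the lower address goes to r[rd], the word at the
   higher address to r[rd + 1]. */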
4656 r_const = tcg_const_i32(7);
4657 /* XXX remove alignment check */
4658 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4659 tcg_temp_free_i32(r_const);
4660 gen_address_mask(dc, cpu_addr);
4661 t64 = tcg_temp_new_i64();
4662 tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4663 tcg_gen_trunc_i64_tl(cpu_val, t64);
4664 tcg_gen_ext32u_tl(cpu_val, cpu_val);
4665 gen_store_gpr(dc, rd + 1, cpu_val);
4666 tcg_gen_shri_i64(t64, t64, 32);
4667 tcg_gen_trunc_i64_tl(cpu_val, t64);
4668 tcg_temp_free_i64(t64);
4669 tcg_gen_ext32u_tl(cpu_val, cpu_val);
4672 case 0x9: /* ldsb, load signed byte */
4673 gen_address_mask(dc, cpu_addr);
4674 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4676 case 0xa: /* ldsh, load signed halfword */
4677 gen_address_mask(dc, cpu_addr);
4678 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4680 case 0xd: /* ldstub -- XXX: should be atomic */
4683 TCGv tmp = tcg_temp_new();
4685 gen_address_mask(dc, cpu_addr);
4686 tcg_gen_qemu_ld8u(tmp, cpu_addr, dc->mem_idx);
4687 r_const = tcg_const_tl(0xff);
4688 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4689 tcg_gen_mov_tl(cpu_val, tmp);
4690 tcg_temp_free(r_const);
4695 /* swap, swap register with memory. Also should be atomic */
4697 TCGv t0 = get_temp_tl(dc);
4698 CHECK_IU_FEATURE(dc, SWAP);
4699 cpu_src1 = gen_load_gpr(dc, rd);
4700 gen_address_mask(dc, cpu_addr);
4701 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4702 tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4703 tcg_gen_mov_tl(cpu_val, t0);
4706 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4707 case 0x10: /* lda, V9 lduwa, load word alternate */
4708 #ifndef TARGET_SPARC64
4711 if (!supervisor(dc))
4715 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4717 case 0x11: /* lduba, load unsigned byte alternate */
4718 #ifndef TARGET_SPARC64
4721 if (!supervisor(dc))
4725 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4727 case 0x12: /* lduha, load unsigned halfword alternate */
4728 #ifndef TARGET_SPARC64
4731 if (!supervisor(dc))
4735 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4737 case 0x13: /* ldda, load double word alternate */
4738 #ifndef TARGET_SPARC64
4741 if (!supervisor(dc))
4747 gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4749 case 0x19: /* ldsba, load signed byte alternate */
4750 #ifndef TARGET_SPARC64
4753 if (!supervisor(dc))
4757 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4759 case 0x1a: /* ldsha, load signed halfword alternate */
4760 #ifndef TARGET_SPARC64
4763 if (!supervisor(dc))
4767 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4769 case 0x1d: /* ldstuba -- XXX: should be atomic */
4770 #ifndef TARGET_SPARC64
4773 if (!supervisor(dc))
4777 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4779 case 0x1f: /* swapa, swap reg with alt. memory. Also
4780 should be atomic */
4781 CHECK_IU_FEATURE(dc, SWAP);
4782 #ifndef TARGET_SPARC64
4785 if (!supervisor(dc))
4789 cpu_src1 = gen_load_gpr(dc, rd);
4790 gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4793 #ifndef TARGET_SPARC64
4794 case 0x30: /* ldc */
4795 case 0x31: /* ldcsr */
4796 case 0x33: /* lddc */
4800 #ifdef TARGET_SPARC64
4801 case 0x08: /* V9 ldsw */
4802 gen_address_mask(dc, cpu_addr);
4803 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4805 case 0x0b: /* V9 ldx */
4806 gen_address_mask(dc, cpu_addr);
4807 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4809 case 0x18: /* V9 ldswa */
4811 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4813 case 0x1b: /* V9 ldxa */
4815 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4817 case 0x2d: /* V9 prefetch, no effect */
4819 case 0x30: /* V9 ldfa */
4820 if (gen_trap_ifnofpu(dc)) {
4824 gen_ldf_asi(cpu_addr, insn, 4, rd);
4825 gen_update_fprs_dirty(rd);
4827 case 0x33: /* V9 lddfa */
4828 if (gen_trap_ifnofpu(dc)) {
4832 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4833 gen_update_fprs_dirty(DFPREG(rd));
4835 case 0x3d: /* V9 prefetcha, no effect */
4837 case 0x32: /* V9 ldqfa */
4838 CHECK_FPU_FEATURE(dc, FLOAT128);
4839 if (gen_trap_ifnofpu(dc)) {
4843 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4844 gen_update_fprs_dirty(QFPREG(rd));
4850 gen_store_gpr(dc, rd, cpu_val);
4851 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4854 } else if (xop >= 0x20 && xop < 0x24) {
4857 if (gen_trap_ifnofpu(dc)) {
4862 case 0x20: /* ldf, load fpreg */
4863 gen_address_mask(dc, cpu_addr);
4864 t0 = get_temp_tl(dc);
4865 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4866 cpu_dst_32 = gen_dest_fpr_F(dc);
4867 tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4868 gen_store_fpr_F(dc, rd, cpu_dst_32);
4870 case 0x21: /* ldfsr, V9 ldxfsr */
4871 #ifdef TARGET_SPARC64
4872 gen_address_mask(dc, cpu_addr);
4874 TCGv_i64 t64 = tcg_temp_new_i64();
4875 tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4876 gen_helper_ldxfsr(cpu_env, t64);
4877 tcg_temp_free_i64(t64);
4881 cpu_dst_32 = get_temp_i32(dc);
4882 t0 = get_temp_tl(dc);
4883 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4884 tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4885 gen_helper_ldfsr(cpu_env, cpu_dst_32);
4887 case 0x22: /* ldqf, load quad fpreg */
4891 CHECK_FPU_FEATURE(dc, FLOAT128);
4892 r_const = tcg_const_i32(dc->mem_idx);
4893 gen_address_mask(dc, cpu_addr);
4894 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4895 tcg_temp_free_i32(r_const);
4896 gen_op_store_QT0_fpr(QFPREG(rd));
4897 gen_update_fprs_dirty(QFPREG(rd));
4900 case 0x23: /* lddf, load double fpreg */
4901 gen_address_mask(dc, cpu_addr);
4902 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4903 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4904 gen_store_fpr_D(dc, rd, cpu_dst_64);
4909 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4910 xop == 0xe || xop == 0x1e) {
4911 TCGv cpu_val = gen_load_gpr(dc, rd);
4914 case 0x4: /* st, store word */
4915 gen_address_mask(dc, cpu_addr);
4916 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4918 case 0x5: /* stb, store byte */
4919 gen_address_mask(dc, cpu_addr);
4920 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4922 case 0x6: /* sth, store halfword */
4923 gen_address_mask(dc, cpu_addr);
4924 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4926 case 0x7: /* std, store double word */
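/* std is the mirror image of ldd: r[rd] supplies the high word and
   r[rd + 1] the low word of a single 64-bit store. */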
4935 gen_address_mask(dc, cpu_addr);
4936 r_const = tcg_const_i32(7);
4937 /* XXX remove alignment check */
4938 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4939 tcg_temp_free_i32(r_const);
4940 lo = gen_load_gpr(dc, rd + 1);
4942 t64 = tcg_temp_new_i64();
4943 tcg_gen_concat_tl_i64(t64, lo, cpu_val);
4944 tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
4945 tcg_temp_free_i64(t64);
4948 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4949 case 0x14: /* sta, V9 stwa, store word alternate */
4950 #ifndef TARGET_SPARC64
4953 if (!supervisor(dc))
4957 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4958 dc->npc = DYNAMIC_PC;
4960 case 0x15: /* stba, store byte alternate */
4961 #ifndef TARGET_SPARC64
4964 if (!supervisor(dc))
4968 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4969 dc->npc = DYNAMIC_PC;
4971 case 0x16: /* stha, store halfword alternate */
4972 #ifndef TARGET_SPARC64
4975 if (!supervisor(dc))
4979 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4980 dc->npc = DYNAMIC_PC;
4982 case 0x17: /* stda, store double word alternate */
4983 #ifndef TARGET_SPARC64
4986 if (!supervisor(dc))
4993 gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
4997 #ifdef TARGET_SPARC64
4998 case 0x0e: /* V9 stx */
4999 gen_address_mask(dc, cpu_addr);
5000 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5002 case 0x1e: /* V9 stxa */
5004 gen_st_asi(cpu_val, cpu_addr, insn, 8);
5005 dc->npc = DYNAMIC_PC;
5011 } else if (xop > 0x23 && xop < 0x28) {
5012 if (gen_trap_ifnofpu(dc)) {
5017 case 0x24: /* stf, store fpreg */
5019 TCGv t = get_temp_tl(dc);
5020 gen_address_mask(dc, cpu_addr);
5021 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5022 tcg_gen_ext_i32_tl(t, cpu_src1_32);
5023 tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5026 case 0x25: /* stfsr, V9 stxfsr */
5028 TCGv t = get_temp_tl(dc);
5030 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
5031 #ifdef TARGET_SPARC64
5032 gen_address_mask(dc, cpu_addr);
5034 tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
5038 tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5042 #ifdef TARGET_SPARC64
5043 /* V9 stqf, store quad fpreg */
5047 CHECK_FPU_FEATURE(dc, FLOAT128);
5048 gen_op_load_fpr_QT0(QFPREG(rd));
5049 r_const = tcg_const_i32(dc->mem_idx);
5050 gen_address_mask(dc, cpu_addr);
5051 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5052 tcg_temp_free_i32(r_const);
5055 #else /* !TARGET_SPARC64 */
5056 /* stdfq, store floating point queue */
5057 #if defined(CONFIG_USER_ONLY)
5060 if (!supervisor(dc))
5062 if (gen_trap_ifnofpu(dc)) {
5068 case 0x27: /* stdf, store double fpreg */
5069 gen_address_mask(dc, cpu_addr);
5070 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5071 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5076 } else if (xop > 0x33 && xop < 0x3f) {
5079 #ifdef TARGET_SPARC64
5080 case 0x34: /* V9 stfa */
5081 if (gen_trap_ifnofpu(dc)) {
5084 gen_stf_asi(cpu_addr, insn, 4, rd);
5086 case 0x36: /* V9 stqfa */
5090 CHECK_FPU_FEATURE(dc, FLOAT128);
5091 if (gen_trap_ifnofpu(dc)) {
5094 r_const = tcg_const_i32(7);
5095 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5096 tcg_temp_free_i32(r_const);
5097 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5100 case 0x37: /* V9 stdfa */
5101 if (gen_trap_ifnofpu(dc)) {
5104 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5106 case 0x3e: /* V9 casxa */
5107 rs2 = GET_FIELD(insn, 27, 31);
5108 cpu_src2 = gen_load_gpr(dc, rs2);
5109 gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5112 case 0x34: /* stc */
5113 case 0x35: /* stcsr */
5114 case 0x36: /* stdcq */
5115 case 0x37: /* stdc */
5118 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5119 case 0x3c: /* V9 or LEON3 casa */
5120 #ifndef TARGET_SPARC64
5121 CHECK_IU_FEATURE(dc, CASA);
5125 /* LEON3 allows CASA from user space with ASI 0xa */
5126 if ((GET_FIELD(insn, 19, 26) != 0xa) && !supervisor(dc)) {
5130 rs2 = GET_FIELD(insn, 27, 31);
5131 cpu_src2 = gen_load_gpr(dc, rs2);
5132 gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5144 /* default case for non-jump instructions */
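/* pc <- npc and npc <- npc + 4: the pair advances in lockstep,
   modelling SPARC's architectural delay slot. */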
5145 if (dc->npc == DYNAMIC_PC) {
5146 dc->pc = DYNAMIC_PC;
5148 } else if (dc->npc == JUMP_PC) {
5149 /* we can do a static jump */
5150 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5154 dc->npc = dc->npc + 4;
5159 gen_exception(dc, TT_ILL_INSN);
5162 gen_exception(dc, TT_UNIMP_FLUSH);
5164 #if !defined(CONFIG_USER_ONLY)
5166 gen_exception(dc, TT_PRIV_INSN);
5170 gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5172 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5174 gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5177 #ifndef TARGET_SPARC64
5179 gen_exception(dc, TT_NCP_INSN);
5183 if (dc->n_t32 != 0) {
5185 for (i = dc->n_t32 - 1; i >= 0; --i) {
5186 tcg_temp_free_i32(dc->t32[i]);
5190 if (dc->n_ttl != 0) {
5192 for (i = dc->n_ttl - 1; i >= 0; --i) {
5193 tcg_temp_free(dc->ttl[i]);
5199 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5201 SPARCCPU *cpu = sparc_env_get_cpu(env);
5202 CPUState *cs = CPU(cpu);
5203 target_ulong pc_start, last_pc;
5204 DisasContext dc1, *dc = &dc1;
5209 memset(dc, 0, sizeof(DisasContext));
5214 dc->npc = (target_ulong) tb->cs_base;
5215 dc->cc_op = CC_OP_DYNAMIC;
5216 dc->mem_idx = tb->flags & TB_FLAG_MMU_MASK;
5218 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5219 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5220 dc->singlestep = (cs->singlestep_enabled || singlestep);
5223 max_insns = tb->cflags & CF_COUNT_MASK;
5224 if (max_insns == 0) {
5225 max_insns = CF_COUNT_MASK;
5227 if (max_insns > TCG_MAX_INSNS) {
5228 max_insns = TCG_MAX_INSNS;
5233 if (dc->npc & JUMP_PC) {
5234 assert(dc->jump_pc[1] == dc->pc + 4);
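/* Because jump_pc[1] is always pc + 4, recording jump_pc[0] tagged
   with the JUMP_PC bit is enough for restore_state_to_opc() to
   rebuild both candidate npc values. */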
5235 tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5237 tcg_gen_insn_start(dc->pc, dc->npc);
5242 if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
5243 if (dc->pc != pc_start) {
5246 gen_helper_debug(cpu_env);
5252 if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
5256 insn = cpu_ldl_code(env, dc->pc);
5258 disas_sparc_insn(dc, insn);
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while (!tcg_op_buf_full() &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
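    /* Only a fully static pc/npc pair allows chaining straight into the
     * successor TB via gen_goto_tb(); in the dynamic case the known
     * values were flushed to cpu_pc/cpu_npc above and execution returns
     * to the main loop, which looks the next TB up by hash. */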
    gen_tb_end(tb, num_insns);

    tb->size = last_pc + 4 - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    static int inited;
    static const char gregnames[32][4] = {
        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
    };
    static const char fregnames[32][4] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
#ifdef TARGET_SPARC64
        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
        { &cpu_asi, offsetof(CPUSPARCState, asi), "asi" },
        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
#else
        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
#endif
        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
    };

    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
#ifdef TARGET_SPARC64
        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
          "hstick_cmpr" },
        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
#endif
        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
#ifndef CONFIG_USER_ONLY
        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
#endif
    };

    unsigned int i;

    /* init various static tables */
    if (inited) {
        return;
    }
    inited = 1;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");

    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
        *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
    }

    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
        *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
    }
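    /* %g0 is the architectural zero register, so it gets no TCG global;
     * the gpr access helpers earlier in this file special-case regno 0.
     * The windowed registers %o0-%i7 are reached through regwptr because
     * register-window moves change their backing storage, which therefore
     * has no fixed CPUSPARCState offset. */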
    TCGV_UNUSED(cpu_regs[0]);
    for (i = 1; i < 8; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUSPARCState, gregs[i]),
                                         gregnames[i]);
    }

    for (i = 8; i < 32; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
                                         (i - 8) * sizeof(target_ulong),
                                         gregnames[i]);
    }

    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }
}

void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    target_ulong pc = data[0];
    target_ulong npc = data[1];

    env->pc = pc;
    if (npc == DYNAMIC_PC) {
        /* dynamic NPC: already stored */
    } else if (npc & JUMP_PC) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = npc & ~3;
        } else {
            env->npc = pc + 4;
        }
    } else {
        env->npc = npc;
    }
}