/*
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
/* V9 registers */
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    TCGv ttl[5];
    int n_t32;
    int n_ttl;
} DisasContext;
// This macro uses non-native bit order: bit 0 is the MSB (bit 31)
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the bit order of the manuals, i.e. bit 0 is 2^0 (the LSB)
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
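
/* Worked example (illustrative): in a format-3 instruction word the rd
   field occupies bits 29:25, which in non-native order is fields 2..6:
       rd = GET_FIELD(insn, 2, 6)    ==  (insn >> 25) & 0x1f
   or equivalently, in manual bit order:
       rd = GET_FIELD_SP(insn, 25, 29)
   GET_FIELDs() additionally sign-extends, e.g. the simm13 field is
   GET_FIELDs(insn, 19, 31), so an all-ones field yields -1. */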
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
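
/* Example: on SPARC64 the low bit of the 5-bit rd field acts as bit 5 of
   the double-register number, so DFPREG(1) == 32 (%f32) while
   DFPREG(2) == 2 (%f2).  Pre-V9 CPUs only have %f0..%f30, so the low bit
   is simply masked off. */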
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
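
/* e.g. sign_extend(0x7ffff, 19) == -1: a 19-bit branch displacement field
   with all bits set becomes the word offset -1 (shifted left by 2 later). */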
#define IS_IMM (insn & (1<<13))

static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
/* floating point registers moves */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = tcg_temp_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}
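
/* Note on packing (applies to the _F helpers above): each element of
   cpu_fpr[] holds a pair of single-precision registers, with the
   even-numbered register in bits 63:32 and the odd one in bits 31:0;
   e.g. %f0 is the high half of cpu_fpr[0] and %f1 the low half. */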
static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}

static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}
static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        TCGv t = get_temp_tl(dc);
        if (reg == 0) {
            tcg_gen_movi_tl(t, 0);
        } else {
            tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
        return t;
    } else {
        return cpu_gregs[reg];
    }
}

static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        if (reg < 8) {
            tcg_gen_mov_tl(cpu_gregs[reg], v);
        } else {
            tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
    }
}

static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        return get_temp_tl(dc);
    } else {
        return cpu_gregs[reg];
    }
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
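
/* Note: the (tcg_target_long)tb + tb_num value returned through
   tcg_gen_exit_tb() lets the execution loop patch this TB's exit tb_num
   (0 or 1) to jump straight to the next TB ("block chaining"); the plain
   tcg_gen_exit_tb(0) on the cross-page path forgoes that optimization. */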
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
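
/* For reference: V8 ADDX computes rd = rs1 + rs2 + PSR.C, so a 64-bit add
   on a 32-bit guest is the usual two-instruction sequence (illustrative):
       addcc %o0, %o2, %o4    ! low words; sets C
       addx  %o1, %o3, %o5    ! high words; consumes C
   The switch above merely picks the cheapest way to recover C from the
   last cc-setting operation instead of always calling the helper. */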
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the subtraction that generated the carry in the
               first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero;

    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
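
/* MULScc is one step of the V8 shift-and-add multiply: %y's bit 0 decides
   whether rs2 or 0 is added; rs1 (the running product) is shifted right
   one bit with N ^ V from the previous step entering bit 31, while the
   bit shifted out of rs1 refills the top of %y.  A full 32x32 multiply is
   32 such steps plus a final fixup, e.g. (illustrative sketch):
       mov    %o1, %y          ! load multiplier
       mulscc %o4, %o2, %o4    ! repeated 32 times
       mulscc %o4, %g0, %o4    ! final shift step */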
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
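
/* Both multiplies produce a full 64-bit product; the code above stores the
   high 32 bits in %y and the (truncated) product in rd.  E.g. for UMUL,
   0xffffffff * 2 == 0x1fffffffe, so on a 32-bit guest rd = 0xfffffffe
   and %y = 1. */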
// 1:
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0:
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
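
/* gen_branch_a handles the annulled ("branch,a") case: when the condition
   holds, the delay slot at pc2 executes and control then goes to the
   branch target pc1 (via gen_goto_tb(..., pc2, pc1)); when it fails, the
   delay slot is annulled and execution resumes at pc2 + 4.  gen_branch2 is
   the non-annulled variant, where the delay slot runs on both paths. */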
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}

static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}

static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}

static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };
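
    /* Example: after a logic op (e.g. andcc), C and V are both zero, so
       the signed "lt" test N ^ V degenerates to N, i.e. cc_dst < 0, which
       is the TCG_COND_LT entry above; "ltu" (just C) can never be true,
       hence TCG_COND_NEVER. */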
    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7: /* overflow */
        case 15: /* !overflow */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}

static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}

#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
#endif
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                tcg_gen_movi_tl(cpu_npc, dc->jump_pc[1]);
            }
            dc->npc = JUMP_PC;
        }
    }
}

static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                tcg_gen_movi_tl(cpu_npc, dc->jump_pc[1]);
            }
            dc->npc = JUMP_PC;
        }
    }
}

#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            tcg_gen_movi_tl(cpu_npc, dc->jump_pc[1]);
        }
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D();

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);

    tcg_gen_concat_tl_i64(cpu_tmp64, lo, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(DisasContext *dc, TCGv dst, TCGv addr,
                               TCGv val2, int insn, int rd)
{
    TCGv r_val1 = gen_load_gpr(dc, rd);
    TCGv_i32 r_asi = gen_get_asi(insn, addr);

    gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_casx_asi(DisasContext *dc, TCGv dst, TCGv addr,
                                TCGv val2, int insn, int rd)
{
    TCGv r_val1 = gen_load_gpr(dc, rd);
    TCGv_i32 r_asi = gen_get_asi(insn, addr);

    gen_helper_casx_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, src);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv t;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);

    t = gen_dest_gpr(dc, rd + 1);
    tcg_gen_trunc_i64_tl(t, cpu_tmp64);
    gen_store_gpr(dc, rd + 1, t);

    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_store_gpr(dc, rd, hi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);

    tcg_gen_concat_tl_i64(cpu_tmp64, lo, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static TCGv get_src1(DisasContext *dc, unsigned int insn)
{
    unsigned int rs1 = GET_FIELD(insn, 13, 17);
    return gen_load_gpr(dc, rs1);
}

static TCGv get_src2(DisasContext *dc, unsigned int insn)
{
    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        TCGv t = get_temp_tl(dc);
        tcg_gen_movi_tl(t, simm);
        return t;
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        return gen_load_gpr(dc, rs2);
    }
}
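
/* Example: "add %o1, -1, %o0" has bit 13 (the i bit) set, so IS_IMM is
   true and the 13-bit immediate GET_FIELDs(insn, 19, 31) sign-extends
   0x1fff to -1; with bit 13 clear, instruction bits 4:0 select rs2
   instead. */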
#ifdef TARGET_SPARC64
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the latter.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        tcg_gen_trunc_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_trunc_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F();
    zero = tcg_const_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i64 dst = gen_dest_fpr_D();
    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
                        gen_load_fpr_D(dc, rs),
                        gen_load_fpr_D(dc, rd));
    gen_store_fpr_D(dc, rd, dst);
}

static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(qd);
}
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
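
    /* Worked example (purely mechanical): in the left-variant edge8 case
       below, imask = 7 and shift = 3, so an address with low bits 6 gives
       index 6 << 3 == 48, and (0x80c0e0f0f8fcfeffULL >> 48) & 0xff == 0xc0
       is the edge mask extracted from TABL. */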
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;

    /* (the width-16 table setup is elided here) */

    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    }
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
{
    TCGv tmp = tcg_temp_new();

    tcg_gen_add_tl(tmp, s1, s2);
    tcg_gen_andi_tl(dst, tmp, -8);
    if (left) {
        tcg_gen_neg_tl(tmp, tmp);
    }
    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);

    tcg_temp_free(tmp);
}
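
/* Example: alignaddr with s1 + s2 == 0x1006 writes 0x1000 to dst and 6 to
   GSR.align; a following faligndata can then extract the misaligned
   8 bytes spanning the two aligned doublewords just loaded. */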
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);

    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(shift);
}
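
/* E.g. with GSR.align == 3 the result is (s1 << 24) | (s2 >> 40): the low
   five bytes of s1 followed by the top three bytes of s2.  The xori trick
   works because (shift ^ 63) + 1 == 64 - shift, so the two right shifts
   together compute s2 >> (64 - shift) without ever shifting by 64. */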
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
{
    unsigned int opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
    TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
        tcg_gen_debug_insn_start(dc->pc);
    }

    opc = GET_FIELD(insn, 0, 1);
    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();
    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(dc, insn);
                    do_branch_reg(dc, target, insn, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                /* Special-case %g0 because that's the canonical nop.  */
                if (rd) {
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv t = gen_dest_gpr(dc, rd);
                    tcg_gen_movi_tl(t, value << 10);
                    gen_store_gpr(dc, rd, t);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
    case 1:                     /* call */
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv o7 = gen_dest_gpr(dc, 15);

            tcg_gen_movi_tl(o7, dc->pc);
            gen_store_gpr(dc, 15, o7);

            target += dc->pc;
            gen_mov_pc_npc(dc);
#ifdef TARGET_SPARC64
            if (unlikely(AM_CHECK(dc))) {
                target &= 0xffffffffULL;
            }
#endif
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond = GET_FIELD(insn, 3, 6);
                TCGv_i32 trap;
                int l1 = -1, mask;

                if (cond == 0) {
                    /* Trap never.  */
                    break;
                }

                save_state(dc);

                if (cond != 8) {
                    /* Conditional trap.  */
                    DisasCompare cmp;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);
                    if (cc == 0) {
                        gen_compare(&cmp, 0, cond, dc);
                    } else if (cc == 2) {
                        gen_compare(&cmp, 1, cond, dc);
                    } else {
                        goto illegal_insn;
                    }
#else
                    gen_compare(&cmp, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
                                      cmp.c1, cmp.c2, l1);
                    free_compare(&cmp);
                }

                mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
                        ? UA2005_HTRAP_MASK : V8_TRAP_MASK);

                /* Don't use the normal temporaries, as they may well have
                   gone out of scope with the branch above.  While we're
                   doing that we might as well pre-truncate to 32-bit.  */
                trap = tcg_temp_new_i32();

                rs1 = GET_FIELD_SP(insn, 14, 18);
                if (IS_IMM) {
                    rs2 = GET_FIELD_SP(insn, 0, 6);
                    if (rs1 == 0) {
                        tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
                        /* Signal that the trap value is fully constant.  */
                        mask = 0;
                    } else {
                        TCGv t1 = gen_load_gpr(dc, rs1);
                        tcg_gen_trunc_tl_i32(trap, t1);
                        tcg_gen_addi_i32(trap, trap, rs2);
                    }
                } else {
                    TCGv t1, t2;
                    rs2 = GET_FIELD_SP(insn, 0, 4);
                    t1 = gen_load_gpr(dc, rs1);
                    t2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(t1, t1, t2);
                    tcg_gen_trunc_tl_i32(trap, t1);
                }
                if (mask != 0) {
                    tcg_gen_andi_i32(trap, trap, mask);
                    tcg_gen_addi_i32(trap, trap, TT_TRAP);
                }

                gen_helper_raise_exception(cpu_env, trap);
                tcg_temp_free_i32(trap);

                if (cond == 8) {
                    /* An unconditional trap ends the TB.  */
                    dc->is_br = 1;
                    goto jmp_insn;
                } else {
                    /* A conditional trap falls through to the next insn.  */
                    gen_set_label(l1);
                    break;
                }
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    /* Read Asr17 */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv t = gen_dest_gpr(dc, rd);
                        /* Read Asr17 for a Leon3 monoprocessor */
                        tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
                        gen_store_gpr(dc, rd, t);
                        break;
                    }
#endif
                    gen_store_gpr(dc, rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    update_psr(dc);
                    gen_helper_rdccr(cpu_dst, cpu_env);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUSPARCState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_store_gpr(dc, rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv t = gen_dest_gpr(dc, rd);
                        if (unlikely(AM_CHECK(dc))) {
                            tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
                        } else {
                            tcg_gen_movi_tl(t, dc->pc);
                        }
                        gen_store_gpr(dc, rd, t);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_store_gpr(dc, rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUSPARCState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_store_gpr(dc, rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_store_gpr(dc, rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
2805 #if !defined(CONFIG_USER_ONLY)
2806 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2807 #ifndef TARGET_SPARC64
2808 if (!supervisor(dc)) {
2812 gen_helper_rdpsr(cpu_dst, cpu_env);
2814 CHECK_IU_FEATURE(dc, HYPV);
2815 if (!hypervisor(dc))
2817 rs1 = GET_FIELD(insn, 13, 17);
2820 // gen_op_rdhpstate();
2823 // gen_op_rdhtstate();
2826 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2829 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2832 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2834 case 31: // hstick_cmpr
2835 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2841 gen_store_gpr(dc, rd, cpu_dst);
2843 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2844 if (!supervisor(dc))
2846 #ifdef TARGET_SPARC64
2847 rs1 = GET_FIELD(insn, 13, 17);
2853 r_tsptr = tcg_temp_new_ptr();
2854 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2855 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2856 offsetof(trap_state, tpc));
2857 tcg_temp_free_ptr(r_tsptr);
2864 r_tsptr = tcg_temp_new_ptr();
2865 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2866 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2867 offsetof(trap_state, tnpc));
2868 tcg_temp_free_ptr(r_tsptr);
2875 r_tsptr = tcg_temp_new_ptr();
2876 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2877 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2878 offsetof(trap_state, tstate));
2879 tcg_temp_free_ptr(r_tsptr);
2886 r_tsptr = tcg_temp_new_ptr();
2887 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2888 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2889 offsetof(trap_state, tt));
2890 tcg_temp_free_ptr(r_tsptr);
2891 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2898 r_tickptr = tcg_temp_new_ptr();
2899 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2900 offsetof(CPUSPARCState, tick));
2901 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2902 tcg_temp_free_ptr(r_tickptr);
2906 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2909 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2910 offsetof(CPUSPARCState, pstate));
2911 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2914 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2915 offsetof(CPUSPARCState, tl));
2916 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2919 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2920 offsetof(CPUSPARCState, psrpil));
2921 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2924 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2927 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2928 offsetof(CPUSPARCState, cansave));
2929 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2931 case 11: // canrestore
2932 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2933 offsetof(CPUSPARCState, canrestore));
2934 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2936 case 12: // cleanwin
2937 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2938 offsetof(CPUSPARCState, cleanwin));
2939 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2941 case 13: // otherwin
2942 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2943 offsetof(CPUSPARCState, otherwin));
2944 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2947 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2948 offsetof(CPUSPARCState, wstate));
2949 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2951 case 16: // UA2005 gl
2952 CHECK_IU_FEATURE(dc, GL);
2953 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2954 offsetof(CPUSPARCState, gl));
2955 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2957 case 26: // UA2005 strand status
2958 CHECK_IU_FEATURE(dc, HYPV);
2959 if (!hypervisor(dc))
2961 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2964 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2971 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2973 gen_store_gpr(dc, rd, cpu_tmp0);
2975 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2976 #ifdef TARGET_SPARC64
2978 gen_helper_flushw(cpu_env);
2980 if (!supervisor(dc))
2982 gen_store_gpr(dc, rd, cpu_tbr);
2986 } else if (xop == 0x34) { /* FPU Operations */
2987 if (gen_trap_ifnofpu(dc)) {
2990 gen_op_clear_ieee_excp_and_FTT();
2991 rs1 = GET_FIELD(insn, 13, 17);
2992 rs2 = GET_FIELD(insn, 27, 31);
2993 xop = GET_FIELD(insn, 18, 26);
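/* FPop decode: in GET_FIELD's big-endian numbering, 13-17 is rs1
   (insn bits 18:14), 27-31 is rs2 (bits 4:0) and 18-26 is the
   9-bit opf field (bits 13:5). */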
2996 case 0x1: /* fmovs */
2997 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2998 gen_store_fpr_F(dc, rd, cpu_src1_32);
3000 case 0x5: /* fnegs */
3001 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3003 case 0x9: /* fabss */
3004 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3006 case 0x29: /* fsqrts */
3007 CHECK_FPU_FEATURE(dc, FSQRT);
3008 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3010 case 0x2a: /* fsqrtd */
3011 CHECK_FPU_FEATURE(dc, FSQRT);
3012 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3014 case 0x2b: /* fsqrtq */
3015 CHECK_FPU_FEATURE(dc, FLOAT128);
3016 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3018 case 0x41: /* fadds */
3019 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3021 case 0x42: /* faddd */
3022 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3024 case 0x43: /* faddq */
3025 CHECK_FPU_FEATURE(dc, FLOAT128);
3026 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3028 case 0x45: /* fsubs */
3029 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3031 case 0x46: /* fsubd */
3032 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3034 case 0x47: /* fsubq */
3035 CHECK_FPU_FEATURE(dc, FLOAT128);
3036 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3038 case 0x49: /* fmuls */
3039 CHECK_FPU_FEATURE(dc, FMUL);
3040 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3042 case 0x4a: /* fmuld */
3043 CHECK_FPU_FEATURE(dc, FMUL);
3044 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3046 case 0x4b: /* fmulq */
3047 CHECK_FPU_FEATURE(dc, FLOAT128);
3048 CHECK_FPU_FEATURE(dc, FMUL);
3049 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3051 case 0x4d: /* fdivs */
3052 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3054 case 0x4e: /* fdivd */
3055 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3057 case 0x4f: /* fdivq */
3058 CHECK_FPU_FEATURE(dc, FLOAT128);
3059 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3061 case 0x69: /* fsmuld */
3062 CHECK_FPU_FEATURE(dc, FSMULD);
3063 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3065 case 0x6e: /* fdmulq */
3066 CHECK_FPU_FEATURE(dc, FLOAT128);
3067 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3069 case 0xc4: /* fitos */
3070 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3072 case 0xc6: /* fdtos */
3073 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3075 case 0xc7: /* fqtos */
3076 CHECK_FPU_FEATURE(dc, FLOAT128);
3077 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3079 case 0xc8: /* fitod */
3080 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3082 case 0xc9: /* fstod */
3083 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3085 case 0xcb: /* fqtod */
3086 CHECK_FPU_FEATURE(dc, FLOAT128);
3087 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3089 case 0xcc: /* fitoq */
3090 CHECK_FPU_FEATURE(dc, FLOAT128);
3091 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3093 case 0xcd: /* fstoq */
3094 CHECK_FPU_FEATURE(dc, FLOAT128);
3095 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3097 case 0xce: /* fdtoq */
3098 CHECK_FPU_FEATURE(dc, FLOAT128);
3099 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3101 case 0xd1: /* fstoi */
3102 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3104 case 0xd2: /* fdtoi */
3105 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3107 case 0xd3: /* fqtoi */
3108 CHECK_FPU_FEATURE(dc, FLOAT128);
3109 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3111 #ifdef TARGET_SPARC64
3112 case 0x2: /* V9 fmovd */
3113 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3114 gen_store_fpr_D(dc, rd, cpu_src1_64);
3116 case 0x3: /* V9 fmovq */
3117 CHECK_FPU_FEATURE(dc, FLOAT128);
3118 gen_move_Q(rd, rs2);
3120 case 0x6: /* V9 fnegd */
3121 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3123 case 0x7: /* V9 fnegq */
3124 CHECK_FPU_FEATURE(dc, FLOAT128);
3125 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3127 case 0xa: /* V9 fabsd */
3128 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3130 case 0xb: /* V9 fabsq */
3131 CHECK_FPU_FEATURE(dc, FLOAT128);
3132 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3134 case 0x81: /* V9 fstox */
3135 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3137 case 0x82: /* V9 fdtox */
3138 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3140 case 0x83: /* V9 fqtox */
3141 CHECK_FPU_FEATURE(dc, FLOAT128);
3142 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3144 case 0x84: /* V9 fxtos */
3145 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3147 case 0x88: /* V9 fxtod */
3148 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3150 case 0x8c: /* V9 fxtoq */
3151 CHECK_FPU_FEATURE(dc, FLOAT128);
3152 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3158 } else if (xop == 0x35) { /* FPU Operations */
3159 #ifdef TARGET_SPARC64
3162 if (gen_trap_ifnofpu(dc)) {
3165 gen_op_clear_ieee_excp_and_FTT();
3166 rs1 = GET_FIELD(insn, 13, 17);
3167 rs2 = GET_FIELD(insn, 27, 31);
3168 xop = GET_FIELD(insn, 18, 26);
3171 #ifdef TARGET_SPARC64
3175 cond = GET_FIELD_SP(insn, 14, 17); \
3176 cpu_src1 = get_src1(dc, insn); \
3177 gen_compare_reg(&cmp, cond, cpu_src1); \
3178 gen_fmov##sz(dc, &cmp, rd, rs2); \
3179 free_compare(&cmp); \
3182 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3185 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3188 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3189 CHECK_FPU_FEATURE(dc, FLOAT128);
3196 #ifdef TARGET_SPARC64
3197 #define FMOVCC(fcc, sz) \
3200 cond = GET_FIELD_SP(insn, 14, 17); \
3201 gen_fcompare(&cmp, fcc, cond); \
3202 gen_fmov##sz(dc, &cmp, rd, rs2); \
3203 free_compare(&cmp); \
3206 case 0x001: /* V9 fmovscc %fcc0 */
3209 case 0x002: /* V9 fmovdcc %fcc0 */
3212 case 0x003: /* V9 fmovqcc %fcc0 */
3213 CHECK_FPU_FEATURE(dc, FLOAT128);
3216 case 0x041: /* V9 fmovscc %fcc1 */
3219 case 0x042: /* V9 fmovdcc %fcc1 */
3222 case 0x043: /* V9 fmovqcc %fcc1 */
3223 CHECK_FPU_FEATURE(dc, FLOAT128);
3226 case 0x081: /* V9 fmovscc %fcc2 */
3229 case 0x082: /* V9 fmovdcc %fcc2 */
3232 case 0x083: /* V9 fmovqcc %fcc2 */
3233 CHECK_FPU_FEATURE(dc, FLOAT128);
3236 case 0x0c1: /* V9 fmovscc %fcc3 */
3239 case 0x0c2: /* V9 fmovdcc %fcc3 */
3242 case 0x0c3: /* V9 fmovqcc %fcc3 */
3243 CHECK_FPU_FEATURE(dc, FLOAT128);
3247 #define FMOVCC(xcc, sz) \
3250 cond = GET_FIELD_SP(insn, 14, 17); \
3251 gen_compare(&cmp, xcc, cond, dc); \
3252 gen_fmov##sz(dc, &cmp, rd, rs2); \
3253 free_compare(&cmp); \
3256 case 0x101: /* V9 fmovscc %icc */
3259 case 0x102: /* V9 fmovdcc %icc */
3262 case 0x103: /* V9 fmovqcc %icc */
3263 CHECK_FPU_FEATURE(dc, FLOAT128);
3266 case 0x181: /* V9 fmovscc %xcc */
3269 case 0x182: /* V9 fmovdcc %xcc */
3272 case 0x183: /* V9 fmovqcc %xcc */
3273 CHECK_FPU_FEATURE(dc, FLOAT128);
3278 case 0x51: /* fcmps, V9 %fcc */
3279 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3280 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3281 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3283 case 0x52: /* fcmpd, V9 %fcc */
3284 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3285 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3286 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3288 case 0x53: /* fcmpq, V9 %fcc */
3289 CHECK_FPU_FEATURE(dc, FLOAT128);
3290 gen_op_load_fpr_QT0(QFPREG(rs1));
3291 gen_op_load_fpr_QT1(QFPREG(rs2));
3292 gen_op_fcmpq(rd & 3);
3294 case 0x55: /* fcmpes, V9 %fcc */
3295 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3296 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3297 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3299 case 0x56: /* fcmped, V9 %fcc */
3300 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3301 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3302 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3304 case 0x57: /* fcmpeq, V9 %fcc */
3305 CHECK_FPU_FEATURE(dc, FLOAT128);
3306 gen_op_load_fpr_QT0(QFPREG(rs1));
3307 gen_op_load_fpr_QT1(QFPREG(rs2));
3308 gen_op_fcmpeq(rd & 3);
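/* For all the fcmp variants above, the low two bits of the rd
   field select which of the four V9 %fcc fields receives the
   result; V8 has only the single fcc field. */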
3313 } else if (xop == 0x2) {
3314 TCGv dst = gen_dest_gpr(dc, rd);
3315 rs1 = GET_FIELD(insn, 13, 17);
3317 /* clr/mov shortcut: or %g0, x, y -> mov x, y */
3318 if (IS_IMM) { /* immediate */
3319 simm = GET_FIELDs(insn, 19, 31);
3320 tcg_gen_movi_tl(dst, simm);
3321 gen_store_gpr(dc, rd, dst);
3322 } else { /* register */
3323 rs2 = GET_FIELD(insn, 27, 31);
3325 tcg_gen_movi_tl(dst, 0);
3326 gen_store_gpr(dc, rd, dst);
3328 cpu_src2 = gen_load_gpr(dc, rs2);
3329 gen_store_gpr(dc, rd, cpu_src2);
3333 cpu_src1 = get_src1(dc, insn);
3334 if (IS_IMM) { /* immediate */
3335 simm = GET_FIELDs(insn, 19, 31);
3336 tcg_gen_ori_tl(dst, cpu_src1, simm);
3337 gen_store_gpr(dc, rd, dst);
3338 } else { /* register */
3339 rs2 = GET_FIELD(insn, 27, 31);
3341 /* mov shortcut: or x, %g0, y -> mov x, y */
3342 gen_store_gpr(dc, rd, cpu_src1);
3344 cpu_src2 = gen_load_gpr(dc, rs2);
3345 tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3346 gen_store_gpr(dc, rd, dst);
3350 #ifdef TARGET_SPARC64
3351 } else if (xop == 0x25) { /* sll, V9 sllx */
3352 cpu_src1 = get_src1(dc, insn);
3353 if (IS_IMM) { /* immediate */
3354 simm = GET_FIELDs(insn, 20, 31);
3355 if (insn & (1 << 12)) {
3356 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3358 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3360 } else { /* register */
3361 rs2 = GET_FIELD(insn, 27, 31);
3362 cpu_src2 = gen_load_gpr(dc, rs2);
3363 if (insn & (1 << 12)) {
3364 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3366 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3368 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3370 gen_store_gpr(dc, rd, cpu_dst);
3371 } else if (xop == 0x26) { /* srl, V9 srlx */
3372 cpu_src1 = get_src1(dc, insn);
3373 if (IS_IMM) { /* immediate */
3374 simm = GET_FIELDs(insn, 20, 31);
3375 if (insn & (1 << 12)) {
3376 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3378 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3379 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3381 } else { /* register */
3382 rs2 = GET_FIELD(insn, 27, 31);
3383 cpu_src2 = gen_load_gpr(dc, rs2);
3384 if (insn & (1 << 12)) {
3385 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3386 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3388 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3389 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3390 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3393 gen_store_gpr(dc, rd, cpu_dst);
3394 } else if (xop == 0x27) { /* sra, V9 srax */
3395 cpu_src1 = get_src1(dc, insn);
3396 if (IS_IMM) { /* immediate */
3397 simm = GET_FIELDs(insn, 20, 31);
3398 if (insn & (1 << 12)) {
3399 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3401 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3402 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3404 } else { /* register */
3405 rs2 = GET_FIELD(insn, 27, 31);
3406 cpu_src2 = gen_load_gpr(dc, rs2);
3407 if (insn & (1 << 12)) {
3408 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3409 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3411 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3412 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3413 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3416 gen_store_gpr(dc, rd, cpu_dst);
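/* For the three V9 shifts above, insn bit 12 selects the 64-bit
   x-form with a 6-bit count; the 32-bit forms mask the count to
   5 bits and operate on the low word (srl zero-extends it, sra
   sign-extends it first). */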
3418 } else if (xop < 0x36) {
3420 cpu_src1 = get_src1(dc, insn);
3421 cpu_src2 = get_src2(dc, insn);
3422 switch (xop & ~0x10) {
3425 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3426 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3427 dc->cc_op = CC_OP_ADD;
3429 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3433 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3435 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3436 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3437 dc->cc_op = CC_OP_LOGIC;
3441 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3443 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3444 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3445 dc->cc_op = CC_OP_LOGIC;
3449 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3451 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3452 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3453 dc->cc_op = CC_OP_LOGIC;
3458 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3459 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3460 dc->cc_op = CC_OP_SUB;
3462 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3465 case 0x5: /* andn */
3466 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3468 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3469 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3470 dc->cc_op = CC_OP_LOGIC;
3474 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3476 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3477 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3478 dc->cc_op = CC_OP_LOGIC;
3481 case 0x7: /* xorn */
3482 tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3484 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3485 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3486 dc->cc_op = CC_OP_LOGIC;
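/* The cc-setting logic ops only record the result in cpu_cc_dst
   and tag cc_op as CC_OP_LOGIC; the actual condition-code bits
   are computed lazily when something reads them. */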
3489 case 0x8: /* addx, V9 addc */
3490 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3493 #ifdef TARGET_SPARC64
3494 case 0x9: /* V9 mulx */
3495 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3498 case 0xa: /* umul */
3499 CHECK_IU_FEATURE(dc, MUL);
3500 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3502 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3503 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3504 dc->cc_op = CC_OP_LOGIC;
3507 case 0xb: /* smul */
3508 CHECK_IU_FEATURE(dc, MUL);
3509 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3511 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3512 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3513 dc->cc_op = CC_OP_LOGIC;
3516 case 0xc: /* subx, V9 subc */
3517 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3520 #ifdef TARGET_SPARC64
3521 case 0xd: /* V9 udivx */
3522 gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3525 case 0xe: /* udiv */
3526 CHECK_IU_FEATURE(dc, DIV);
3528 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3530 dc->cc_op = CC_OP_DIV;
3532 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3536 case 0xf: /* sdiv */
3537 CHECK_IU_FEATURE(dc, DIV);
3539 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3541 dc->cc_op = CC_OP_DIV;
3543 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3550 gen_store_gpr(dc, rd, cpu_dst);
3552 cpu_src1 = get_src1(dc, insn);
3553 cpu_src2 = get_src2(dc, insn);
3555 case 0x20: /* taddcc */
3556 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3557 gen_store_gpr(dc, rd, cpu_dst);
3558 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3559 dc->cc_op = CC_OP_TADD;
3561 case 0x21: /* tsubcc */
3562 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3563 gen_store_gpr(dc, rd, cpu_dst);
3564 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3565 dc->cc_op = CC_OP_TSUB;
3567 case 0x22: /* taddcctv */
3568 gen_helper_taddcctv(cpu_dst, cpu_env,
3569 cpu_src1, cpu_src2);
3570 gen_store_gpr(dc, rd, cpu_dst);
3571 dc->cc_op = CC_OP_TADDTV;
3573 case 0x23: /* tsubcctv */
3574 gen_helper_tsubcctv(cpu_dst, cpu_env,
3575 cpu_src1, cpu_src2);
3576 gen_store_gpr(dc, rd, cpu_dst);
3577 dc->cc_op = CC_OP_TSUBTV;
3579 case 0x24: /* mulscc */
3581 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3582 gen_store_gpr(dc, rd, cpu_dst);
3583 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3584 dc->cc_op = CC_OP_ADD;
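/* mulscc performs one step of the V8 iterative multiply: it
   consumes and shifts %y and sets the integer condition codes
   the same way an add does, hence CC_OP_ADD. */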
3586 #ifndef TARGET_SPARC64
3587 case 0x25: /* sll */
3588 if (IS_IMM) { /* immediate */
3589 simm = GET_FIELDs(insn, 20, 31);
3590 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3591 } else { /* register */
3592 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3593 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3595 gen_store_gpr(dc, rd, cpu_dst);
3597 case 0x26: /* srl */
3598 if (IS_IMM) { /* immediate */
3599 simm = GET_FIELDs(insn, 20, 31);
3600 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3601 } else { /* register */
3602 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3603 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3605 gen_store_gpr(dc, rd, cpu_dst);
3607 case 0x27: /* sra */
3608 if (IS_IMM) { /* immediate */
3609 simm = GET_FIELDs(insn, 20, 31);
3610 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3611 } else { /* register */
3612 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3613 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3615 gen_store_gpr(dc, rd, cpu_dst);
3622 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3623 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
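/* wr is architecturally defined as an xor of its two sources, so
   "mov x to %y" is encoded as wr x, %g0, %y; only the low 32
   bits of %y are kept. */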
3625 #ifndef TARGET_SPARC64
3626 case 0x01 ... 0x0f: /* undefined in the SPARCv8 manual, nop on the microSPARC II */
3630 case 0x10 ... 0x1f: /* implementation-dependent in the SPARCv8 manual, nop on the microSPARC II */
3636 case 0x2: /* V9 wrccr */
3637 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3638 gen_helper_wrccr(cpu_env, cpu_dst);
3639 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3640 dc->cc_op = CC_OP_FLAGS;
3642 case 0x3: /* V9 wrasi */
3643 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3644 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3645 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3647 case 0x6: /* V9 wrfprs */
3648 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3649 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3655 case 0xf: /* V9 sir, nop if user */
3656 #if !defined(CONFIG_USER_ONLY)
3657 if (supervisor(dc)) {
3662 case 0x13: /* Graphics Status */
3663 if (gen_trap_ifnofpu(dc)) {
3666 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3668 case 0x14: /* Softint set */
3669 if (!supervisor(dc))
3671 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3672 gen_helper_set_softint(cpu_env, cpu_tmp64);
3674 case 0x15: /* Softint clear */
3675 if (!supervisor(dc))
3677 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3678 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3680 case 0x16: /* Softint write */
3681 if (!supervisor(dc))
3683 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3684 gen_helper_write_softint(cpu_env, cpu_tmp64);
3686 case 0x17: /* Tick compare */
3687 #if !defined(CONFIG_USER_ONLY)
3688 if (!supervisor(dc))
3694 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3696 r_tickptr = tcg_temp_new_ptr();
3697 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3698 offsetof(CPUSPARCState, tick));
3699 gen_helper_tick_set_limit(r_tickptr,
3701 tcg_temp_free_ptr(r_tickptr);
3704 case 0x18: /* System tick */
3705 #if !defined(CONFIG_USER_ONLY)
3706 if (!supervisor(dc))
3712 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3714 r_tickptr = tcg_temp_new_ptr();
3715 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3716 offsetof(CPUSPARCState, stick));
3717 gen_helper_tick_set_count(r_tickptr,
3719 tcg_temp_free_ptr(r_tickptr);
3722 case 0x19: /* System tick compare */
3723 #if !defined(CONFIG_USER_ONLY)
3724 if (!supervisor(dc))
3730 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3732 r_tickptr = tcg_temp_new_ptr();
3733 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3734 offsetof(CPUSPARCState, stick));
3735 gen_helper_tick_set_limit(r_tickptr,
3737 tcg_temp_free_ptr(r_tickptr);
3741 case 0x10: /* Performance Control */
3742 case 0x11: /* Performance Instrumentation Counter */
3744 case 0x12: /* Dispatch Control */
3751 #if !defined(CONFIG_USER_ONLY)
3752 case 0x31: /* wrpsr, V9 saved, restored */
3754 if (!supervisor(dc))
3756 #ifdef TARGET_SPARC64
3759 gen_helper_saved(cpu_env);
3762 gen_helper_restored(cpu_env);
3764 case 2: /* UA2005 allclean */
3765 case 3: /* UA2005 otherw */
3766 case 4: /* UA2005 normalw */
3767 case 5: /* UA2005 invalw */
3773 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3774 gen_helper_wrpsr(cpu_env, cpu_dst);
3775 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3776 dc->cc_op = CC_OP_FLAGS;
3784 case 0x32: /* wrwim, V9 wrpr */
3786 if (!supervisor(dc))
3788 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3789 #ifdef TARGET_SPARC64
3795 r_tsptr = tcg_temp_new_ptr();
3796 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3797 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3798 offsetof(trap_state, tpc));
3799 tcg_temp_free_ptr(r_tsptr);
3806 r_tsptr = tcg_temp_new_ptr();
3807 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3808 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3809 offsetof(trap_state, tnpc));
3810 tcg_temp_free_ptr(r_tsptr);
3817 r_tsptr = tcg_temp_new_ptr();
3818 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3819 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3820 offsetof(trap_state,
3822 tcg_temp_free_ptr(r_tsptr);
3829 r_tsptr = tcg_temp_new_ptr();
3830 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3831 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3832 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3833 offsetof(trap_state, tt));
3834 tcg_temp_free_ptr(r_tsptr);
3841 r_tickptr = tcg_temp_new_ptr();
3842 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3843 offsetof(CPUSPARCState, tick));
3844 gen_helper_tick_set_count(r_tickptr,
3846 tcg_temp_free_ptr(r_tickptr);
3850 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3854 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3855 dc->npc = DYNAMIC_PC;
3859 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3860 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3861 offsetof(CPUSPARCState, tl));
3862 dc->npc = DYNAMIC_PC;
3865 gen_helper_wrpil(cpu_env, cpu_tmp0);
3868 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3871 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3872 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3873 offsetof(CPUSPARCState,
3876 case 11: // canrestore
3877 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3878 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3879 offsetof(CPUSPARCState,
3882 case 12: // cleanwin
3883 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3884 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3885 offsetof(CPUSPARCState,
3888 case 13: // otherwin
3889 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3890 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3891 offsetof(CPUSPARCState,
3895 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3896 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3897 offsetof(CPUSPARCState,
3900 case 16: // UA2005 gl
3901 CHECK_IU_FEATURE(dc, GL);
3902 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3903 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3904 offsetof(CPUSPARCState, gl));
3906 case 26: // UA2005 strand status
3907 CHECK_IU_FEATURE(dc, HYPV);
3908 if (!hypervisor(dc))
3910 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3916 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3917 if (dc->def->nwindows != 32)
3918 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3919 (1 << dc->def->nwindows) - 1);
3920 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3924 case 0x33: /* wrtbr, UA2005 wrhpr */
3926 #ifndef TARGET_SPARC64
3927 if (!supervisor(dc))
3929 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3931 CHECK_IU_FEATURE(dc, HYPV);
3932 if (!hypervisor(dc))
3934 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3937 // XXX gen_op_wrhpstate();
3944 // XXX gen_op_wrhtstate();
3947 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3950 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3952 case 31: // hstick_cmpr
3956 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3957 r_tickptr = tcg_temp_new_ptr();
3958 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3959 offsetof(CPUSPARCState, hstick));
3960 gen_helper_tick_set_limit(r_tickptr,
3962 tcg_temp_free_ptr(r_tickptr);
3965 case 6: // hver readonly
3973 #ifdef TARGET_SPARC64
3974 case 0x2c: /* V9 movcc */
3976 int cc = GET_FIELD_SP(insn, 11, 12);
3977 int cond = GET_FIELD_SP(insn, 14, 17);
3981 if (insn & (1 << 18)) {
3983 gen_compare(&cmp, 0, cond, dc);
3984 } else if (cc == 2) {
3985 gen_compare(&cmp, 1, cond, dc);
3990 gen_fcompare(&cmp, cc, cond);
3993 /* The get_src2 above loaded the normal 13-bit
3994 immediate field, not the 11-bit field we have
3995 in movcc. But it did handle the reg case. */
3997 simm = GET_FIELD_SPs(insn, 0, 10);
3998 tcg_gen_movi_tl(cpu_src2, simm);
4001 dst = gen_load_gpr(dc, rd);
4002 tcg_gen_movcond_tl(cmp.cond, dst,
4006 gen_store_gpr(dc, rd, dst);
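/* movcc: the old rd value is loaded first so tcg_gen_movcond can
   preserve it when the condition is false; the store back is
   then unconditional. */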
4009 case 0x2d: /* V9 sdivx */
4010 gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4011 gen_store_gpr(dc, rd, cpu_dst);
4013 case 0x2e: /* V9 popc */
4014 gen_helper_popc(cpu_dst, cpu_src2);
4015 gen_store_gpr(dc, rd, cpu_dst);
4017 case 0x2f: /* V9 movr */
4019 int cond = GET_FIELD_SP(insn, 10, 12);
4023 gen_compare_reg(&cmp, cond, cpu_src1);
4025 /* The get_src2 above loaded the normal 13-bit
4026 immediate field, not the 10-bit field we have
4027 in movr. But it did handle the reg case. */
4029 simm = GET_FIELD_SPs(insn, 0, 9);
4030 tcg_gen_movi_tl(cpu_src2, simm);
4033 dst = gen_load_gpr(dc, rd);
4034 tcg_gen_movcond_tl(cmp.cond, dst,
4038 gen_store_gpr(dc, rd, dst);
4046 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4047 #ifdef TARGET_SPARC64
4048 int opf = GET_FIELD_SP(insn, 5, 13);
4049 rs1 = GET_FIELD(insn, 13, 17);
4050 rs2 = GET_FIELD(insn, 27, 31);
4051 if (gen_trap_ifnofpu(dc)) {
4056 case 0x000: /* VIS I edge8cc */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 cpu_src1 = gen_load_gpr(dc, rs1);
4059 cpu_src2 = gen_load_gpr(dc, rs2);
4060 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4061 gen_store_gpr(dc, rd, cpu_dst);
4063 case 0x001: /* VIS II edge8n */
4064 CHECK_FPU_FEATURE(dc, VIS2);
4065 cpu_src1 = gen_load_gpr(dc, rs1);
4066 cpu_src2 = gen_load_gpr(dc, rs2);
4067 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4068 gen_store_gpr(dc, rd, cpu_dst);
4070 case 0x002: /* VIS I edge8lcc */
4071 CHECK_FPU_FEATURE(dc, VIS1);
4072 cpu_src1 = gen_load_gpr(dc, rs1);
4073 cpu_src2 = gen_load_gpr(dc, rs2);
4074 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4075 gen_store_gpr(dc, rd, cpu_dst);
4077 case 0x003: /* VIS II edge8ln */
4078 CHECK_FPU_FEATURE(dc, VIS2);
4079 cpu_src1 = gen_load_gpr(dc, rs1);
4080 cpu_src2 = gen_load_gpr(dc, rs2);
4081 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4082 gen_store_gpr(dc, rd, cpu_dst);
4084 case 0x004: /* VIS I edge16cc */
4085 CHECK_FPU_FEATURE(dc, VIS1);
4086 cpu_src1 = gen_load_gpr(dc, rs1);
4087 cpu_src2 = gen_load_gpr(dc, rs2);
4088 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4089 gen_store_gpr(dc, rd, cpu_dst);
4091 case 0x005: /* VIS II edge16n */
4092 CHECK_FPU_FEATURE(dc, VIS2);
4093 cpu_src1 = gen_load_gpr(dc, rs1);
4094 cpu_src2 = gen_load_gpr(dc, rs2);
4095 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4096 gen_store_gpr(dc, rd, cpu_dst);
4098 case 0x006: /* VIS I edge16lcc */
4099 CHECK_FPU_FEATURE(dc, VIS1);
4100 cpu_src1 = gen_load_gpr(dc, rs1);
4101 cpu_src2 = gen_load_gpr(dc, rs2);
4102 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4103 gen_store_gpr(dc, rd, cpu_dst);
4105 case 0x007: /* VIS II edge16ln */
4106 CHECK_FPU_FEATURE(dc, VIS2);
4107 cpu_src1 = gen_load_gpr(dc, rs1);
4108 cpu_src2 = gen_load_gpr(dc, rs2);
4109 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4110 gen_store_gpr(dc, rd, cpu_dst);
4112 case 0x008: /* VIS I edge32cc */
4113 CHECK_FPU_FEATURE(dc, VIS1);
4114 cpu_src1 = gen_load_gpr(dc, rs1);
4115 cpu_src2 = gen_load_gpr(dc, rs2);
4116 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4117 gen_store_gpr(dc, rd, cpu_dst);
4119 case 0x009: /* VIS II edge32n */
4120 CHECK_FPU_FEATURE(dc, VIS2);
4121 cpu_src1 = gen_load_gpr(dc, rs1);
4122 cpu_src2 = gen_load_gpr(dc, rs2);
4123 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4124 gen_store_gpr(dc, rd, cpu_dst);
4126 case 0x00a: /* VIS I edge32lcc */
4127 CHECK_FPU_FEATURE(dc, VIS1);
4128 cpu_src1 = gen_load_gpr(dc, rs1);
4129 cpu_src2 = gen_load_gpr(dc, rs2);
4130 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4131 gen_store_gpr(dc, rd, cpu_dst);
4133 case 0x00b: /* VIS II edge32ln */
4134 CHECK_FPU_FEATURE(dc, VIS2);
4135 cpu_src1 = gen_load_gpr(dc, rs1);
4136 cpu_src2 = gen_load_gpr(dc, rs2);
4137 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4138 gen_store_gpr(dc, rd, cpu_dst);
4140 case 0x010: /* VIS I array8 */
4141 CHECK_FPU_FEATURE(dc, VIS1);
4142 cpu_src1 = gen_load_gpr(dc, rs1);
4143 cpu_src2 = gen_load_gpr(dc, rs2);
4144 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4145 gen_store_gpr(dc, rd, cpu_dst);
4147 case 0x012: /* VIS I array16 */
4148 CHECK_FPU_FEATURE(dc, VIS1);
4149 cpu_src1 = gen_load_gpr(dc, rs1);
4150 cpu_src2 = gen_load_gpr(dc, rs2);
4151 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4152 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4153 gen_store_gpr(dc, rd, cpu_dst);
4155 case 0x014: /* VIS I array32 */
4156 CHECK_FPU_FEATURE(dc, VIS1);
4157 cpu_src1 = gen_load_gpr(dc, rs1);
4158 cpu_src2 = gen_load_gpr(dc, rs2);
4159 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4160 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4161 gen_store_gpr(dc, rd, cpu_dst);
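/* array16 and array32 reuse the array8 helper and simply scale
   the resulting address by the element size (<<1 and <<2). */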
4163 case 0x018: /* VIS I alignaddr */
4164 CHECK_FPU_FEATURE(dc, VIS1);
4165 cpu_src1 = gen_load_gpr(dc, rs1);
4166 cpu_src2 = gen_load_gpr(dc, rs2);
4167 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4168 gen_store_gpr(dc, rd, cpu_dst);
4170 case 0x01a: /* VIS I alignaddrl */
4171 CHECK_FPU_FEATURE(dc, VIS1);
4172 cpu_src1 = gen_load_gpr(dc, rs1);
4173 cpu_src2 = gen_load_gpr(dc, rs2);
4174 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4175 gen_store_gpr(dc, rd, cpu_dst);
4177 case 0x019: /* VIS II bmask */
4178 CHECK_FPU_FEATURE(dc, VIS2);
4179 cpu_src1 = gen_load_gpr(dc, rs1);
4180 cpu_src2 = gen_load_gpr(dc, rs2);
4181 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4182 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4183 gen_store_gpr(dc, rd, cpu_dst);
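/* bmask: the sum of the two sources is deposited into GSR bits
   [63:32] (the mask field later consumed by bshuffle) as well as
   being written to rd. */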
4185 case 0x020: /* VIS I fcmple16 */
4186 CHECK_FPU_FEATURE(dc, VIS1);
4187 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4188 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4189 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4190 gen_store_gpr(dc, rd, cpu_dst);
4192 case 0x022: /* VIS I fcmpne16 */
4193 CHECK_FPU_FEATURE(dc, VIS1);
4194 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4195 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4196 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4197 gen_store_gpr(dc, rd, cpu_dst);
4199 case 0x024: /* VIS I fcmple32 */
4200 CHECK_FPU_FEATURE(dc, VIS1);
4201 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4202 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4203 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4204 gen_store_gpr(dc, rd, cpu_dst);
4206 case 0x026: /* VIS I fcmpne32 */
4207 CHECK_FPU_FEATURE(dc, VIS1);
4208 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4209 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4210 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4211 gen_store_gpr(dc, rd, cpu_dst);
4213 case 0x028: /* VIS I fcmpgt16 */
4214 CHECK_FPU_FEATURE(dc, VIS1);
4215 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4216 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4217 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4218 gen_store_gpr(dc, rd, cpu_dst);
4220 case 0x02a: /* VIS I fcmpeq16 */
4221 CHECK_FPU_FEATURE(dc, VIS1);
4222 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4223 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4224 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4225 gen_store_gpr(dc, rd, cpu_dst);
4227 case 0x02c: /* VIS I fcmpgt32 */
4228 CHECK_FPU_FEATURE(dc, VIS1);
4229 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4230 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4231 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4232 gen_store_gpr(dc, rd, cpu_dst);
4234 case 0x02e: /* VIS I fcmpeq32 */
4235 CHECK_FPU_FEATURE(dc, VIS1);
4236 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4237 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4238 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4239 gen_store_gpr(dc, rd, cpu_dst);
4241 case 0x031: /* VIS I fmul8x16 */
4242 CHECK_FPU_FEATURE(dc, VIS1);
4243 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4245 case 0x033: /* VIS I fmul8x16au */
4246 CHECK_FPU_FEATURE(dc, VIS1);
4247 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4249 case 0x035: /* VIS I fmul8x16al */
4250 CHECK_FPU_FEATURE(dc, VIS1);
4251 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4253 case 0x036: /* VIS I fmul8sux16 */
4254 CHECK_FPU_FEATURE(dc, VIS1);
4255 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4257 case 0x037: /* VIS I fmul8ulx16 */
4258 CHECK_FPU_FEATURE(dc, VIS1);
4259 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4261 case 0x038: /* VIS I fmuld8sux16 */
4262 CHECK_FPU_FEATURE(dc, VIS1);
4263 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4265 case 0x039: /* VIS I fmuld8ulx16 */
4266 CHECK_FPU_FEATURE(dc, VIS1);
4267 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4269 case 0x03a: /* VIS I fpack32 */
4270 CHECK_FPU_FEATURE(dc, VIS1);
4271 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4273 case 0x03b: /* VIS I fpack16 */
4274 CHECK_FPU_FEATURE(dc, VIS1);
4275 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4276 cpu_dst_32 = gen_dest_fpr_F();
4277 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4278 gen_store_fpr_F(dc, rd, cpu_dst_32);
4280 case 0x03d: /* VIS I fpackfix */
4281 CHECK_FPU_FEATURE(dc, VIS1);
4282 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4283 cpu_dst_32 = gen_dest_fpr_F();
4284 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4285 gen_store_fpr_F(dc, rd, cpu_dst_32);
4287 case 0x03e: /* VIS I pdist */
4288 CHECK_FPU_FEATURE(dc, VIS1);
4289 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4291 case 0x048: /* VIS I faligndata */
4292 CHECK_FPU_FEATURE(dc, VIS1);
4293 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4295 case 0x04b: /* VIS I fpmerge */
4296 CHECK_FPU_FEATURE(dc, VIS1);
4297 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4299 case 0x04c: /* VIS II bshuffle */
4300 CHECK_FPU_FEATURE(dc, VIS2);
4301 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4303 case 0x04d: /* VIS I fexpand */
4304 CHECK_FPU_FEATURE(dc, VIS1);
4305 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4307 case 0x050: /* VIS I fpadd16 */
4308 CHECK_FPU_FEATURE(dc, VIS1);
4309 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4311 case 0x051: /* VIS I fpadd16s */
4312 CHECK_FPU_FEATURE(dc, VIS1);
4313 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4315 case 0x052: /* VIS I fpadd32 */
4316 CHECK_FPU_FEATURE(dc, VIS1);
4317 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4319 case 0x053: /* VIS I fpadd32s */
4320 CHECK_FPU_FEATURE(dc, VIS1);
4321 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4323 case 0x054: /* VIS I fpsub16 */
4324 CHECK_FPU_FEATURE(dc, VIS1);
4325 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4327 case 0x055: /* VIS I fpsub16s */
4328 CHECK_FPU_FEATURE(dc, VIS1);
4329 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4331 case 0x056: /* VIS I fpsub32 */
4332 CHECK_FPU_FEATURE(dc, VIS1);
4333 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4335 case 0x057: /* VIS I fpsub32s */
4336 CHECK_FPU_FEATURE(dc, VIS1);
4337 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4339 case 0x060: /* VIS I fzero */
4340 CHECK_FPU_FEATURE(dc, VIS1);
4341 cpu_dst_64 = gen_dest_fpr_D();
4342 tcg_gen_movi_i64(cpu_dst_64, 0);
4343 gen_store_fpr_D(dc, rd, cpu_dst_64);
4345 case 0x061: /* VIS I fzeros */
4346 CHECK_FPU_FEATURE(dc, VIS1);
4347 cpu_dst_32 = gen_dest_fpr_F();
4348 tcg_gen_movi_i32(cpu_dst_32, 0);
4349 gen_store_fpr_F(dc, rd, cpu_dst_32);
4351 case 0x062: /* VIS I fnor */
4352 CHECK_FPU_FEATURE(dc, VIS1);
4353 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4355 case 0x063: /* VIS I fnors */
4356 CHECK_FPU_FEATURE(dc, VIS1);
4357 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4359 case 0x064: /* VIS I fandnot2 */
4360 CHECK_FPU_FEATURE(dc, VIS1);
4361 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4363 case 0x065: /* VIS I fandnot2s */
4364 CHECK_FPU_FEATURE(dc, VIS1);
4365 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4367 case 0x066: /* VIS I fnot2 */
4368 CHECK_FPU_FEATURE(dc, VIS1);
4369 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4371 case 0x067: /* VIS I fnot2s */
4372 CHECK_FPU_FEATURE(dc, VIS1);
4373 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4375 case 0x068: /* VIS I fandnot1 */
4376 CHECK_FPU_FEATURE(dc, VIS1);
4377 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4379 case 0x069: /* VIS I fandnot1s */
4380 CHECK_FPU_FEATURE(dc, VIS1);
4381 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4383 case 0x06a: /* VIS I fnot1 */
4384 CHECK_FPU_FEATURE(dc, VIS1);
4385 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4387 case 0x06b: /* VIS I fnot1s */
4388 CHECK_FPU_FEATURE(dc, VIS1);
4389 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4391 case 0x06c: /* VIS I fxor */
4392 CHECK_FPU_FEATURE(dc, VIS1);
4393 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4395 case 0x06d: /* VIS I fxors */
4396 CHECK_FPU_FEATURE(dc, VIS1);
4397 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4399 case 0x06e: /* VIS I fnand */
4400 CHECK_FPU_FEATURE(dc, VIS1);
4401 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4403 case 0x06f: /* VIS I fnands */
4404 CHECK_FPU_FEATURE(dc, VIS1);
4405 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4407 case 0x070: /* VIS I fand */
4408 CHECK_FPU_FEATURE(dc, VIS1);
4409 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4411 case 0x071: /* VIS I fands */
4412 CHECK_FPU_FEATURE(dc, VIS1);
4413 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4415 case 0x072: /* VIS I fxnor */
4416 CHECK_FPU_FEATURE(dc, VIS1);
4417 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4419 case 0x073: /* VIS I fxnors */
4420 CHECK_FPU_FEATURE(dc, VIS1);
4421 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4423 case 0x074: /* VIS I fsrc1 */
4424 CHECK_FPU_FEATURE(dc, VIS1);
4425 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4426 gen_store_fpr_D(dc, rd, cpu_src1_64);
4428 case 0x075: /* VIS I fsrc1s */
4429 CHECK_FPU_FEATURE(dc, VIS1);
4430 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4431 gen_store_fpr_F(dc, rd, cpu_src1_32);
4433 case 0x076: /* VIS I fornot2 */
4434 CHECK_FPU_FEATURE(dc, VIS1);
4435 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4437 case 0x077: /* VIS I fornot2s */
4438 CHECK_FPU_FEATURE(dc, VIS1);
4439 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4441 case 0x078: /* VIS I fsrc2 */
4442 CHECK_FPU_FEATURE(dc, VIS1);
4443 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4444 gen_store_fpr_D(dc, rd, cpu_src1_64);
4446 case 0x079: /* VIS I fsrc2s */
4447 CHECK_FPU_FEATURE(dc, VIS1);
4448 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4449 gen_store_fpr_F(dc, rd, cpu_src1_32);
4451 case 0x07a: /* VIS I fornot1 */
4452 CHECK_FPU_FEATURE(dc, VIS1);
4453 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4455 case 0x07b: /* VIS I fornot1s */
4456 CHECK_FPU_FEATURE(dc, VIS1);
4457 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4459 case 0x07c: /* VIS I for */
4460 CHECK_FPU_FEATURE(dc, VIS1);
4461 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4463 case 0x07d: /* VIS I fors */
4464 CHECK_FPU_FEATURE(dc, VIS1);
4465 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4467 case 0x07e: /* VIS I fone */
4468 CHECK_FPU_FEATURE(dc, VIS1);
4469 cpu_dst_64 = gen_dest_fpr_D();
4470 tcg_gen_movi_i64(cpu_dst_64, -1);
4471 gen_store_fpr_D(dc, rd, cpu_dst_64);
4473 case 0x07f: /* VIS I fones */
4474 CHECK_FPU_FEATURE(dc, VIS1);
4475 cpu_dst_32 = gen_dest_fpr_F();
4476 tcg_gen_movi_i32(cpu_dst_32, -1);
4477 gen_store_fpr_F(dc, rd, cpu_dst_32);
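/* The VIS logical ops above map directly onto TCG operations;
   the "not1"/"ornot1" variants just swap rs1 and rs2 relative to
   their "2" counterparts, and fzero/fone write constants without
   reading any source register. */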
4479 case 0x080: /* VIS I shutdown */
4480 case 0x081: /* VIS II siam */
4489 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4490 #ifdef TARGET_SPARC64
4495 #ifdef TARGET_SPARC64
4496 } else if (xop == 0x39) { /* V9 return */
4500 cpu_src1 = get_src1(dc, insn);
4501 if (IS_IMM) { /* immediate */
4502 simm = GET_FIELDs(insn, 19, 31);
4503 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4504 } else { /* register */
4505 rs2 = GET_FIELD(insn, 27, 31);
4507 cpu_src2 = gen_load_gpr(dc, rs2);
4508 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4510 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4513 gen_helper_restore(cpu_env);
4515 r_const = tcg_const_i32(3);
4516 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4517 tcg_temp_free_i32(r_const);
4518 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4519 dc->npc = DYNAMIC_PC;
4523 cpu_src1 = get_src1(dc, insn);
4524 if (IS_IMM) { /* immediate */
4525 simm = GET_FIELDs(insn, 19, 31);
4526 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4527 } else { /* register */
4528 rs2 = GET_FIELD(insn, 27, 31);
4530 cpu_src2 = gen_load_gpr(dc, rs2);
4531 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4533 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4537 case 0x38: /* jmpl */
4542 t = gen_dest_gpr(dc, rd);
4543 tcg_gen_movi_tl(t, dc->pc);
4544 gen_store_gpr(dc, rd, t);
4546 r_const = tcg_const_i32(3);
4547 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4548 tcg_temp_free_i32(r_const);
4549 gen_address_mask(dc, cpu_dst);
4550 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4551 dc->npc = DYNAMIC_PC;
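/* jmpl: rd receives the address of the jmpl itself, the computed
   target is checked for 4-byte alignment and masked in 32-bit
   address mode, and npc becomes dynamic. */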
4554 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4555 case 0x39: /* rett, V9 return */
4559 if (!supervisor(dc))
4562 r_const = tcg_const_i32(3);
4563 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4564 tcg_temp_free_i32(r_const);
4565 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4566 dc->npc = DYNAMIC_PC;
4567 gen_helper_rett(cpu_env);
4571 case 0x3b: /* flush */
4572 if (!(dc->def->features & CPU_FEATURE_FLUSH))
4576 case 0x3c: /* save */
4578 gen_helper_save(cpu_env);
4579 gen_store_gpr(dc, rd, cpu_dst);
4581 case 0x3d: /* restore */
4583 gen_helper_restore(cpu_env);
4584 gen_store_gpr(dc, rd, cpu_dst);
4586 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4587 case 0x3e: /* V9 done/retry */
4591 if (!supervisor(dc))
4593 dc->npc = DYNAMIC_PC;
4594 dc->pc = DYNAMIC_PC;
4595 gen_helper_done(cpu_env);
4598 if (!supervisor(dc))
4600 dc->npc = DYNAMIC_PC;
4601 dc->pc = DYNAMIC_PC;
4602 gen_helper_retry(cpu_env);
4617 case 3: /* load/store instructions */
4619 unsigned int xop = GET_FIELD(insn, 7, 12);
4621 cpu_src1 = get_src1(dc, insn);
4622 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4623 rs2 = GET_FIELD(insn, 27, 31);
4624 cpu_src2 = gen_load_gpr(dc, rs2);
4625 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4626 } else if (IS_IMM) { /* immediate */
4627 simm = GET_FIELDs(insn, 19, 31);
4628 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4629 } else { /* register */
4630 rs2 = GET_FIELD(insn, 27, 31);
4632 cpu_src2 = gen_load_gpr(dc, rs2);
4633 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4635 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4638 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4639 (xop > 0x17 && xop <= 0x1d ) ||
4640 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4642 case 0x0: /* ld, V9 lduw, load unsigned word */
4643 gen_address_mask(dc, cpu_addr);
4644 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4646 case 0x1: /* ldub, load unsigned byte */
4647 gen_address_mask(dc, cpu_addr);
4648 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4650 case 0x2: /* lduh, load unsigned halfword */
4651 gen_address_mask(dc, cpu_addr);
4652 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4654 case 0x3: /* ldd, load double word */
4661 r_const = tcg_const_i32(7);
4662 /* XXX remove alignment check */
4663 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4664 tcg_temp_free_i32(r_const);
4665 gen_address_mask(dc, cpu_addr);
4666 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4667 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4668 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4669 gen_store_gpr(dc, rd + 1, cpu_tmp0);
4670 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4671 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4672 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
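/* ldd: one 64-bit load split big-endian style: the low word
   (from addr+4) is stored to rd+1 here, while the high word
   (from addr) is left in cpu_val for the common store to the
   even register rd below. */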
4675 case 0x9: /* ldsb, load signed byte */
4676 gen_address_mask(dc, cpu_addr);
4677 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4679 case 0xa: /* ldsh, load signed halfword */
4680 gen_address_mask(dc, cpu_addr);
4681 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4683 case 0xd: /* ldstub -- XXX: should be atomic */
4687 gen_address_mask(dc, cpu_addr);
4688 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4689 r_const = tcg_const_tl(0xff);
4690 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4691 tcg_temp_free(r_const);
4694 case 0x0f: /* swap, swap register with memory -- XXX: should be atomic */
4696 CHECK_IU_FEATURE(dc, SWAP);
4697 cpu_src1 = gen_load_gpr(dc, rd);
4698 gen_address_mask(dc, cpu_addr);
4699 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4700 tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4701 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4703 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4704 case 0x10: /* lda, V9 lduwa, load word alternate */
4705 #ifndef TARGET_SPARC64
4708 if (!supervisor(dc))
4712 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4714 case 0x11: /* lduba, load unsigned byte alternate */
4715 #ifndef TARGET_SPARC64
4718 if (!supervisor(dc))
4722 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4724 case 0x12: /* lduha, load unsigned halfword alternate */
4725 #ifndef TARGET_SPARC64
4728 if (!supervisor(dc))
4732 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4734 case 0x13: /* ldda, load double word alternate */
4735 #ifndef TARGET_SPARC64
4738 if (!supervisor(dc))
4744 gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4746 case 0x19: /* ldsba, load signed byte alternate */
4747 #ifndef TARGET_SPARC64
4750 if (!supervisor(dc))
4754 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4756 case 0x1a: /* ldsha, load signed halfword alternate */
4757 #ifndef TARGET_SPARC64
4760 if (!supervisor(dc))
4764 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4766 case 0x1d: /* ldstuba -- XXX: should be atomic */
4767 #ifndef TARGET_SPARC64
4770 if (!supervisor(dc))
4774 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4776 case 0x1f: /* swapa, swap register with alternate memory -- XXX: should be atomic */
4778 CHECK_IU_FEATURE(dc, SWAP);
4779 #ifndef TARGET_SPARC64
4782 if (!supervisor(dc))
4786 cpu_src1 = gen_load_gpr(dc, rd);
4787 gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4790 #ifndef TARGET_SPARC64
4791 case 0x30: /* ldc */
4792 case 0x31: /* ldcsr */
4793 case 0x33: /* lddc */
4797 #ifdef TARGET_SPARC64
4798 case 0x08: /* V9 ldsw */
4799 gen_address_mask(dc, cpu_addr);
4800 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4802 case 0x0b: /* V9 ldx */
4803 gen_address_mask(dc, cpu_addr);
4804 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4806 case 0x18: /* V9 ldswa */
4808 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4810 case 0x1b: /* V9 ldxa */
4812 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4814 case 0x2d: /* V9 prefetch, no effect */
4816 case 0x30: /* V9 ldfa */
4817 if (gen_trap_ifnofpu(dc)) {
4821 gen_ldf_asi(cpu_addr, insn, 4, rd);
4822 gen_update_fprs_dirty(rd);
4824 case 0x33: /* V9 lddfa */
4825 if (gen_trap_ifnofpu(dc)) {
4829 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4830 gen_update_fprs_dirty(DFPREG(rd));
4832 case 0x3d: /* V9 prefetcha, no effect */
4834 case 0x32: /* V9 ldqfa */
4835 CHECK_FPU_FEATURE(dc, FLOAT128);
4836 if (gen_trap_ifnofpu(dc)) {
4840 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4841 gen_update_fprs_dirty(QFPREG(rd));
4847 gen_store_gpr(dc, rd, cpu_val);
4848 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4851 } else if (xop >= 0x20 && xop < 0x24) {
4852 if (gen_trap_ifnofpu(dc)) {
4857 case 0x20: /* ldf, load fpreg */
4858 gen_address_mask(dc, cpu_addr);
4859 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4860 cpu_dst_32 = gen_dest_fpr_F();
4861 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4862 gen_store_fpr_F(dc, rd, cpu_dst_32);
4864 case 0x21: /* ldfsr, V9 ldxfsr */
4865 #ifdef TARGET_SPARC64
4866 gen_address_mask(dc, cpu_addr);
4868 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4869 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4871 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4872 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4873 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4877 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4878 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4882 case 0x22: /* ldqf, load quad fpreg */
4886 CHECK_FPU_FEATURE(dc, FLOAT128);
4887 r_const = tcg_const_i32(dc->mem_idx);
4888 gen_address_mask(dc, cpu_addr);
4889 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4890 tcg_temp_free_i32(r_const);
4891 gen_op_store_QT0_fpr(QFPREG(rd));
4892 gen_update_fprs_dirty(QFPREG(rd));
4895 case 0x23: /* lddf, load double fpreg */
4896 gen_address_mask(dc, cpu_addr);
4897 cpu_dst_64 = gen_dest_fpr_D();
4898 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4899 gen_store_fpr_D(dc, rd, cpu_dst_64);
4904 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4905 xop == 0xe || xop == 0x1e) {
4906 gen_movl_reg_TN(rd, cpu_val);
4908 case 0x4: /* st, store word */
4909 gen_address_mask(dc, cpu_addr);
4910 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4912 case 0x5: /* stb, store byte */
4913 gen_address_mask(dc, cpu_addr);
4914 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4916 case 0x6: /* sth, store halfword */
4917 gen_address_mask(dc, cpu_addr);
4918 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4920 case 0x7: /* std, store double word */
4927 gen_address_mask(dc, cpu_addr);
4928 r_const = tcg_const_i32(7);
4929 /* XXX remove alignment check */
4930 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4931 tcg_temp_free_i32(r_const);
4932 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4933 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4934 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
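/* std: rd+1 supplies the low 32 bits and rd the high 32 bits of
   a single 64-bit big-endian store, so rd lands at addr and rd+1
   at addr+4. */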
4937 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4938 case 0x14: /* sta, V9 stwa, store word alternate */
4939 #ifndef TARGET_SPARC64
4942 if (!supervisor(dc))
4946 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4947 dc->npc = DYNAMIC_PC;
4949 case 0x15: /* stba, store byte alternate */
4950 #ifndef TARGET_SPARC64
4953 if (!supervisor(dc))
4957 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4958 dc->npc = DYNAMIC_PC;
4960 case 0x16: /* stha, store halfword alternate */
4961 #ifndef TARGET_SPARC64
4964 if (!supervisor(dc))
4968 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4969 dc->npc = DYNAMIC_PC;
4971 case 0x17: /* stda, store double word alternate */
4972 #ifndef TARGET_SPARC64
4975 if (!supervisor(dc))
4982 gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
4986 #ifdef TARGET_SPARC64
4987 case 0x0e: /* V9 stx */
4988 gen_address_mask(dc, cpu_addr);
4989 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4991 case 0x1e: /* V9 stxa */
4993 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4994 dc->npc = DYNAMIC_PC;
5000 } else if (xop > 0x23 && xop < 0x28) {
5001 if (gen_trap_ifnofpu(dc)) {
5006 case 0x24: /* stf, store fpreg */
5007 gen_address_mask(dc, cpu_addr);
5008 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5009 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
5010 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
5012 case 0x25: /* stfsr, V9 stxfsr */
5013 #ifdef TARGET_SPARC64
5014 gen_address_mask(dc, cpu_addr);
5015 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
5017 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5019 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
5021 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
5022 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
5026 #ifdef TARGET_SPARC64
5027 /* V9 stqf, store quad fpreg */
5031 CHECK_FPU_FEATURE(dc, FLOAT128);
5032 gen_op_load_fpr_QT0(QFPREG(rd));
5033 r_const = tcg_const_i32(dc->mem_idx);
5034 gen_address_mask(dc, cpu_addr);
5035 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5036 tcg_temp_free_i32(r_const);
5039 #else /* !TARGET_SPARC64 */
5040 /* stdfq, store floating point queue */
5041 #if defined(CONFIG_USER_ONLY)
5044 if (!supervisor(dc))
5046 if (gen_trap_ifnofpu(dc)) {
5052 case 0x27: /* stdf, store double fpreg */
5053 gen_address_mask(dc, cpu_addr);
5054 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5055 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5060 } else if (xop > 0x33 && xop < 0x3f) {
5063 #ifdef TARGET_SPARC64
5064 case 0x34: /* V9 stfa */
5065 if (gen_trap_ifnofpu(dc)) {
5068 gen_stf_asi(cpu_addr, insn, 4, rd);
5070 case 0x36: /* V9 stqfa */
5074 CHECK_FPU_FEATURE(dc, FLOAT128);
5075 if (gen_trap_ifnofpu(dc)) {
5078 r_const = tcg_const_i32(7);
5079 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5080 tcg_temp_free_i32(r_const);
5081 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5084 case 0x37: /* V9 stdfa */
5085 if (gen_trap_ifnofpu(dc)) {
5088 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5090 case 0x3c: /* V9 casa */
5091 gen_cas_asi(dc, cpu_val, cpu_addr, cpu_src2, insn, rd);
5092 gen_store_gpr(dc, rd, cpu_val);
5094 case 0x3e: /* V9 casxa */
5095 gen_casx_asi(dc, cpu_val, cpu_addr, cpu_src2, insn, rd);
5096 gen_store_gpr(dc, rd, cpu_val);
5099 case 0x34: /* stc */
5100 case 0x35: /* stcsr */
5101 case 0x36: /* stdcq */
5102 case 0x37: /* stdc */
5113 /* default case for non-jump instructions */
5114 if (dc->npc == DYNAMIC_PC) {
5115 dc->pc = DYNAMIC_PC;
5117 } else if (dc->npc == JUMP_PC) {
5118 /* we can do a static jump */
5119 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5123 dc->npc = dc->npc + 4;
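/* Normal fall-through: model the SPARC delay-slot pipeline by
   advancing pc to the old npc and npc by one instruction. */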
5132 r_const = tcg_const_i32(TT_ILL_INSN);
5133 gen_helper_raise_exception(cpu_env, r_const);
5134 tcg_temp_free_i32(r_const);
5143 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5144 gen_helper_raise_exception(cpu_env, r_const);
5145 tcg_temp_free_i32(r_const);
5149 #if !defined(CONFIG_USER_ONLY)
5155 r_const = tcg_const_i32(TT_PRIV_INSN);
5156 gen_helper_raise_exception(cpu_env, r_const);
5157 tcg_temp_free_i32(r_const);
5164 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5167 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5170 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5174 #ifndef TARGET_SPARC64
5180 r_const = tcg_const_i32(TT_NCP_INSN);
5181 gen_helper_raise_exception(cpu_env, r_const);
5182 tcg_temp_free_i32(r_const);
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
    if (dc->n_t32 != 0) {
        int i;
        for (i = dc->n_t32 - 1; i >= 0; --i) {
            tcg_temp_free_i32(dc->t32[i]);
        }
        dc->n_t32 = 0;
    }
    if (dc->n_ttl != 0) {
        int i;
        for (i = dc->n_ttl - 1; i >= 0; --i) {
            tcg_temp_free(dc->ttl[i]);
        }
        dc->n_ttl = 0;
    }
}
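
/* Translation driver: decode guest instructions starting at tb->pc and
   emit TCG ops until the block must end.  'spc' selects the search-pc
   mode used to rebuild guest pc/npc after a fault (see
   restore_state_to_opc below). */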
static inline void gen_intermediate_code_internal(TranslationBlock *tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    unsigned int insn;
    int j, lj = -1;
    int num_insns = 0, max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc);
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
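        /* With icount, a block whose last instruction performs I/O must
           make that access its final operation; the gen_io_start() here
           pairs with the gen_io_end() emitted after the loop. */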
        last_pc = dc->pc;
        insn = cpu_ldl_code(env, dc->pc);

        cpu_tmp0 = tcg_temp_new();
        cpu_tmp32 = tcg_temp_new_i32();
        cpu_tmp64 = tcg_temp_new_i64();
        cpu_dst = tcg_temp_new();
        cpu_val = tcg_temp_new();
        cpu_addr = tcg_temp_new();

        disas_sparc_insn(dc, insn);
        num_insns++;

        tcg_temp_free(cpu_addr);
        tcg_temp_free(cpu_val);
        tcg_temp_free(cpu_dst);
        tcg_temp_free_i64(cpu_tmp64);
        tcg_temp_free_i32(cpu_tmp32);
        tcg_temp_free(cpu_tmp0);

        if (dc->is_br) {
            break;
        }
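        /* The six cpu_* temporaries are deliberately allocated and
           freed around each disas_sparc_insn() call rather than once
           per block, so the TCG temp pool stays bounded however long
           the translation block grows. */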
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4)) {
            break;
        }
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0) {
            break;
        }
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
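    /* Three independent budgets bound a block: space in the opcode
       buffer (gen_opc_end), a guest-PC window just under one page, and
       the icount allowance max_insns. */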
 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
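    /* Note on the two exits above: gen_goto_tb() lets the successor TB
       be patched in directly once it is translated (TB chaining), while
       the exit_tb(0) path returns to the main loop, which looks the
       next TB up from cpu_pc/cpu_npc. */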
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

/* search-pc variant: records per-op pc/npc so a faulting host pc can be
   mapped back to guest state */
void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
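
/* One-time TCG setup: register every CPUSPARCState field the translator
   touches as a named TCG global, then register the helpers. */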
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[32] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };
    /* init various static tables */
    if (inited) {
        return;
    }
    inited = 1;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");
#ifdef TARGET_SPARC64
    cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
                                     "xcc");
    cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
                                     "asi");
    cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
                                      "fprs");
    cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
                                 "gsr");
    cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUSPARCState, tick_cmpr),
                                       "tick_cmpr");
    cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUSPARCState, stick_cmpr),
                                        "stick_cmpr");
    cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUSPARCState, hstick_cmpr),
                                         "hstick_cmpr");
    cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
                                   "hintp");
    cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
                                  "htba");
    cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
                                  "hver");
    cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUSPARCState, ssr), "ssr");
    cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUSPARCState, version), "ver");
    cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                         offsetof(CPUSPARCState, softint),
                                         "softint");
#else
    cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
                                 "wim");
#endif
    cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
                                  "cond");
    cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
                                    "cc_src");
    cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, cc_src2),
                                     "cc_src2");
    cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
                                    "cc_dst");
    cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
                                       "cc_op");
    cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
                                     "psr");
    cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
                                 "fsr");
    cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
                                "pc");
    cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
                                 "npc");
    cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
#ifndef CONFIG_USER_ONLY
    cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
                                 "tbr");
#endif
    for (i = 1; i < 8; i++) {
        cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                          offsetof(CPUSPARCState, gregs[i]),
                                          gregnames[i]);
    }
    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"
}
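
/* Map a position in the generated op stream back to guest pc/npc; the
   gen_opc_pc[]/gen_opc_npc[] arrays were filled in by the search-pc
   pass (spc != 0) above. */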
void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == DYNAMIC_PC) {
        /* dynamic NPC: already stored */
    } else if (npc == JUMP_PC) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }
}