5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* local register indexes (only used inside old micro ops) */
62 static TCGv cpu_tmp0;
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
71 #include "gen-icount.h"
73 typedef struct DisasContext {
74 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc[2]; /* used when npc == JUMP_PC */
77 int is_br;
78 int mem_idx;
79 int fpu_enabled;
80 int address_mask_32bit;
81 int singlestep;
82 uint32_t cc_op; /* current CC operation */
83 struct TranslationBlock *tb;
84 sparc_def_t *def;
85 TCGv_i32 t32[3];
86 int n_t32;
87 } DisasContext;
96 // This macro extracts a bit field using the non-native bit order (bit 0 is the MSB)
97 #define GET_FIELD(X, FROM, TO) \
98 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
100 // This macro uses the order in the manuals, i.e. bit 0 is 2^0 (the LSB)
101 #define GET_FIELD_SP(X, FROM, TO) \
102 GET_FIELD(X, 31 - (TO), 31 - (FROM))
104 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
105 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
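/* Editor's example (not from the original file): the two bit orders on a
   concrete word. For insn == 0x03000004, i.e. "sethi 4, %g1", the rd field
   sits in bits 29..25, which is FROM=2, TO=6 in GET_FIELD's MSB-first
   numbering. The function name below is illustrative only. */
static inline void example_field_orders(void)
{
    unsigned int insn = 0x03000004;
    unsigned int rd    = GET_FIELD(insn, 2, 6);     /* == 1, i.e. %g1 */
    unsigned int imm22 = GET_FIELD_SP(insn, 0, 21); /* == 4, LSB-first */
    (void)rd; (void)imm22;
}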
107 #ifdef TARGET_SPARC64
108 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
109 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
110 #else
111 #define DFPREG(r) (r & 0x1e)
112 #define QFPREG(r) (r & 0x1c)
113 #endif
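/* Editor's note (not from the original file): on SPARC64 these macros undo
   the V9 extended-register encoding, where the LSB of the 5-bit field acts
   as bit 5 of the double/quad register number. Worked example: a field
   value of 3 (0b00011) names %f34, since DFPREG(3) == ((3 & 1) << 5) |
   (3 & 0x1e) == 32 | 2 == 34. On 32-bit targets the odd bit is discarded. */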
115 #define UA2005_HTRAP_MASK 0xff
116 #define V8_TRAP_MASK 0x7f
118 static int sign_extend(int x, int len)
119 {
120 len = 32 - len;
121 return (x << len) >> len;
122 }
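/* Editor's example (not from the original file): sign_extend(0x1fff, 13)
   == -1 while sign_extend(0x0abc, 13) == 0x0abc; shifting the field to the
   top of the 32-bit word and back arithmetically replicates bit len-1. */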
124 #define IS_IMM (insn & (1<<13))
126 static inline void gen_update_fprs_dirty(int rd)
127 {
128 #if defined(TARGET_SPARC64)
129 tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
130 #endif
131 }
133 /* floating point registers moves */
134 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
136 #if TCG_TARGET_REG_BITS == 32
138 return TCGV_LOW(cpu_fpr[src / 2]);
140 return TCGV_HIGH(cpu_fpr[src / 2]);
144 return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
146 TCGv_i32 ret = tcg_temp_local_new_i32();
147 TCGv_i64 t = tcg_temp_new_i64();
149 tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
150 tcg_gen_trunc_i64_i32(ret, t);
151 tcg_temp_free_i64(t);
153 dc->t32[dc->n_t32++] = ret;
154 assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));
161 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
163 #if TCG_TARGET_REG_BITS == 32
165 tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
167 tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
170 TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
171 tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
172 (dst & 1 ? 0 : 32), 32);
174 gen_update_fprs_dirty(dst);
177 static TCGv_i32 gen_dest_fpr_F(void)
182 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
185 return cpu_fpr[src / 2];
188 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
191 tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
192 gen_update_fprs_dirty(dst);
195 static TCGv_i64 gen_dest_fpr_D(void)
200 static void gen_op_load_fpr_QT0(unsigned int src)
202 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
203 offsetof(CPU_QuadU, ll.upper));
204 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
205 offsetof(CPU_QuadU, ll.lower));
208 static void gen_op_load_fpr_QT1(unsigned int src)
210 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
211 offsetof(CPU_QuadU, ll.upper));
212 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
213 offsetof(CPU_QuadU, ll.lower));
216 static void gen_op_store_QT0_fpr(unsigned int dst)
218 tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
219 offsetof(CPU_QuadU, ll.upper));
220 tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
221 offsetof(CPU_QuadU, ll.lower));
224 #ifdef TARGET_SPARC64
225 static void gen_move_Q(unsigned int rd, unsigned int rs)
230 tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
231 tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
232 gen_update_fprs_dirty(rd);
237 #ifdef CONFIG_USER_ONLY
238 #define supervisor(dc) 0
239 #ifdef TARGET_SPARC64
240 #define hypervisor(dc) 0
241 #endif
242 #else
243 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
244 #ifdef TARGET_SPARC64
245 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
246 #endif
247 #endif
250 #ifdef TARGET_SPARC64
251 #ifndef TARGET_ABI32
252 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
253 #else
254 #define AM_CHECK(dc) (1)
255 #endif
256 #endif
258 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
259 {
260 #ifdef TARGET_SPARC64
261 if (AM_CHECK(dc))
262 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
263 #endif
264 }
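/* Editor's note (not from the original file): this implements V9 PSTATE.AM,
   32-bit address masking on a 64-bit CPU; with AM set, an address such as
   0xffffffff80001000 is truncated to 0x80001000 before use. AM_CHECK is
   evaluated at translation time, so no TCG op is emitted when it is false. */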
266 static inline void gen_movl_reg_TN(int reg, TCGv tn)
267 {
268 if (reg == 0)
269 tcg_gen_movi_tl(tn, 0);
270 else if (reg < 8)
271 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
272 else {
273 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
274 }
275 }
277 static inline void gen_movl_TN_reg(int reg, TCGv tn)
278 {
279 if (reg == 0)
280 return;
281 else if (reg < 8)
282 tcg_gen_mov_tl(cpu_gregs[reg], tn);
283 else {
284 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
285 }
286 }
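/* Editor's sketch (not from the original file): %g0..%g7 live in the TCG
   globals cpu_gregs[] (with %g0 forced to zero), while the current window's
   %o/%l/%i registers are reached indirectly through cpu_regwptr. Reading
   %l0, register 16, is therefore a load at (16 - 8) * sizeof(target_ulong).
   The helper name below is illustrative only. */
static inline void example_read_local0(TCGv dst)
{
    gen_movl_reg_TN(16, dst); /* takes the cpu_regwptr branch above */
}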
288 static inline void gen_goto_tb(DisasContext *s, int tb_num,
289 target_ulong pc, target_ulong npc)
291 TranslationBlock *tb;
294 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
295 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
297 /* jump to same page: we can use a direct jump */
298 tcg_gen_goto_tb(tb_num);
299 tcg_gen_movi_tl(cpu_pc, pc);
300 tcg_gen_movi_tl(cpu_npc, npc);
301 tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
303 /* jump to another page: currently not optimized */
304 tcg_gen_movi_tl(cpu_pc, pc);
305 tcg_gen_movi_tl(cpu_npc, npc);
306 tcg_gen_exit_tb(0);
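/* Editor's note (not from the original file): tb_num (0 or 1) selects which
   of the two chaining slots of the current TB gets patched, and the value
   returned through tcg_gen_exit_tb() tells the execution loop which slot to
   link. A conditional branch emits one call per arm, e.g. (hypothetical):

       gen_goto_tb(dc, 0, taken_pc, taken_pc + 4);   // branch taken
       gen_set_label(l1);
       gen_goto_tb(dc, 1, dc->npc, dc->npc + 4);     // fall through

   which is exactly the shape of gen_branch2() further below. */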
311 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
313 tcg_gen_extu_i32_tl(reg, src);
314 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
315 tcg_gen_andi_tl(reg, reg, 0x1);
318 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
320 tcg_gen_extu_i32_tl(reg, src);
321 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
322 tcg_gen_andi_tl(reg, reg, 0x1);
325 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
327 tcg_gen_extu_i32_tl(reg, src);
328 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
329 tcg_gen_andi_tl(reg, reg, 0x1);
332 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
334 tcg_gen_extu_i32_tl(reg, src);
335 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
336 tcg_gen_andi_tl(reg, reg, 0x1);
339 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
341 tcg_gen_mov_tl(cpu_cc_src, src1);
342 tcg_gen_movi_tl(cpu_cc_src2, src2);
343 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
344 tcg_gen_mov_tl(dst, cpu_cc_dst);
347 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
349 tcg_gen_mov_tl(cpu_cc_src, src1);
350 tcg_gen_mov_tl(cpu_cc_src2, src2);
351 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
352 tcg_gen_mov_tl(dst, cpu_cc_dst);
355 static TCGv_i32 gen_add32_carry32(void)
357 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
359 /* Carry is computed from a previous add: (dst < src) */
360 #if TARGET_LONG_BITS == 64
361 cc_src1_32 = tcg_temp_new_i32();
362 cc_src2_32 = tcg_temp_new_i32();
363 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
364 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
366 cc_src1_32 = cpu_cc_dst;
367 cc_src2_32 = cpu_cc_src;
370 carry_32 = tcg_temp_new_i32();
371 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
373 #if TARGET_LONG_BITS == 64
374 tcg_temp_free_i32(cc_src1_32);
375 tcg_temp_free_i32(cc_src2_32);
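/* Editor's sketch (not from the original file): the "(dst < src)" trick in
   plain C. For an unsigned 32-bit add that has already wrapped, carry-out
   of bit 31 is exactly "the truncated sum is less than an operand": */
static inline uint32_t example_add_carry32(uint32_t src, uint32_t src2)
{
    uint32_t dst = src + src2; /* wraps mod 2^32 */
    return dst < src;          /* 1 iff the addition carried */
}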
381 static TCGv_i32 gen_sub32_carry32(void)
383 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
385 /* Carry is computed from a previous borrow: (src1 < src2) */
386 #if TARGET_LONG_BITS == 64
387 cc_src1_32 = tcg_temp_new_i32();
388 cc_src2_32 = tcg_temp_new_i32();
389 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
390 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
392 cc_src1_32 = cpu_cc_src;
393 cc_src2_32 = cpu_cc_src2;
396 carry_32 = tcg_temp_new_i32();
397 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
399 #if TARGET_LONG_BITS == 64
400 tcg_temp_free_i32(cc_src1_32);
401 tcg_temp_free_i32(cc_src2_32);
407 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
408 TCGv src2, int update_cc)
416 /* Carry is known to be zero. Fall back to plain ADD. */
418 gen_op_add_cc(dst, src1, src2);
420 tcg_gen_add_tl(dst, src1, src2);
427 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
429 /* For 32-bit hosts, we can re-use the host's hardware carry
430 generation by using an ADD2 opcode. We discard the low
431 part of the output. Ideally we'd combine this operation
432 with the add that generated the carry in the first place. */
433 TCGv dst_low = tcg_temp_new();
434 tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
435 cpu_cc_src, src1, cpu_cc_src2, src2);
436 tcg_temp_free(dst_low);
440 carry_32 = gen_add32_carry32();
446 carry_32 = gen_sub32_carry32();
450 /* We need external help to produce the carry. */
451 carry_32 = tcg_temp_new_i32();
452 gen_helper_compute_C_icc(carry_32, cpu_env);
456 #if TARGET_LONG_BITS == 64
457 carry = tcg_temp_new();
458 tcg_gen_extu_i32_i64(carry, carry_32);
463 tcg_gen_add_tl(dst, src1, src2);
464 tcg_gen_add_tl(dst, dst, carry);
466 tcg_temp_free_i32(carry_32);
467 #if TARGET_LONG_BITS == 64
468 tcg_temp_free(carry);
471 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
475 tcg_gen_mov_tl(cpu_cc_src, src1);
476 tcg_gen_mov_tl(cpu_cc_src2, src2);
477 tcg_gen_mov_tl(cpu_cc_dst, dst);
478 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
479 dc->cc_op = CC_OP_ADDX;
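/* Editor's sketch (not from the original file): all of the above serves the
   simple architectural definition of ADDX/ADDXcc, rd = rs1 + rs2 + icc.C: */
static inline uint32_t example_addx(uint32_t rs1, uint32_t rs2, uint32_t c)
{
    return rs1 + rs2 + (c & 1); /* c is the carry from the previous op */
}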
483 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
485 tcg_gen_mov_tl(cpu_cc_src, src1);
486 tcg_gen_movi_tl(cpu_cc_src2, src2);
488 tcg_gen_mov_tl(cpu_cc_dst, src1);
489 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
490 dc->cc_op = CC_OP_LOGIC;
492 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
493 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
494 dc->cc_op = CC_OP_SUB;
496 tcg_gen_mov_tl(dst, cpu_cc_dst);
499 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
501 tcg_gen_mov_tl(cpu_cc_src, src1);
502 tcg_gen_mov_tl(cpu_cc_src2, src2);
503 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
504 tcg_gen_mov_tl(dst, cpu_cc_dst);
507 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
508 TCGv src2, int update_cc)
516 /* Carry is known to be zero. Fall back to plain SUB. */
518 gen_op_sub_cc(dst, src1, src2);
520 tcg_gen_sub_tl(dst, src1, src2);
527 carry_32 = gen_add32_carry32();
533 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
535 /* For 32-bit hosts, we can re-use the host's hardware carry
536 generation by using a SUB2 opcode. We discard the low
537 part of the output. Ideally we'd combine this operation
538 with the subtract that generated the carry in the first place. */
539 TCGv dst_low = tcg_temp_new();
540 tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
541 cpu_cc_src, src1, cpu_cc_src2, src2);
542 tcg_temp_free(dst_low);
546 carry_32 = gen_sub32_carry32();
550 /* We need external help to produce the carry. */
551 carry_32 = tcg_temp_new_i32();
552 gen_helper_compute_C_icc(carry_32, cpu_env);
556 #if TARGET_LONG_BITS == 64
557 carry = tcg_temp_new();
558 tcg_gen_extu_i32_i64(carry, carry_32);
563 tcg_gen_sub_tl(dst, src1, src2);
564 tcg_gen_sub_tl(dst, dst, carry);
566 tcg_temp_free_i32(carry_32);
567 #if TARGET_LONG_BITS == 64
568 tcg_temp_free(carry);
571 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
575 tcg_gen_mov_tl(cpu_cc_src, src1);
576 tcg_gen_mov_tl(cpu_cc_src2, src2);
577 tcg_gen_mov_tl(cpu_cc_dst, dst);
578 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
579 dc->cc_op = CC_OP_SUBX;
583 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
587 r_temp = tcg_temp_new();
593 zero = tcg_const_tl(0);
594 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
595 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
596 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
597 tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
602 // env->y = (b2 << 31) | (env->y >> 1);
603 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
604 tcg_gen_shli_tl(r_temp, r_temp, 31);
605 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
606 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
607 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
608 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
611 gen_mov_reg_N(cpu_tmp0, cpu_psr);
612 gen_mov_reg_V(r_temp, cpu_psr);
613 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
614 tcg_temp_free(r_temp);
616 // T0 = (b1 << 31) | (T0 >> 1);
618 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
619 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
620 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
622 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
624 tcg_gen_mov_tl(dst, cpu_cc_dst);
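/* Editor's sketch (not from the original file): one MULScc step in plain C,
   mirroring the TCG sequence above; n_xor_v is the current PSR N ^ V bit: */
static inline uint32_t example_mulscc_step(uint32_t rs1, uint32_t rs2,
                                           uint32_t *y, uint32_t n_xor_v)
{
    uint32_t op2 = (*y & 1) ? rs2 : 0;           /* Y's LSB gates rs2 */
    *y = ((rs1 & 1) << 31) | (*y >> 1);          /* shift rs1's LSB into Y */
    uint32_t op1 = (n_xor_v << 31) | (rs1 >> 1); /* shift N ^ V into rs1 */
    return op1 + op2;                            /* also sets icc (ADD) */
}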
627 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
629 TCGv_i32 r_src1, r_src2;
630 TCGv_i64 r_temp, r_temp2;
632 r_src1 = tcg_temp_new_i32();
633 r_src2 = tcg_temp_new_i32();
635 tcg_gen_trunc_tl_i32(r_src1, src1);
636 tcg_gen_trunc_tl_i32(r_src2, src2);
638 r_temp = tcg_temp_new_i64();
639 r_temp2 = tcg_temp_new_i64();
642 tcg_gen_ext_i32_i64(r_temp, r_src2);
643 tcg_gen_ext_i32_i64(r_temp2, r_src1);
645 tcg_gen_extu_i32_i64(r_temp, r_src2);
646 tcg_gen_extu_i32_i64(r_temp2, r_src1);
649 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
651 tcg_gen_shri_i64(r_temp, r_temp2, 32);
652 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
653 tcg_temp_free_i64(r_temp);
654 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
656 tcg_gen_trunc_i64_tl(dst, r_temp2);
658 tcg_temp_free_i64(r_temp2);
660 tcg_temp_free_i32(r_src1);
661 tcg_temp_free_i32(r_src2);
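/* Editor's sketch (not from the original file): the architectural semantics
   generated above, a 32x32->64 multiply whose high word lands in %y (on V9
   the destination register also keeps the full 64-bit product): */
static inline uint32_t example_umul(uint32_t a, uint32_t b, uint32_t *y)
{
    uint64_t prod = (uint64_t)a * b; /* SMUL: cast via (int64_t)(int32_t) */
    *y = (uint32_t)(prod >> 32);
    return (uint32_t)prod;
}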
664 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
666 /* zero-extend truncated operands before multiplication */
667 gen_op_multiply(dst, src1, src2, 0);
670 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
672 /* sign-extend truncated operands before multiplication */
673 gen_op_multiply(dst, src1, src2, 1);
677 static inline void gen_op_eval_ba(TCGv dst)
679 tcg_gen_movi_tl(dst, 1);
683 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
685 gen_mov_reg_Z(dst, src);
689 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
691 gen_mov_reg_N(cpu_tmp0, src);
692 gen_mov_reg_V(dst, src);
693 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
694 gen_mov_reg_Z(cpu_tmp0, src);
695 tcg_gen_or_tl(dst, dst, cpu_tmp0);
699 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
701 gen_mov_reg_V(cpu_tmp0, src);
702 gen_mov_reg_N(dst, src);
703 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
707 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
709 gen_mov_reg_Z(cpu_tmp0, src);
710 gen_mov_reg_C(dst, src);
711 tcg_gen_or_tl(dst, dst, cpu_tmp0);
715 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
717 gen_mov_reg_C(dst, src);
721 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
723 gen_mov_reg_V(dst, src);
727 static inline void gen_op_eval_bn(TCGv dst)
729 tcg_gen_movi_tl(dst, 0);
733 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
735 gen_mov_reg_N(dst, src);
739 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
741 gen_mov_reg_Z(dst, src);
742 tcg_gen_xori_tl(dst, dst, 0x1);
746 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
748 gen_mov_reg_N(cpu_tmp0, src);
749 gen_mov_reg_V(dst, src);
750 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
751 gen_mov_reg_Z(cpu_tmp0, src);
752 tcg_gen_or_tl(dst, dst, cpu_tmp0);
753 tcg_gen_xori_tl(dst, dst, 0x1);
757 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
759 gen_mov_reg_V(cpu_tmp0, src);
760 gen_mov_reg_N(dst, src);
761 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
762 tcg_gen_xori_tl(dst, dst, 0x1);
766 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
768 gen_mov_reg_Z(cpu_tmp0, src);
769 gen_mov_reg_C(dst, src);
770 tcg_gen_or_tl(dst, dst, cpu_tmp0);
771 tcg_gen_xori_tl(dst, dst, 0x1);
775 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
777 gen_mov_reg_C(dst, src);
778 tcg_gen_xori_tl(dst, dst, 0x1);
782 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
784 gen_mov_reg_N(dst, src);
785 tcg_gen_xori_tl(dst, dst, 0x1);
789 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
791 gen_mov_reg_V(dst, src);
792 tcg_gen_xori_tl(dst, dst, 0x1);
795 /*
796 FPSR bit field FCC1 | FCC0:
797 0 =
798 1 <
799 2 >
800 3 unordered
801 */
802 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
803 unsigned int fcc_offset)
805 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
806 tcg_gen_andi_tl(reg, reg, 0x1);
809 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
810 unsigned int fcc_offset)
812 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
813 tcg_gen_andi_tl(reg, reg, 0x1);
817 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
818 unsigned int fcc_offset)
820 gen_mov_reg_FCC0(dst, src, fcc_offset);
821 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
822 tcg_gen_or_tl(dst, dst, cpu_tmp0);
825 // 1 or 2: FCC0 ^ FCC1
826 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
827 unsigned int fcc_offset)
829 gen_mov_reg_FCC0(dst, src, fcc_offset);
830 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
831 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
835 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
836 unsigned int fcc_offset)
838 gen_mov_reg_FCC0(dst, src, fcc_offset);
842 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
843 unsigned int fcc_offset)
845 gen_mov_reg_FCC0(dst, src, fcc_offset);
846 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
847 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
848 tcg_gen_and_tl(dst, dst, cpu_tmp0);
852 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
853 unsigned int fcc_offset)
855 gen_mov_reg_FCC1(dst, src, fcc_offset);
859 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
860 unsigned int fcc_offset)
862 gen_mov_reg_FCC0(dst, src, fcc_offset);
863 tcg_gen_xori_tl(dst, dst, 0x1);
864 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
865 tcg_gen_and_tl(dst, dst, cpu_tmp0);
869 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
870 unsigned int fcc_offset)
872 gen_mov_reg_FCC0(dst, src, fcc_offset);
873 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
874 tcg_gen_and_tl(dst, dst, cpu_tmp0);
878 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
879 unsigned int fcc_offset)
881 gen_mov_reg_FCC0(dst, src, fcc_offset);
882 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
883 tcg_gen_or_tl(dst, dst, cpu_tmp0);
884 tcg_gen_xori_tl(dst, dst, 0x1);
887 // 0 or 3: !(FCC0 ^ FCC1)
888 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
889 unsigned int fcc_offset)
891 gen_mov_reg_FCC0(dst, src, fcc_offset);
892 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
893 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
894 tcg_gen_xori_tl(dst, dst, 0x1);
898 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
899 unsigned int fcc_offset)
901 gen_mov_reg_FCC0(dst, src, fcc_offset);
902 tcg_gen_xori_tl(dst, dst, 0x1);
905 // !1: !(FCC0 & !FCC1)
906 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
907 unsigned int fcc_offset)
909 gen_mov_reg_FCC0(dst, src, fcc_offset);
910 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
911 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
912 tcg_gen_and_tl(dst, dst, cpu_tmp0);
913 tcg_gen_xori_tl(dst, dst, 0x1);
917 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
918 unsigned int fcc_offset)
920 gen_mov_reg_FCC1(dst, src, fcc_offset);
921 tcg_gen_xori_tl(dst, dst, 0x1);
924 // !2: !(!FCC0 & FCC1)
925 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
926 unsigned int fcc_offset)
928 gen_mov_reg_FCC0(dst, src, fcc_offset);
929 tcg_gen_xori_tl(dst, dst, 0x1);
930 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
931 tcg_gen_and_tl(dst, dst, cpu_tmp0);
932 tcg_gen_xori_tl(dst, dst, 0x1);
935 // !3: !(FCC0 & FCC1)
936 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
937 unsigned int fcc_offset)
939 gen_mov_reg_FCC0(dst, src, fcc_offset);
940 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
941 tcg_gen_and_tl(dst, dst, cpu_tmp0);
942 tcg_gen_xori_tl(dst, dst, 0x1);
945 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
946 target_ulong pc2, TCGv r_cond)
950 l1 = gen_new_label();
952 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
954 gen_goto_tb(dc, 0, pc1, pc1 + 4);
957 gen_goto_tb(dc, 1, pc2, pc2 + 4);
960 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
961 target_ulong pc2, TCGv r_cond)
965 l1 = gen_new_label();
967 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
969 gen_goto_tb(dc, 0, pc2, pc1);
972 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
975 static inline void gen_generic_branch(DisasContext *dc)
977 TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
978 TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
979 TCGv zero = tcg_const_tl(0);
981 tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
988 /* call this function before using the condition register as it may
989 have been set for a jump */
990 static inline void flush_cond(DisasContext *dc)
992 if (dc->npc == JUMP_PC) {
993 gen_generic_branch(dc);
994 dc->npc = DYNAMIC_PC;
998 static inline void save_npc(DisasContext *dc)
1000 if (dc->npc == JUMP_PC) {
1001 gen_generic_branch(dc);
1002 dc->npc = DYNAMIC_PC;
1003 } else if (dc->npc != DYNAMIC_PC) {
1004 tcg_gen_movi_tl(cpu_npc, dc->npc);
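/* Editor's note (not from the original file): dc->npc is a three-state
   encoding: a concrete address known at translation time, DYNAMIC_PC
   (cpu_npc already holds the run-time value), or JUMP_PC (cpu_cond selects
   between dc->jump_pc[0] and dc->jump_pc[1], materialized lazily by
   gen_generic_branch). flush_cond and save_npc above collapse JUMP_PC back
   to DYNAMIC_PC before cpu_cond can be clobbered. */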
1008 static inline void save_state(DisasContext *dc)
1010 tcg_gen_movi_tl(cpu_pc, dc->pc);
1011 /* flush pending conditional evaluations before exposing cpu state */
1012 if (dc->cc_op != CC_OP_FLAGS) {
1013 dc->cc_op = CC_OP_FLAGS;
1014 gen_helper_compute_psr(cpu_env);
1015 }
1016 save_npc(dc);
1017 }
1019 static inline void gen_mov_pc_npc(DisasContext *dc)
1021 if (dc->npc == JUMP_PC) {
1022 gen_generic_branch(dc);
1023 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1024 dc->pc = DYNAMIC_PC;
1025 } else if (dc->npc == DYNAMIC_PC) {
1026 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1027 dc->pc = DYNAMIC_PC;
1028 } else {
1029 dc->pc = dc->npc;
1030 }
1031 }
1033 static inline void gen_op_next_insn(void)
1035 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1036 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1039 static void free_compare(DisasCompare *cmp)
1040 {
1041 if (!cmp->g1) {
1042 tcg_temp_free(cmp->c1);
1043 }
1044 if (!cmp->g2) {
1045 tcg_temp_free(cmp->c2);
1046 }
1047 }
1049 static void gen_compare(DisasCompare *cmp, unsigned int cc, unsigned int cond,
1055 /* For now we still generate a straight boolean result. */
1056 cmp->cond = TCG_COND_NE;
1057 cmp->is_bool = true;
1058 cmp->g1 = cmp->g2 = false;
1059 cmp->c1 = r_dst = tcg_temp_new();
1060 cmp->c2 = tcg_const_tl(0);
1062 #ifdef TARGET_SPARC64
1070 switch (dc->cc_op) {
1074 gen_helper_compute_psr(cpu_env);
1075 dc->cc_op = CC_OP_FLAGS;
1080 gen_op_eval_bn(r_dst);
1083 gen_op_eval_be(r_dst, r_src);
1086 gen_op_eval_ble(r_dst, r_src);
1089 gen_op_eval_bl(r_dst, r_src);
1092 gen_op_eval_bleu(r_dst, r_src);
1095 gen_op_eval_bcs(r_dst, r_src);
1098 gen_op_eval_bneg(r_dst, r_src);
1101 gen_op_eval_bvs(r_dst, r_src);
1104 gen_op_eval_ba(r_dst);
1107 gen_op_eval_bne(r_dst, r_src);
1110 gen_op_eval_bg(r_dst, r_src);
1113 gen_op_eval_bge(r_dst, r_src);
1116 gen_op_eval_bgu(r_dst, r_src);
1119 gen_op_eval_bcc(r_dst, r_src);
1122 gen_op_eval_bpos(r_dst, r_src);
1125 gen_op_eval_bvc(r_dst, r_src);
1130 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1132 unsigned int offset;
1135 /* For now we still generate a straight boolean result. */
1136 cmp->cond = TCG_COND_NE;
1137 cmp->is_bool = true;
1138 cmp->g1 = cmp->g2 = false;
1139 cmp->c1 = r_dst = tcg_temp_new();
1140 cmp->c2 = tcg_const_tl(0);
1160 gen_op_eval_bn(r_dst);
1163 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1166 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1169 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1172 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1175 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1178 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1181 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1184 gen_op_eval_ba(r_dst);
1187 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1190 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1193 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1196 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1199 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1202 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1205 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1210 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1214 gen_compare(&cmp, cc, cond, dc);
1216 /* The interface is to return a boolean in r_dst. */
1218 tcg_gen_mov_tl(r_dst, cmp.c1);
1220 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1226 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1229 gen_fcompare(&cmp, cc, cond);
1231 /* The interface is to return a boolean in r_dst. */
1233 tcg_gen_mov_tl(r_dst, cmp.c1);
1235 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1241 #ifdef TARGET_SPARC64
1243 static const int gen_tcg_cond_reg[8] = {
1254 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1256 cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1257 cmp->is_bool = false;
1258 cmp->g1 = true;
1259 cmp->g2 = false;
1260 cmp->c1 = r_src;
1261 cmp->c2 = tcg_const_tl(0);
1264 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1267 gen_compare_reg(&cmp, cond, r_src);
1269 /* The interface is to return a boolean in r_dst. */
1270 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1276 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1278 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1279 target_ulong target = dc->pc + offset;
1281 #ifdef TARGET_SPARC64
1282 if (unlikely(AM_CHECK(dc))) {
1283 target &= 0xffffffffULL;
1287 /* unconditional not taken */
1289 dc->pc = dc->npc + 4;
1290 dc->npc = dc->pc + 4;
1293 dc->npc = dc->pc + 4;
1295 } else if (cond == 0x8) {
1296 /* unconditional taken */
1299 dc->npc = dc->pc + 4;
1303 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1307 gen_cond(cpu_cond, cc, cond, dc);
1309 gen_branch_a(dc, target, dc->npc, cpu_cond);
1313 dc->jump_pc[0] = target;
1314 if (unlikely(dc->npc == DYNAMIC_PC)) {
1315 dc->jump_pc[1] = DYNAMIC_PC;
1316 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1318 dc->jump_pc[1] = dc->npc + 4;
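/* Editor's summary (not from the original file): the annul handling above
   follows the V8 rules: a taken BA,a jumps straight to the target and
   annuls the delay slot; an untaken Bcc,a (or BN,a) skips the delay slot
   by continuing at npc + 4; in all other cases the delay slot executes. */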
1325 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1327 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1328 target_ulong target = dc->pc + offset;
1330 #ifdef TARGET_SPARC64
1331 if (unlikely(AM_CHECK(dc))) {
1332 target &= 0xffffffffULL;
1336 /* unconditional not taken */
1338 dc->pc = dc->npc + 4;
1339 dc->npc = dc->pc + 4;
1342 dc->npc = dc->pc + 4;
1344 } else if (cond == 0x8) {
1345 /* unconditional taken */
1348 dc->npc = dc->pc + 4;
1352 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1356 gen_fcond(cpu_cond, cc, cond);
1358 gen_branch_a(dc, target, dc->npc, cpu_cond);
1362 dc->jump_pc[0] = target;
1363 if (unlikely(dc->npc == DYNAMIC_PC)) {
1364 dc->jump_pc[1] = DYNAMIC_PC;
1365 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1367 dc->jump_pc[1] = dc->npc + 4;
1374 #ifdef TARGET_SPARC64
1375 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1378 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1379 target_ulong target = dc->pc + offset;
1381 if (unlikely(AM_CHECK(dc))) {
1382 target &= 0xffffffffULL;
1385 gen_cond_reg(cpu_cond, cond, r_reg);
1387 gen_branch_a(dc, target, dc->npc, cpu_cond);
1391 dc->jump_pc[0] = target;
1392 if (unlikely(dc->npc == DYNAMIC_PC)) {
1393 dc->jump_pc[1] = DYNAMIC_PC;
1394 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1396 dc->jump_pc[1] = dc->npc + 4;
1402 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1406 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1409 gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1412 gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1415 gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1420 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1424 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1427 gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1430 gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1433 gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1438 static inline void gen_op_fcmpq(int fccno)
1442 gen_helper_fcmpq(cpu_env);
1445 gen_helper_fcmpq_fcc1(cpu_env);
1448 gen_helper_fcmpq_fcc2(cpu_env);
1451 gen_helper_fcmpq_fcc3(cpu_env);
1456 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1460 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1463 gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1466 gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1469 gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1474 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1478 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1481 gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1484 gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1487 gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1492 static inline void gen_op_fcmpeq(int fccno)
1496 gen_helper_fcmpeq(cpu_env);
1499 gen_helper_fcmpeq_fcc1(cpu_env);
1502 gen_helper_fcmpeq_fcc2(cpu_env);
1505 gen_helper_fcmpeq_fcc3(cpu_env);
1512 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1514 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1517 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1519 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1522 static inline void gen_op_fcmpq(int fccno)
1524 gen_helper_fcmpq(cpu_env);
1527 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1529 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1532 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1534 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1537 static inline void gen_op_fcmpeq(int fccno)
1539 gen_helper_fcmpeq(cpu_env);
1543 static inline void gen_op_fpexception_im(int fsr_flags)
1547 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1548 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1549 r_const = tcg_const_i32(TT_FP_EXCP);
1550 gen_helper_raise_exception(cpu_env, r_const);
1551 tcg_temp_free_i32(r_const);
1554 static int gen_trap_ifnofpu(DisasContext *dc)
1556 #if !defined(CONFIG_USER_ONLY)
1557 if (!dc->fpu_enabled) {
1561 r_const = tcg_const_i32(TT_NFPU_INSN);
1562 gen_helper_raise_exception(cpu_env, r_const);
1563 tcg_temp_free_i32(r_const);
1571 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1573 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1576 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1577 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1581 src = gen_load_fpr_F(dc, rs);
1582 dst = gen_dest_fpr_F();
1584 gen(dst, cpu_env, src);
1586 gen_store_fpr_F(dc, rd, dst);
1589 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1590 void (*gen)(TCGv_i32, TCGv_i32))
1594 src = gen_load_fpr_F(dc, rs);
1595 dst = gen_dest_fpr_F();
1599 gen_store_fpr_F(dc, rd, dst);
1602 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1603 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1605 TCGv_i32 dst, src1, src2;
1607 src1 = gen_load_fpr_F(dc, rs1);
1608 src2 = gen_load_fpr_F(dc, rs2);
1609 dst = gen_dest_fpr_F();
1611 gen(dst, cpu_env, src1, src2);
1613 gen_store_fpr_F(dc, rd, dst);
1616 #ifdef TARGET_SPARC64
1617 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1618 void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1620 TCGv_i32 dst, src1, src2;
1622 src1 = gen_load_fpr_F(dc, rs1);
1623 src2 = gen_load_fpr_F(dc, rs2);
1624 dst = gen_dest_fpr_F();
1626 gen(dst, src1, src2);
1628 gen_store_fpr_F(dc, rd, dst);
1632 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1633 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1637 src = gen_load_fpr_D(dc, rs);
1638 dst = gen_dest_fpr_D();
1640 gen(dst, cpu_env, src);
1642 gen_store_fpr_D(dc, rd, dst);
1645 #ifdef TARGET_SPARC64
1646 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1647 void (*gen)(TCGv_i64, TCGv_i64))
1651 src = gen_load_fpr_D(dc, rs);
1652 dst = gen_dest_fpr_D();
1656 gen_store_fpr_D(dc, rd, dst);
1660 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1661 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1663 TCGv_i64 dst, src1, src2;
1665 src1 = gen_load_fpr_D(dc, rs1);
1666 src2 = gen_load_fpr_D(dc, rs2);
1667 dst = gen_dest_fpr_D();
1669 gen(dst, cpu_env, src1, src2);
1671 gen_store_fpr_D(dc, rd, dst);
1674 #ifdef TARGET_SPARC64
1675 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1676 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1678 TCGv_i64 dst, src1, src2;
1680 src1 = gen_load_fpr_D(dc, rs1);
1681 src2 = gen_load_fpr_D(dc, rs2);
1682 dst = gen_dest_fpr_D();
1684 gen(dst, src1, src2);
1686 gen_store_fpr_D(dc, rd, dst);
1689 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1690 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1692 TCGv_i64 dst, src1, src2;
1694 src1 = gen_load_fpr_D(dc, rs1);
1695 src2 = gen_load_fpr_D(dc, rs2);
1696 dst = gen_dest_fpr_D();
1698 gen(dst, cpu_gsr, src1, src2);
1700 gen_store_fpr_D(dc, rd, dst);
1703 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1704 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1706 TCGv_i64 dst, src0, src1, src2;
1708 src1 = gen_load_fpr_D(dc, rs1);
1709 src2 = gen_load_fpr_D(dc, rs2);
1710 src0 = gen_load_fpr_D(dc, rd);
1711 dst = gen_dest_fpr_D();
1713 gen(dst, src0, src1, src2);
1715 gen_store_fpr_D(dc, rd, dst);
1719 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1720 void (*gen)(TCGv_ptr))
1722 gen_op_load_fpr_QT1(QFPREG(rs));
1726 gen_op_store_QT0_fpr(QFPREG(rd));
1727 gen_update_fprs_dirty(QFPREG(rd));
1730 #ifdef TARGET_SPARC64
1731 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1732 void (*gen)(TCGv_ptr))
1734 gen_op_load_fpr_QT1(QFPREG(rs));
1738 gen_op_store_QT0_fpr(QFPREG(rd));
1739 gen_update_fprs_dirty(QFPREG(rd));
1743 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1744 void (*gen)(TCGv_ptr))
1746 gen_op_load_fpr_QT0(QFPREG(rs1));
1747 gen_op_load_fpr_QT1(QFPREG(rs2));
1751 gen_op_store_QT0_fpr(QFPREG(rd));
1752 gen_update_fprs_dirty(QFPREG(rd));
1755 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1756 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1759 TCGv_i32 src1, src2;
1761 src1 = gen_load_fpr_F(dc, rs1);
1762 src2 = gen_load_fpr_F(dc, rs2);
1763 dst = gen_dest_fpr_D();
1765 gen(dst, cpu_env, src1, src2);
1767 gen_store_fpr_D(dc, rd, dst);
1770 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1771 void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1773 TCGv_i64 src1, src2;
1775 src1 = gen_load_fpr_D(dc, rs1);
1776 src2 = gen_load_fpr_D(dc, rs2);
1778 gen(cpu_env, src1, src2);
1780 gen_op_store_QT0_fpr(QFPREG(rd));
1781 gen_update_fprs_dirty(QFPREG(rd));
1784 #ifdef TARGET_SPARC64
1785 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1786 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1791 src = gen_load_fpr_F(dc, rs);
1792 dst = gen_dest_fpr_D();
1794 gen(dst, cpu_env, src);
1796 gen_store_fpr_D(dc, rd, dst);
1800 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1801 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1806 src = gen_load_fpr_F(dc, rs);
1807 dst = gen_dest_fpr_D();
1809 gen(dst, cpu_env, src);
1811 gen_store_fpr_D(dc, rd, dst);
1814 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1815 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1820 src = gen_load_fpr_D(dc, rs);
1821 dst = gen_dest_fpr_F();
1823 gen(dst, cpu_env, src);
1825 gen_store_fpr_F(dc, rd, dst);
1828 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1829 void (*gen)(TCGv_i32, TCGv_ptr))
1833 gen_op_load_fpr_QT1(QFPREG(rs));
1834 dst = gen_dest_fpr_F();
1838 gen_store_fpr_F(dc, rd, dst);
1841 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1842 void (*gen)(TCGv_i64, TCGv_ptr))
1846 gen_op_load_fpr_QT1(QFPREG(rs));
1847 dst = gen_dest_fpr_D();
1851 gen_store_fpr_D(dc, rd, dst);
1854 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1855 void (*gen)(TCGv_ptr, TCGv_i32))
1859 src = gen_load_fpr_F(dc, rs);
1863 gen_op_store_QT0_fpr(QFPREG(rd));
1864 gen_update_fprs_dirty(QFPREG(rd));
1867 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1868 void (*gen)(TCGv_ptr, TCGv_i64))
1872 src = gen_load_fpr_D(dc, rs);
1876 gen_op_store_QT0_fpr(QFPREG(rd));
1877 gen_update_fprs_dirty(QFPREG(rd));
1881 #ifdef TARGET_SPARC64
1882 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1888 r_asi = tcg_temp_new_i32();
1889 tcg_gen_mov_i32(r_asi, cpu_asi);
1891 asi = GET_FIELD(insn, 19, 26);
1892 r_asi = tcg_const_i32(asi);
1897 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1900 TCGv_i32 r_asi, r_size, r_sign;
1902 r_asi = gen_get_asi(insn, addr);
1903 r_size = tcg_const_i32(size);
1904 r_sign = tcg_const_i32(sign);
1905 gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
1906 tcg_temp_free_i32(r_sign);
1907 tcg_temp_free_i32(r_size);
1908 tcg_temp_free_i32(r_asi);
1911 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1913 TCGv_i32 r_asi, r_size;
1915 r_asi = gen_get_asi(insn, addr);
1916 r_size = tcg_const_i32(size);
1917 gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
1918 tcg_temp_free_i32(r_size);
1919 tcg_temp_free_i32(r_asi);
1922 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1924 TCGv_i32 r_asi, r_size, r_rd;
1926 r_asi = gen_get_asi(insn, addr);
1927 r_size = tcg_const_i32(size);
1928 r_rd = tcg_const_i32(rd);
1929 gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
1930 tcg_temp_free_i32(r_rd);
1931 tcg_temp_free_i32(r_size);
1932 tcg_temp_free_i32(r_asi);
1935 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1937 TCGv_i32 r_asi, r_size, r_rd;
1939 r_asi = gen_get_asi(insn, addr);
1940 r_size = tcg_const_i32(size);
1941 r_rd = tcg_const_i32(rd);
1942 gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
1943 tcg_temp_free_i32(r_rd);
1944 tcg_temp_free_i32(r_size);
1945 tcg_temp_free_i32(r_asi);
1948 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1950 TCGv_i32 r_asi, r_size, r_sign;
1952 r_asi = gen_get_asi(insn, addr);
1953 r_size = tcg_const_i32(4);
1954 r_sign = tcg_const_i32(0);
1955 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
1956 tcg_temp_free_i32(r_sign);
1957 gen_helper_st_asi(cpu_env, addr, dst, r_asi, r_size);
1958 tcg_temp_free_i32(r_size);
1959 tcg_temp_free_i32(r_asi);
1960 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1963 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1965 TCGv_i32 r_asi, r_rd;
1967 r_asi = gen_get_asi(insn, addr);
1968 r_rd = tcg_const_i32(rd);
1969 gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
1970 tcg_temp_free_i32(r_rd);
1971 tcg_temp_free_i32(r_asi);
1974 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1976 TCGv_i32 r_asi, r_size;
1978 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1979 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1980 r_asi = gen_get_asi(insn, addr);
1981 r_size = tcg_const_i32(8);
1982 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
1983 tcg_temp_free_i32(r_size);
1984 tcg_temp_free_i32(r_asi);
1987 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1993 r_val1 = tcg_temp_new();
1994 gen_movl_reg_TN(rd, r_val1);
1995 r_asi = gen_get_asi(insn, addr);
1996 gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
1997 tcg_temp_free_i32(r_asi);
1998 tcg_temp_free(r_val1);
2001 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2006 gen_movl_reg_TN(rd, cpu_tmp64);
2007 r_asi = gen_get_asi(insn, addr);
2008 gen_helper_casx_asi(dst, cpu_env, addr, cpu_tmp64, val2, r_asi);
2009 tcg_temp_free_i32(r_asi);
2012 #elif !defined(CONFIG_USER_ONLY)
2014 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2017 TCGv_i32 r_asi, r_size, r_sign;
2019 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2020 r_size = tcg_const_i32(size);
2021 r_sign = tcg_const_i32(sign);
2022 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2023 tcg_temp_free(r_sign);
2024 tcg_temp_free(r_size);
2025 tcg_temp_free(r_asi);
2026 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2029 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2031 TCGv_i32 r_asi, r_size;
2033 tcg_gen_extu_tl_i64(cpu_tmp64, src);
2034 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2035 r_size = tcg_const_i32(size);
2036 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2037 tcg_temp_free(r_size);
2038 tcg_temp_free(r_asi);
2041 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2043 TCGv_i32 r_asi, r_size, r_sign;
2046 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2047 r_size = tcg_const_i32(4);
2048 r_sign = tcg_const_i32(0);
2049 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2050 tcg_temp_free(r_sign);
2051 r_val = tcg_temp_new_i64();
2052 tcg_gen_extu_tl_i64(r_val, dst);
2053 gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2054 tcg_temp_free_i64(r_val);
2055 tcg_temp_free(r_size);
2056 tcg_temp_free(r_asi);
2057 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2060 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2062 TCGv_i32 r_asi, r_size, r_sign;
2064 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2065 r_size = tcg_const_i32(8);
2066 r_sign = tcg_const_i32(0);
2067 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2068 tcg_temp_free(r_sign);
2069 tcg_temp_free(r_size);
2070 tcg_temp_free(r_asi);
2071 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
2072 gen_movl_TN_reg(rd + 1, cpu_tmp0);
2073 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2074 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2075 gen_movl_TN_reg(rd, hi);
2078 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2080 TCGv_i32 r_asi, r_size;
2082 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2083 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2084 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2085 r_size = tcg_const_i32(8);
2086 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2087 tcg_temp_free(r_size);
2088 tcg_temp_free(r_asi);
2092 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2093 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2096 TCGv_i32 r_asi, r_size;
2098 gen_ld_asi(dst, addr, insn, 1, 0);
2100 r_val = tcg_const_i64(0xffULL);
2101 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2102 r_size = tcg_const_i32(1);
2103 gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2104 tcg_temp_free_i32(r_size);
2105 tcg_temp_free_i32(r_asi);
2106 tcg_temp_free_i64(r_val);
2110 static inline TCGv get_src1(unsigned int insn, TCGv def)
2115 rs1 = GET_FIELD(insn, 13, 17);
2117 tcg_gen_movi_tl(def, 0);
2118 } else if (rs1 < 8) {
2119 r_rs1 = cpu_gregs[rs1];
2121 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
2126 static inline TCGv get_src2(unsigned int insn, TCGv def)
2130 if (IS_IMM) { /* immediate */
2131 target_long simm = GET_FIELDs(insn, 19, 31);
2132 tcg_gen_movi_tl(def, simm);
2133 } else { /* register */
2134 unsigned int rs2 = GET_FIELD(insn, 27, 31);
2136 tcg_gen_movi_tl(def, 0);
2137 } else if (rs2 < 8) {
2138 r_rs2 = cpu_gregs[rs2];
2140 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
2146 #ifdef TARGET_SPARC64
2147 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2149 TCGv_i32 c32, zero, dst, s1, s2;
2151 /* We have two choices here: extend the 32 bit data and use movcond_i64,
2152 or fold the comparison down to 32 bits and use movcond_i32. Choose
2153 the latter. */
2154 c32 = tcg_temp_new_i32();
2156 tcg_gen_trunc_i64_i32(c32, cmp->c1);
2158 TCGv_i64 c64 = tcg_temp_new_i64();
2159 tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2160 tcg_gen_trunc_i64_i32(c32, c64);
2161 tcg_temp_free_i64(c64);
2164 s1 = gen_load_fpr_F(dc, rs);
2165 s2 = gen_load_fpr_F(dc, rd);
2166 dst = gen_dest_fpr_F();
2167 zero = tcg_const_i32(0);
2169 tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2171 tcg_temp_free_i32(c32);
2172 tcg_temp_free_i32(zero);
2173 gen_store_fpr_F(dc, rd, dst);
2176 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2178 TCGv_i64 dst = gen_dest_fpr_D();
2179 tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2180 gen_load_fpr_D(dc, rs),
2181 gen_load_fpr_D(dc, rd));
2182 gen_store_fpr_D(dc, rd, dst);
2185 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2187 int qd = QFPREG(rd);
2188 int qs = QFPREG(rs);
2190 tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2191 cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2192 tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2193 cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2195 gen_update_fprs_dirty(qd);
2198 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2200 TCGv_i32 r_tl = tcg_temp_new_i32();
2202 /* load env->tl into r_tl */
2203 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2205 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be a power of 2 */
2206 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2208 /* calculate offset to current trap state from env->ts, reuse r_tl */
2209 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2210 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2212 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2214 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2215 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2216 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2217 tcg_temp_free_ptr(r_tl_tmp);
2220 tcg_temp_free_i32(r_tl);
2223 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2224 int width, bool cc, bool left)
2226 TCGv lo1, lo2, t1, t2;
2227 uint64_t amask, tabl, tabr;
2228 int shift, imask, omask;
2231 tcg_gen_mov_tl(cpu_cc_src, s1);
2232 tcg_gen_mov_tl(cpu_cc_src2, s2);
2233 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2234 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2235 dc->cc_op = CC_OP_SUB;
2238 /* Theory of operation: there are two tables, left and right (not to
2239 be confused with the left and right versions of the opcode). These
2240 are indexed by the low 3 bits of the inputs. To make things "easy",
2241 these tables are loaded into two constants, TABL and TABR below.
2242 The operation index = (input & imask) << shift calculates the index
2243 into the constant, while val = (table >> index) & omask calculates
2244 the value we're looking for. */
2251 tabl = 0x80c0e0f0f8fcfeffULL;
2252 tabr = 0xff7f3f1f0f070301ULL;
2254 tabl = 0x0103070f1f3f7fffULL;
2255 tabr = 0xfffefcf8f0e0c080ULL;
2275 tabl = (2 << 2) | 3;
2276 tabr = (3 << 2) | 1;
2278 tabl = (1 << 2) | 3;
2279 tabr = (3 << 2) | 2;
2286 lo1 = tcg_temp_new();
2287 lo2 = tcg_temp_new();
2288 tcg_gen_andi_tl(lo1, s1, imask);
2289 tcg_gen_andi_tl(lo2, s2, imask);
2290 tcg_gen_shli_tl(lo1, lo1, shift);
2291 tcg_gen_shli_tl(lo2, lo2, shift);
2293 t1 = tcg_const_tl(tabl);
2294 t2 = tcg_const_tl(tabr);
2295 tcg_gen_shr_tl(lo1, t1, lo1);
2296 tcg_gen_shr_tl(lo2, t2, lo2);
2297 tcg_gen_andi_tl(dst, lo1, omask);
2298 tcg_gen_andi_tl(lo2, lo2, omask);
2302 amask &= 0xffffffffULL;
2304 tcg_gen_andi_tl(s1, s1, amask);
2305 tcg_gen_andi_tl(s2, s2, amask);
2307 /* We want to compute
2308 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2309 We've already done dst = lo1, so this reduces to
2310 dst &= (s1 == s2 ? -1 : lo2)
2311 which we perform by
2312 lo2 |= -(s1 == s2)
2313 dst &= lo2 */
2315 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2316 tcg_gen_neg_tl(t1, t1);
2317 tcg_gen_or_tl(lo2, lo2, t1);
2318 tcg_gen_and_tl(dst, dst, lo2);
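/* Editor's sketch (not from the original file): for the byte-wide case the
   two table constants pack all eight possible edge masks, one per 8-bit
   slot, so a shift-and-extract replaces an in-memory lookup table: */
static inline void example_edge8_masks(uint64_t addr1, uint64_t addr2,
                                       uint8_t *left, uint8_t *right)
{
    const uint64_t tabl = 0x80c0e0f0f8fcfeffULL; /* left masks 0xff..0x80 */
    const uint64_t tabr = 0xff7f3f1f0f070301ULL; /* right masks 0x01..0xff */
    *left  = tabl >> ((addr1 & 7) << 3); /* == (0xff << (addr1 & 7)) & 0xff */
    *right = tabr >> ((addr2 & 7) << 3); /* ==  0xff >> (7 - (addr2 & 7))   */
}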
2326 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2328 TCGv tmp = tcg_temp_new();
2330 tcg_gen_add_tl(tmp, s1, s2);
2331 tcg_gen_andi_tl(dst, tmp, -8);
2332 if (left) {
2333 tcg_gen_neg_tl(tmp, tmp);
2334 }
2335 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2340 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2344 t1 = tcg_temp_new();
2345 t2 = tcg_temp_new();
2346 shift = tcg_temp_new();
2348 tcg_gen_andi_tl(shift, gsr, 7);
2349 tcg_gen_shli_tl(shift, shift, 3);
2350 tcg_gen_shl_tl(t1, s1, shift);
2352 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2353 shift of (up to 63) followed by a constant shift of 1. */
2354 tcg_gen_xori_tl(shift, shift, 63);
2355 tcg_gen_shr_tl(t2, s2, shift);
2356 tcg_gen_shri_tl(t2, t2, 1);
2358 tcg_gen_or_tl(dst, t1, t2);
2362 tcg_temp_free(shift);
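/* Editor's sketch (not from the original file): behaviourally, faligndata
   concatenates the two doublewords and extracts eight bytes starting at
   GSR.align; the align == 0 case is why the TCG above splits the right
   shift into a variable part and a constant shift of 1: */
static inline uint64_t example_faligndata(uint64_t s1, uint64_t s2,
                                          unsigned align /* GSR & 7 */)
{
    unsigned shift = (align & 7) * 8;
    uint64_t lo = shift ? s2 >> (64 - shift) : 0; /* avoid a shift by 64 */
    return (s1 << shift) | lo;
}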
2366 #define CHECK_IU_FEATURE(dc, FEATURE) \
2367 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2368 goto illegal_insn;
2369 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2370 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2371 goto nfpu_insn;
2373 /* before an instruction, dc->pc must be static */
2374 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2376 unsigned int opc, rs1, rs2, rd;
2377 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2378 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2379 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2382 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2383 tcg_gen_debug_insn_start(dc->pc);
2386 opc = GET_FIELD(insn, 0, 1);
2388 rd = GET_FIELD(insn, 2, 6);
2390 cpu_tmp1 = cpu_src1 = tcg_temp_new();
2391 cpu_tmp2 = cpu_src2 = tcg_temp_new();
2394 case 0: /* branches/sethi */
2396 unsigned int xop = GET_FIELD(insn, 7, 9);
2399 #ifdef TARGET_SPARC64
2400 case 0x1: /* V9 BPcc */
2404 target = GET_FIELD_SP(insn, 0, 18);
2405 target = sign_extend(target, 19);
2407 cc = GET_FIELD_SP(insn, 20, 21);
2409 do_branch(dc, target, insn, 0);
2411 do_branch(dc, target, insn, 1);
2416 case 0x3: /* V9 BPr */
2418 target = GET_FIELD_SP(insn, 0, 13) |
2419 (GET_FIELD_SP(insn, 20, 21) << 14);
2420 target = sign_extend(target, 16);
2422 cpu_src1 = get_src1(insn, cpu_src1);
2423 do_branch_reg(dc, target, insn, cpu_src1);
2426 case 0x5: /* V9 FBPcc */
2428 int cc = GET_FIELD_SP(insn, 20, 21);
2429 if (gen_trap_ifnofpu(dc)) {
2432 target = GET_FIELD_SP(insn, 0, 18);
2433 target = sign_extend(target, 19);
2435 do_fbranch(dc, target, insn, cc);
2439 case 0x7: /* CBN+x */
2444 case 0x2: /* BN+x */
2446 target = GET_FIELD(insn, 10, 31);
2447 target = sign_extend(target, 22);
2449 do_branch(dc, target, insn, 0);
2452 case 0x6: /* FBN+x */
2454 if (gen_trap_ifnofpu(dc)) {
2457 target = GET_FIELD(insn, 10, 31);
2458 target = sign_extend(target, 22);
2460 do_fbranch(dc, target, insn, 0);
2463 case 0x4: /* SETHI */
2465 uint32_t value = GET_FIELD(insn, 10, 31);
2468 r_const = tcg_const_tl(value << 10);
2469 gen_movl_TN_reg(rd, r_const);
2470 tcg_temp_free(r_const);
2473 case 0x0: /* UNIMPL */
2482 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2485 r_const = tcg_const_tl(dc->pc);
2486 gen_movl_TN_reg(15, r_const);
2487 tcg_temp_free(r_const);
2490 #ifdef TARGET_SPARC64
2491 if (unlikely(AM_CHECK(dc))) {
2492 target &= 0xffffffffULL;
2498 case 2: /* FPU & Logical Operations */
2500 unsigned int xop = GET_FIELD(insn, 7, 12);
2501 if (xop == 0x3a) { /* generate trap */
2502 int cond = GET_FIELD(insn, 3, 6);
2514 /* Conditional trap. */
2516 #ifdef TARGET_SPARC64
2518 int cc = GET_FIELD_SP(insn, 11, 12);
2520 gen_compare(&cmp, 0, cond, dc);
2521 } else if (cc == 2) {
2522 gen_compare(&cmp, 1, cond, dc);
2527 gen_compare(&cmp, 0, cond, dc);
2529 l1 = gen_new_label();
2530 tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2531 cmp.c1, cmp.c2, l1);
2535 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2536 ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2538 /* Don't use the normal temporaries, as they may well have
2539 gone out of scope with the branch above. While we're
2540 doing that we might as well pre-truncate to 32-bit. */
2541 trap = tcg_temp_new_i32();
2543 rs1 = GET_FIELD_SP(insn, 14, 18);
2545 rs2 = GET_FIELD_SP(insn, 0, 6);
2547 tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2548 /* Signal that the trap value is fully constant. */
2551 TCGv t1 = tcg_temp_new();
2552 gen_movl_reg_TN(rs1, t1);
2553 tcg_gen_trunc_tl_i32(trap, t1);
2555 tcg_gen_addi_i32(trap, trap, rs2);
2558 TCGv t1 = tcg_temp_new();
2559 TCGv t2 = tcg_temp_new();
2560 rs2 = GET_FIELD_SP(insn, 0, 4);
2561 gen_movl_reg_TN(rs1, t1);
2562 gen_movl_reg_TN(rs2, t2);
2563 tcg_gen_add_tl(t1, t1, t2);
2564 tcg_gen_trunc_tl_i32(trap, t1);
2569 tcg_gen_andi_i32(trap, trap, mask);
2570 tcg_gen_addi_i32(trap, trap, TT_TRAP);
2573 gen_helper_raise_exception(cpu_env, trap);
2574 tcg_temp_free_i32(trap);
2583 } else if (xop == 0x28) {
2584 rs1 = GET_FIELD(insn, 13, 17);
2587 #ifndef TARGET_SPARC64
2588 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2589 manual, rdy on the microSPARC II */
2591 case 0x0f: /* stbar in the SPARCv8 manual,
2592 rdy on the microSPARC II */
2593 case 0x10 ... 0x1f: /* implementation-dependent in the
2594 SPARCv8 manual, rdy on the microSPARC II */
2597 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2600 /* Read Asr17 for a Leon3 monoprocessor */
2601 r_const = tcg_const_tl((1 << 8)
2602 | (dc->def->nwindows - 1));
2603 gen_movl_TN_reg(rd, r_const);
2604 tcg_temp_free(r_const);
2608 gen_movl_TN_reg(rd, cpu_y);
2610 #ifdef TARGET_SPARC64
2611 case 0x2: /* V9 rdccr */
2612 gen_helper_compute_psr(cpu_env);
2613 gen_helper_rdccr(cpu_dst, cpu_env);
2614 gen_movl_TN_reg(rd, cpu_dst);
2616 case 0x3: /* V9 rdasi */
2617 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2618 gen_movl_TN_reg(rd, cpu_dst);
2620 case 0x4: /* V9 rdtick */
2624 r_tickptr = tcg_temp_new_ptr();
2625 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2626 offsetof(CPUSPARCState, tick));
2627 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2628 tcg_temp_free_ptr(r_tickptr);
2629 gen_movl_TN_reg(rd, cpu_dst);
2632 case 0x5: /* V9 rdpc */
2636 if (unlikely(AM_CHECK(dc))) {
2637 r_const = tcg_const_tl(dc->pc & 0xffffffffULL);
2639 r_const = tcg_const_tl(dc->pc);
2641 gen_movl_TN_reg(rd, r_const);
2642 tcg_temp_free(r_const);
2645 case 0x6: /* V9 rdfprs */
2646 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2647 gen_movl_TN_reg(rd, cpu_dst);
2649 case 0xf: /* V9 membar */
2650 break; /* no effect */
2651 case 0x13: /* Graphics Status */
2652 if (gen_trap_ifnofpu(dc)) {
2655 gen_movl_TN_reg(rd, cpu_gsr);
2657 case 0x16: /* Softint */
2658 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2659 gen_movl_TN_reg(rd, cpu_dst);
2661 case 0x17: /* Tick compare */
2662 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2664 case 0x18: /* System tick */
2668 r_tickptr = tcg_temp_new_ptr();
2669 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2670 offsetof(CPUSPARCState, stick));
2671 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2672 tcg_temp_free_ptr(r_tickptr);
2673 gen_movl_TN_reg(rd, cpu_dst);
2676 case 0x19: /* System tick compare */
2677 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2679 case 0x10: /* Performance Control */
2680 case 0x11: /* Performance Instrumentation Counter */
2681 case 0x12: /* Dispatch Control */
2682 case 0x14: /* Softint set, WO */
2683 case 0x15: /* Softint clear, WO */
2688 #if !defined(CONFIG_USER_ONLY)
2689 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2690 #ifndef TARGET_SPARC64
2691 if (!supervisor(dc))
2693 gen_helper_compute_psr(cpu_env);
2694 dc->cc_op = CC_OP_FLAGS;
2695 gen_helper_rdpsr(cpu_dst, cpu_env);
2697 CHECK_IU_FEATURE(dc, HYPV);
2698 if (!hypervisor(dc))
2700 rs1 = GET_FIELD(insn, 13, 17);
2703 // gen_op_rdhpstate();
2706 // gen_op_rdhtstate();
2709 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2712 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2715 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2717 case 31: // hstick_cmpr
2718 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2724 gen_movl_TN_reg(rd, cpu_dst);
2726 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2727 if (!supervisor(dc))
2729 #ifdef TARGET_SPARC64
2730 rs1 = GET_FIELD(insn, 13, 17);
2736 r_tsptr = tcg_temp_new_ptr();
2737 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2738 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2739 offsetof(trap_state, tpc));
2740 tcg_temp_free_ptr(r_tsptr);
2747 r_tsptr = tcg_temp_new_ptr();
2748 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2749 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2750 offsetof(trap_state, tnpc));
2751 tcg_temp_free_ptr(r_tsptr);
2758 r_tsptr = tcg_temp_new_ptr();
2759 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2760 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2761 offsetof(trap_state, tstate));
2762 tcg_temp_free_ptr(r_tsptr);
2769 r_tsptr = tcg_temp_new_ptr();
2770 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2771 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2772 offsetof(trap_state, tt));
2773 tcg_temp_free_ptr(r_tsptr);
2774 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2781 r_tickptr = tcg_temp_new_ptr();
2782 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2783 offsetof(CPUSPARCState, tick));
2784 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2785 gen_movl_TN_reg(rd, cpu_tmp0);
2786 tcg_temp_free_ptr(r_tickptr);
2790 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2793 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2794 offsetof(CPUSPARCState, pstate));
2795 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2798 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2799 offsetof(CPUSPARCState, tl));
2800 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2803 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2804 offsetof(CPUSPARCState, psrpil));
2805 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2808 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2811 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2812 offsetof(CPUSPARCState, cansave));
2813 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2815 case 11: // canrestore
2816 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2817 offsetof(CPUSPARCState, canrestore));
2818 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2820 case 12: // cleanwin
2821 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2822 offsetof(CPUSPARCState, cleanwin));
2823 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2825 case 13: // otherwin
2826 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2827 offsetof(CPUSPARCState, otherwin));
2828 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2831 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2832 offsetof(CPUSPARCState, wstate));
2833 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2835 case 16: // UA2005 gl
2836 CHECK_IU_FEATURE(dc, GL);
2837 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2838 offsetof(CPUSPARCState, gl));
2839 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2841 case 26: // UA2005 strand status
2842 CHECK_IU_FEATURE(dc, HYPV);
2843 if (!hypervisor(dc))
2844     goto priv_insn;
2845 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2848 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2855 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2857 gen_movl_TN_reg(rd, cpu_tmp0);
2859 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2860 #ifdef TARGET_SPARC64
2862 gen_helper_flushw(cpu_env);
2864 if (!supervisor(dc))
2865     goto priv_insn;
2866 gen_movl_TN_reg(rd, cpu_tbr);
2870 } else if (xop == 0x34) { /* FPU Operations */
2871 if (gen_trap_ifnofpu(dc)) {
2872     goto jmp_insn;
2873 }
2874 gen_op_clear_ieee_excp_and_FTT();
2875 rs1 = GET_FIELD(insn, 13, 17);
2876 rs2 = GET_FIELD(insn, 27, 31);
2877 xop = GET_FIELD(insn, 18, 26);
2878 switch (xop) {
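/* FPop1 decode: rs1 and rs2 select source f-registers and the
   9-bit opf field picks the operation.  The gen_fop_* suffixes
   name operand widths: F = 32-bit single, D = 64-bit double,
   Q = 128-bit quad. */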
2880 case 0x1: /* fmovs */
2881 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2882 gen_store_fpr_F(dc, rd, cpu_src1_32);
2884 case 0x5: /* fnegs */
2885 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2887 case 0x9: /* fabss */
2888 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2890 case 0x29: /* fsqrts */
2891 CHECK_FPU_FEATURE(dc, FSQRT);
2892 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2894 case 0x2a: /* fsqrtd */
2895 CHECK_FPU_FEATURE(dc, FSQRT);
2896 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2898 case 0x2b: /* fsqrtq */
2899 CHECK_FPU_FEATURE(dc, FLOAT128);
2900 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2902 case 0x41: /* fadds */
2903 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2905 case 0x42: /* faddd */
2906 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2908 case 0x43: /* faddq */
2909 CHECK_FPU_FEATURE(dc, FLOAT128);
2910 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2912 case 0x45: /* fsubs */
2913 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2915 case 0x46: /* fsubd */
2916 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2918 case 0x47: /* fsubq */
2919 CHECK_FPU_FEATURE(dc, FLOAT128);
2920 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2922 case 0x49: /* fmuls */
2923 CHECK_FPU_FEATURE(dc, FMUL);
2924 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2926 case 0x4a: /* fmuld */
2927 CHECK_FPU_FEATURE(dc, FMUL);
2928 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2930 case 0x4b: /* fmulq */
2931 CHECK_FPU_FEATURE(dc, FLOAT128);
2932 CHECK_FPU_FEATURE(dc, FMUL);
2933 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2935 case 0x4d: /* fdivs */
2936 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2938 case 0x4e: /* fdivd */
2939 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2941 case 0x4f: /* fdivq */
2942 CHECK_FPU_FEATURE(dc, FLOAT128);
2943 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2945 case 0x69: /* fsmuld */
2946 CHECK_FPU_FEATURE(dc, FSMULD);
2947 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2949 case 0x6e: /* fdmulq */
2950 CHECK_FPU_FEATURE(dc, FLOAT128);
2951 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2953 case 0xc4: /* fitos */
2954 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2956 case 0xc6: /* fdtos */
2957 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2959 case 0xc7: /* fqtos */
2960 CHECK_FPU_FEATURE(dc, FLOAT128);
2961 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2963 case 0xc8: /* fitod */
2964 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2966 case 0xc9: /* fstod */
2967 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2969 case 0xcb: /* fqtod */
2970 CHECK_FPU_FEATURE(dc, FLOAT128);
2971 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2973 case 0xcc: /* fitoq */
2974 CHECK_FPU_FEATURE(dc, FLOAT128);
2975 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2977 case 0xcd: /* fstoq */
2978 CHECK_FPU_FEATURE(dc, FLOAT128);
2979 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2981 case 0xce: /* fdtoq */
2982 CHECK_FPU_FEATURE(dc, FLOAT128);
2983 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2985 case 0xd1: /* fstoi */
2986 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2988 case 0xd2: /* fdtoi */
2989 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2991 case 0xd3: /* fqtoi */
2992 CHECK_FPU_FEATURE(dc, FLOAT128);
2993 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2995 #ifdef TARGET_SPARC64
2996 case 0x2: /* V9 fmovd */
2997 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2998 gen_store_fpr_D(dc, rd, cpu_src1_64);
3000 case 0x3: /* V9 fmovq */
3001 CHECK_FPU_FEATURE(dc, FLOAT128);
3002 gen_move_Q(rd, rs2);
3004 case 0x6: /* V9 fnegd */
3005 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3007 case 0x7: /* V9 fnegq */
3008 CHECK_FPU_FEATURE(dc, FLOAT128);
3009 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3011 case 0xa: /* V9 fabsd */
3012 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3014 case 0xb: /* V9 fabsq */
3015 CHECK_FPU_FEATURE(dc, FLOAT128);
3016 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3018 case 0x81: /* V9 fstox */
3019 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3021 case 0x82: /* V9 fdtox */
3022 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3024 case 0x83: /* V9 fqtox */
3025 CHECK_FPU_FEATURE(dc, FLOAT128);
3026 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3028 case 0x84: /* V9 fxtos */
3029 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3031 case 0x88: /* V9 fxtod */
3032 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3034 case 0x8c: /* V9 fxtoq */
3035 CHECK_FPU_FEATURE(dc, FLOAT128);
3036 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3042 } else if (xop == 0x35) { /* FPU Operations */
3043 #ifdef TARGET_SPARC64
3046 if (gen_trap_ifnofpu(dc)) {
3047     goto jmp_insn;
3048 }
3049 gen_op_clear_ieee_excp_and_FTT();
3050 rs1 = GET_FIELD(insn, 13, 17);
3051 rs2 = GET_FIELD(insn, 27, 31);
3052 xop = GET_FIELD(insn, 18, 26);
3055 #ifdef TARGET_SPARC64
3056 #define FMOVR(sz)                                          \
3057     do {                                                   \
3058         DisasCompare cmp;                                  \
3059         cond = GET_FIELD_SP(insn, 10, 12);                 \
3060         cpu_src1 = get_src1(insn, cpu_src1);               \
3061         gen_compare_reg(&cmp, cond, cpu_src1);             \
3062         gen_fmov##sz(dc, &cmp, rd, rs2);                   \
3063         free_compare(&cmp);                                \
3064     } while (0)
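/* FMOVR: conditionally move an f-register based on an integer
   register test (rcond, instruction bits 12:10, the same field the
   movr case below extracts).  gen_compare_reg builds the TCG
   condition from cpu_src1 and gen_fmov{s,d,q} emits the
   width-specific conditional move. */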
3066 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3067     FMOVR(s);
3068     break;
3069 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3070     FMOVR(d);
3071     break;
3072 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3073     CHECK_FPU_FEATURE(dc, FLOAT128);
3074     FMOVR(q);
3075     break;
3076 }
3077 #undef FMOVR
3080 #ifdef TARGET_SPARC64
3081 #define FMOVCC(fcc, sz)                                    \
3082     do {                                                   \
3083         DisasCompare cmp;                                  \
3084         cond = GET_FIELD_SP(insn, 14, 17);                 \
3085         gen_fcompare(&cmp, fcc, cond);                     \
3086         gen_fmov##sz(dc, &cmp, rd, rs2);                   \
3087         free_compare(&cmp);                                \
3088     } while (0)
3090 case 0x001: /* V9 fmovscc %fcc0 */
3093 case 0x002: /* V9 fmovdcc %fcc0 */
3096 case 0x003: /* V9 fmovqcc %fcc0 */
3097 CHECK_FPU_FEATURE(dc, FLOAT128);
3100 case 0x041: /* V9 fmovscc %fcc1 */
3103 case 0x042: /* V9 fmovdcc %fcc1 */
3106 case 0x043: /* V9 fmovqcc %fcc1 */
3107 CHECK_FPU_FEATURE(dc, FLOAT128);
3110 case 0x081: /* V9 fmovscc %fcc2 */
3113 case 0x082: /* V9 fmovdcc %fcc2 */
3116 case 0x083: /* V9 fmovqcc %fcc2 */
3117 CHECK_FPU_FEATURE(dc, FLOAT128);
3120 case 0x0c1: /* V9 fmovscc %fcc3 */
3123 case 0x0c2: /* V9 fmovdcc %fcc3 */
3126 case 0x0c3: /* V9 fmovqcc %fcc3 */
3127 CHECK_FPU_FEATURE(dc, FLOAT128);
3131 #define FMOVCC(xcc, sz)                                    \
3132     do {                                                   \
3133         DisasCompare cmp;                                  \
3134         cond = GET_FIELD_SP(insn, 14, 17);                 \
3135         gen_compare(&cmp, xcc, cond, dc);                  \
3136         gen_fmov##sz(dc, &cmp, rd, rs2);                   \
3137         free_compare(&cmp);                                \
3138     } while (0)
3140 case 0x101: /* V9 fmovscc %icc */
3143 case 0x102: /* V9 fmovdcc %icc */
3146 case 0x103: /* V9 fmovqcc %icc */
3147 CHECK_FPU_FEATURE(dc, FLOAT128);
3150 case 0x181: /* V9 fmovscc %xcc */
3153 case 0x182: /* V9 fmovdcc %xcc */
3156 case 0x183: /* V9 fmovqcc %xcc */
3157 CHECK_FPU_FEATURE(dc, FLOAT128);
3162 case 0x51: /* fcmps, V9 %fcc */
3163 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3164 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3165 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3167 case 0x52: /* fcmpd, V9 %fcc */
3168 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3169 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3170 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3172 case 0x53: /* fcmpq, V9 %fcc */
3173 CHECK_FPU_FEATURE(dc, FLOAT128);
3174 gen_op_load_fpr_QT0(QFPREG(rs1));
3175 gen_op_load_fpr_QT1(QFPREG(rs2));
3176 gen_op_fcmpq(rd & 3);
3178 case 0x55: /* fcmpes, V9 %fcc */
3179 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3180 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3181 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3183 case 0x56: /* fcmped, V9 %fcc */
3184 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3185 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3186 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3188 case 0x57: /* fcmpeq, V9 %fcc */
3189 CHECK_FPU_FEATURE(dc, FLOAT128);
3190 gen_op_load_fpr_QT0(QFPREG(rs1));
3191 gen_op_load_fpr_QT1(QFPREG(rs2));
3192 gen_op_fcmpeq(rd & 3);
3197 } else if (xop == 0x2) {
3200 rs1 = GET_FIELD(insn, 13, 17);
3201 if (rs1 == 0) {
3202     // or %g0, x, y -> mov T0, x; mov y, T0
3203 if (IS_IMM) { /* immediate */
3206 simm = GET_FIELDs(insn, 19, 31);
3207 r_const = tcg_const_tl(simm);
3208 gen_movl_TN_reg(rd, r_const);
3209 tcg_temp_free(r_const);
3210 } else { /* register */
3211 rs2 = GET_FIELD(insn, 27, 31);
3212 gen_movl_reg_TN(rs2, cpu_dst);
3213     gen_movl_TN_reg(rd, cpu_dst);
3214     }
3215 } else {
3216     cpu_src1 = get_src1(insn, cpu_src1);
3217 if (IS_IMM) { /* immediate */
3218 simm = GET_FIELDs(insn, 19, 31);
3219 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3220 gen_movl_TN_reg(rd, cpu_dst);
3221 } else { /* register */
3222 // or x, %g0, y -> mov T1, x; mov y, T1
3223 rs2 = GET_FIELD(insn, 27, 31);
3224 if (rs2 != 0) {
3225     gen_movl_reg_TN(rs2, cpu_src2);
3226     tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3227     gen_movl_TN_reg(rd, cpu_dst);
3228 } else
3229     gen_movl_TN_reg(rd, cpu_src1);
3232 #ifdef TARGET_SPARC64
3233 } else if (xop == 0x25) { /* sll, V9 sllx */
3234 cpu_src1 = get_src1(insn, cpu_src1);
3235 if (IS_IMM) { /* immediate */
3236 simm = GET_FIELDs(insn, 20, 31);
3237 if (insn & (1 << 12)) {
3238 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3240 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3242 } else { /* register */
3243 rs2 = GET_FIELD(insn, 27, 31);
3244 gen_movl_reg_TN(rs2, cpu_src2);
3245 if (insn & (1 << 12)) {
3246 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3248 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3250 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3252 gen_movl_TN_reg(rd, cpu_dst);
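/* srl/srlx and sra/srax below repeat this shape: instruction bit 12
   picks the 64-bit x-form with a 6-bit shift count over the 32-bit
   form with a 5-bit count, and the 32-bit forms first narrow the
   source (zero-extending for srl, sign-extending for sra). */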
3253 } else if (xop == 0x26) { /* srl, V9 srlx */
3254 cpu_src1 = get_src1(insn, cpu_src1);
3255 if (IS_IMM) { /* immediate */
3256 simm = GET_FIELDs(insn, 20, 31);
3257 if (insn & (1 << 12)) {
3258 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3260 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3261 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3263 } else { /* register */
3264 rs2 = GET_FIELD(insn, 27, 31);
3265 gen_movl_reg_TN(rs2, cpu_src2);
3266 if (insn & (1 << 12)) {
3267 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3268 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3270 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3271 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3272 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3275 gen_movl_TN_reg(rd, cpu_dst);
3276 } else if (xop == 0x27) { /* sra, V9 srax */
3277 cpu_src1 = get_src1(insn, cpu_src1);
3278 if (IS_IMM) { /* immediate */
3279 simm = GET_FIELDs(insn, 20, 31);
3280 if (insn & (1 << 12)) {
3281 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3283 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3284 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3285 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3287 } else { /* register */
3288 rs2 = GET_FIELD(insn, 27, 31);
3289 gen_movl_reg_TN(rs2, cpu_src2);
3290 if (insn & (1 << 12)) {
3291 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3292 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3294 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3295 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3296 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3297 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3300 gen_movl_TN_reg(rd, cpu_dst);
3302 } else if (xop < 0x36) {
3304 cpu_src1 = get_src1(insn, cpu_src1);
3305 cpu_src2 = get_src2(insn, cpu_src2);
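/* In the switch below, bit 4 of xop selects the cc-setting flavor
   of each ALU op (add vs addcc and so on), so the switch masks it
   off and the individual cases test (xop & 0x10) to decide whether
   to update cpu_cc_* and dc->cc_op. */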
3306 switch (xop & ~0x10) {
3307 case 0x0: /* add */
3308     if (IS_IMM) {
3309         simm = GET_FIELDs(insn, 19, 31);
3310         if (xop & 0x10) {
3311             gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3312             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3313             dc->cc_op = CC_OP_ADD;
3314         } else {
3315             tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3316         }
3317     } else {
3318         if (xop & 0x10) {
3319             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3320             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3321             dc->cc_op = CC_OP_ADD;
3322         } else {
3323             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3324         }
3325     }
3326     break;
3329 simm = GET_FIELDs(insn, 19, 31);
3330 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3332 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3335 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3336 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3337 dc->cc_op = CC_OP_LOGIC;
3342 simm = GET_FIELDs(insn, 19, 31);
3343 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3345 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3348 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3349 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3350 dc->cc_op = CC_OP_LOGIC;
3355 simm = GET_FIELDs(insn, 19, 31);
3356 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3358 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3361 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3362 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3363 dc->cc_op = CC_OP_LOGIC;
3368 simm = GET_FIELDs(insn, 19, 31);
3370 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3372 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3376 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3377 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3378 dc->cc_op = CC_OP_SUB;
3380 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3384 case 0x5: /* andn */
3386 simm = GET_FIELDs(insn, 19, 31);
3387 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3389 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3392 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3393 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3394 dc->cc_op = CC_OP_LOGIC;
3399 simm = GET_FIELDs(insn, 19, 31);
3400 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3402 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3405 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3406 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3407 dc->cc_op = CC_OP_LOGIC;
3410 case 0x7: /* xorn */
3412 simm = GET_FIELDs(insn, 19, 31);
3413 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3415 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3416 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3419 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3420 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3421 dc->cc_op = CC_OP_LOGIC;
3424 case 0x8: /* addx, V9 addc */
3425 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3428 #ifdef TARGET_SPARC64
3429 case 0x9: /* V9 mulx */
3431 simm = GET_FIELDs(insn, 19, 31);
3432 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3434 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3438 case 0xa: /* umul */
3439 CHECK_IU_FEATURE(dc, MUL);
3440 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3442 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3443 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3444 dc->cc_op = CC_OP_LOGIC;
3447 case 0xb: /* smul */
3448 CHECK_IU_FEATURE(dc, MUL);
3449 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3451 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3452 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3453 dc->cc_op = CC_OP_LOGIC;
3456 case 0xc: /* subx, V9 subc */
3457 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3460 #ifdef TARGET_SPARC64
3461 case 0xd: /* V9 udivx */
3462 gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3465 case 0xe: /* udiv */
3466 CHECK_IU_FEATURE(dc, DIV);
3468 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3469                    cpu_src2);
3470 dc->cc_op = CC_OP_DIV;
3472 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3473                 cpu_src2);
3476 case 0xf: /* sdiv */
3477 CHECK_IU_FEATURE(dc, DIV);
3479 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3480                    cpu_src2);
3481 dc->cc_op = CC_OP_DIV;
3483 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3484                 cpu_src2);
3490 gen_movl_TN_reg(rd, cpu_dst);
3492 cpu_src1 = get_src1(insn, cpu_src1);
3493 cpu_src2 = get_src2(insn, cpu_src2);
3495 case 0x20: /* taddcc */
3496 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3497 gen_movl_TN_reg(rd, cpu_dst);
3498 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3499 dc->cc_op = CC_OP_TADD;
3501 case 0x21: /* tsubcc */
3502 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3503 gen_movl_TN_reg(rd, cpu_dst);
3504 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3505 dc->cc_op = CC_OP_TSUB;
3507 case 0x22: /* taddcctv */
3508 gen_helper_taddcctv(cpu_dst, cpu_env,
3509 cpu_src1, cpu_src2);
3510 gen_movl_TN_reg(rd, cpu_dst);
3511 dc->cc_op = CC_OP_TADDTV;
3513 case 0x23: /* tsubcctv */
3514 gen_helper_tsubcctv(cpu_dst, cpu_env,
3515 cpu_src1, cpu_src2);
3516 gen_movl_TN_reg(rd, cpu_dst);
3517 dc->cc_op = CC_OP_TSUBTV;
3519 case 0x24: /* mulscc */
3520 gen_helper_compute_psr(cpu_env);
3521 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3522 gen_movl_TN_reg(rd, cpu_dst);
3523 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3524 dc->cc_op = CC_OP_ADD;
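/* mulscc is the V8 multiply-step instruction: each execution folds
   one bit of the multiplier (shifted through %y) into the partial
   product, which is why the live PSR flags must be materialized
   first with gen_helper_compute_psr. */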
3526 #ifndef TARGET_SPARC64
3527 case 0x25: /* sll */
3528 if (IS_IMM) { /* immediate */
3529 simm = GET_FIELDs(insn, 20, 31);
3530 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3531 } else { /* register */
3532 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3533 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3535 gen_movl_TN_reg(rd, cpu_dst);
3537 case 0x26: /* srl */
3538 if (IS_IMM) { /* immediate */
3539 simm = GET_FIELDs(insn, 20, 31);
3540 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3541 } else { /* register */
3542 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3543 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3545 gen_movl_TN_reg(rd, cpu_dst);
3547 case 0x27: /* sra */
3548 if (IS_IMM) { /* immediate */
3549 simm = GET_FIELDs(insn, 20, 31);
3550 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3551 } else { /* register */
3552 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3553 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3555 gen_movl_TN_reg(rd, cpu_dst);
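/* wr %y and the wrasr/wrpr cases below all start by XORing the two
   operands: the SPARC write-state-register instructions are defined
   to write r[rs1] ^ (r[rs2] or sign_ext(simm13)), not a plain copy
   of one source. */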
3562 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3563 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3565 #ifndef TARGET_SPARC64
3566 case 0x01 ... 0x0f: /* undefined in the
3567                        SPARCv8 manual, nop
3568                        on the microSPARC II */
3570 case 0x10 ... 0x1f: /* implementation-dependent
3571                        in the SPARCv8 manual, nop
3572                        on the microSPARC II */
3573     break;
3576 case 0x2: /* V9 wrccr */
3577 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3578 gen_helper_wrccr(cpu_env, cpu_dst);
3579 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3580 dc->cc_op = CC_OP_FLAGS;
3582 case 0x3: /* V9 wrasi */
3583 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3584 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3585 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3587 case 0x6: /* V9 wrfprs */
3588 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3589 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3595 case 0xf: /* V9 sir, nop if user */
3596 #if !defined(CONFIG_USER_ONLY)
3597 if (supervisor(dc)) {
3602 case 0x13: /* Graphics Status */
3603 if (gen_trap_ifnofpu(dc)) {
3604     goto jmp_insn;
3605 }
3606 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3608 case 0x14: /* Softint set */
3609     if (!supervisor(dc))
3610         goto illegal_insn;
3611     tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3612     gen_helper_set_softint(cpu_env, cpu_tmp64);
3613     break;
3614 case 0x15: /* Softint clear */
3615     if (!supervisor(dc))
3616         goto illegal_insn;
3617     tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3618     gen_helper_clear_softint(cpu_env, cpu_tmp64);
3619     break;
3620 case 0x16: /* Softint write */
3621     if (!supervisor(dc))
3622         goto illegal_insn;
3623     tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3624     gen_helper_write_softint(cpu_env, cpu_tmp64);
3625     break;
3626 case 0x17: /* Tick compare */
3627 #if !defined(CONFIG_USER_ONLY)
3628 if (!supervisor(dc))
3629     goto illegal_insn;
3630 #endif
3631 {
3632     TCGv_ptr r_tickptr;
3633
3634     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3635                    cpu_src2);
3636 r_tickptr = tcg_temp_new_ptr();
3637 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3638 offsetof(CPUSPARCState, tick));
3639 gen_helper_tick_set_limit(r_tickptr,
3640                           cpu_tick_cmpr);
3641 tcg_temp_free_ptr(r_tickptr);
3644 case 0x18: /* System tick */
3645 #if !defined(CONFIG_USER_ONLY)
3646 if (!supervisor(dc))
3647     goto illegal_insn;
3648 #endif
3649 {
3650     TCGv_ptr r_tickptr;
3651
3652     tcg_gen_xor_tl(cpu_dst, cpu_src1,
3653                    cpu_src2);
3654 r_tickptr = tcg_temp_new_ptr();
3655 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3656 offsetof(CPUSPARCState, stick));
3657 gen_helper_tick_set_count(r_tickptr,
3658                           cpu_dst);
3659 tcg_temp_free_ptr(r_tickptr);
3662 case 0x19: /* System tick compare */
3663 #if !defined(CONFIG_USER_ONLY)
3664 if (!supervisor(dc))
3665     goto illegal_insn;
3666 #endif
3667 {
3668     TCGv_ptr r_tickptr;
3669
3670     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3671                    cpu_src2);
3672 r_tickptr = tcg_temp_new_ptr();
3673 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3674 offsetof(CPUSPARCState, stick));
3675 gen_helper_tick_set_limit(r_tickptr,
3676                           cpu_stick_cmpr);
3677 tcg_temp_free_ptr(r_tickptr);
3681 case 0x10: /* Performance Control */
3682 case 0x11: /* Performance Instrumentation
3684 case 0x12: /* Dispatch Control */
3691 #if !defined(CONFIG_USER_ONLY)
3692 case 0x31: /* wrpsr, V9 saved, restored */
3694 if (!supervisor(dc))
3695     goto priv_insn;
3696 #ifdef TARGET_SPARC64
3699 gen_helper_saved(cpu_env);
3702 gen_helper_restored(cpu_env);
3704 case 2: /* UA2005 allclean */
3705 case 3: /* UA2005 otherw */
3706 case 4: /* UA2005 normalw */
3707 case 5: /* UA2005 invalw */
3713 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3714 gen_helper_wrpsr(cpu_env, cpu_dst);
3715 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3716 dc->cc_op = CC_OP_FLAGS;
3724 case 0x32: /* wrwim, V9 wrpr */
3726 if (!supervisor(dc))
3727     goto priv_insn;
3728 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3729 #ifdef TARGET_SPARC64
3735 r_tsptr = tcg_temp_new_ptr();
3736 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3737 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3738 offsetof(trap_state, tpc));
3739 tcg_temp_free_ptr(r_tsptr);
3746 r_tsptr = tcg_temp_new_ptr();
3747 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3748 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3749 offsetof(trap_state, tnpc));
3750 tcg_temp_free_ptr(r_tsptr);
3757 r_tsptr = tcg_temp_new_ptr();
3758 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3759 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3760 offsetof(trap_state,
3761          tstate));
3762 tcg_temp_free_ptr(r_tsptr);
3769 r_tsptr = tcg_temp_new_ptr();
3770 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3771 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3772 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3773 offsetof(trap_state, tt));
3774 tcg_temp_free_ptr(r_tsptr);
3781 r_tickptr = tcg_temp_new_ptr();
3782 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3783 offsetof(CPUSPARCState, tick));
3784 gen_helper_tick_set_count(r_tickptr,
3785                           cpu_tmp0);
3786 tcg_temp_free_ptr(r_tickptr);
3790 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3794 TCGv r_tmp = tcg_temp_local_new();
3796 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3798 gen_helper_wrpstate(cpu_env, r_tmp);
3799 tcg_temp_free(r_tmp);
3800 dc->npc = DYNAMIC_PC;
3805 TCGv r_tmp = tcg_temp_local_new();
3807 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3809 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3810 tcg_temp_free(r_tmp);
3811 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3812 offsetof(CPUSPARCState, tl));
3813 dc->npc = DYNAMIC_PC;
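/* Writes to PSTATE or TL can change trap level and which register
   set is visible, so the translator abandons its static next-PC
   (npc = DYNAMIC_PC) rather than chaining translation past the
   write. */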
3817 gen_helper_wrpil(cpu_env, cpu_tmp0);
3820 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3823 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3824 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3825 offsetof(CPUSPARCState,
3826          cansave));
3827 break;
3828 case 11: // canrestore
3829 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3830 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3831 offsetof(CPUSPARCState,
3832          canrestore));
3833 break;
3834 case 12: // cleanwin
3835 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3836 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3837 offsetof(CPUSPARCState,
3838          cleanwin));
3839 break;
3840 case 13: // otherwin
3841 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3842 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3843 offsetof(CPUSPARCState,
3844          otherwin));
3845 break;
3846 case 14: // wstate
3847     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3848     tcg_gen_st_i32(cpu_tmp32, cpu_env,
3849                    offsetof(CPUSPARCState,
3850                             wstate));
3851     break;
3852 case 16: // UA2005 gl
3853 CHECK_IU_FEATURE(dc, GL);
3854 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3855 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3856 offsetof(CPUSPARCState, gl));
3858 case 26: // UA2005 strand status
3859 CHECK_IU_FEATURE(dc, HYPV);
3860 if (!hypervisor(dc))
3861     goto priv_insn;
3862 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3868 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3869 if (dc->def->nwindows != 32)
3870 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3871 (1 << dc->def->nwindows) - 1);
3872 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3876 case 0x33: /* wrtbr, UA2005 wrhpr */
3878 #ifndef TARGET_SPARC64
3879 if (!supervisor(dc))
3880     goto priv_insn;
3881 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3883 CHECK_IU_FEATURE(dc, HYPV);
3884 if (!hypervisor(dc))
3885     goto priv_insn;
3886 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3889 // XXX gen_op_wrhpstate();
3896 // XXX gen_op_wrhtstate();
3899 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3902 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3904 case 31: // hstick_cmpr
3908 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3909 r_tickptr = tcg_temp_new_ptr();
3910 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3911 offsetof(CPUSPARCState, hstick));
3912 gen_helper_tick_set_limit(r_tickptr,
3913                           cpu_hstick_cmpr);
3914 tcg_temp_free_ptr(r_tickptr);
3917 case 6: // hver readonly
3925 #ifdef TARGET_SPARC64
3926 case 0x2c: /* V9 movcc */
3928 int cc = GET_FIELD_SP(insn, 11, 12);
3929 int cond = GET_FIELD_SP(insn, 14, 17);
3932 if (insn & (1 << 18)) {
3933     if (cc == 0) {
3934         gen_compare(&cmp, 0, cond, dc);
3935     } else if (cc == 2) {
3936         gen_compare(&cmp, 1, cond, dc);
3937     } else {
3938         goto illegal_insn;
3939     }
3940 } else {
3941     gen_fcompare(&cmp, cc, cond);
3942 }
3944 /* The get_src2 above loaded the normal 13-bit
3945 immediate field, not the 11-bit field we have
3946 in movcc. But it did handle the reg case. */
3947 if (IS_IMM) {
3948     simm = GET_FIELD_SPs(insn, 0, 10);
3949     tcg_gen_movi_tl(cpu_src2, simm);
3950 }
3952 gen_movl_reg_TN(rd, cpu_dst);
3953 tcg_gen_movcond_tl(cmp.cond, cpu_dst,
3954                    cmp.c1, cmp.c2,
3955                    cpu_src2, cpu_dst);
3956 free_compare(&cmp);
3957 gen_movl_TN_reg(rd, cpu_dst);
3960 case 0x2d: /* V9 sdivx */
3961 gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3962 gen_movl_TN_reg(rd, cpu_dst);
3964 case 0x2e: /* V9 popc */
3966 cpu_src2 = get_src2(insn, cpu_src2);
3967 gen_helper_popc(cpu_dst, cpu_src2);
3968 gen_movl_TN_reg(rd, cpu_dst);
3970 case 0x2f: /* V9 movr */
3972 int cond = GET_FIELD_SP(insn, 10, 12);
3975 gen_compare_reg(&cmp, cond, cpu_src1);
3977 /* The get_src2 above loaded the normal 13-bit
3978 immediate field, not the 10-bit field we have
3979 in movr. But it did handle the reg case. */
3980 if (IS_IMM) {
3981     simm = GET_FIELD_SPs(insn, 0, 9);
3982     tcg_gen_movi_tl(cpu_src2, simm);
3983 }
3985 gen_movl_reg_TN(rd, cpu_dst);
3986 tcg_gen_movcond_tl(cmp.cond, cpu_dst,
3987                    cmp.c1, cmp.c2,
3988                    cpu_src2, cpu_dst);
3989 free_compare(&cmp);
3990 gen_movl_TN_reg(rd, cpu_dst);
3998 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3999 #ifdef TARGET_SPARC64
4000 int opf = GET_FIELD_SP(insn, 5, 13);
4001 rs1 = GET_FIELD(insn, 13, 17);
4002 rs2 = GET_FIELD(insn, 27, 31);
4003 if (gen_trap_ifnofpu(dc)) {
4004     goto jmp_insn;
4005 }
4007 switch (opf) {
4008 case 0x000: /* VIS I edge8cc */
4009 CHECK_FPU_FEATURE(dc, VIS1);
4010 gen_movl_reg_TN(rs1, cpu_src1);
4011 gen_movl_reg_TN(rs2, cpu_src2);
4012 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4013 gen_movl_TN_reg(rd, cpu_dst);
4015 case 0x001: /* VIS II edge8n */
4016 CHECK_FPU_FEATURE(dc, VIS2);
4017 gen_movl_reg_TN(rs1, cpu_src1);
4018 gen_movl_reg_TN(rs2, cpu_src2);
4019 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4020 gen_movl_TN_reg(rd, cpu_dst);
4022 case 0x002: /* VIS I edge8lcc */
4023 CHECK_FPU_FEATURE(dc, VIS1);
4024 gen_movl_reg_TN(rs1, cpu_src1);
4025 gen_movl_reg_TN(rs2, cpu_src2);
4026 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4027 gen_movl_TN_reg(rd, cpu_dst);
4029 case 0x003: /* VIS II edge8ln */
4030 CHECK_FPU_FEATURE(dc, VIS2);
4031 gen_movl_reg_TN(rs1, cpu_src1);
4032 gen_movl_reg_TN(rs2, cpu_src2);
4033 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4034 gen_movl_TN_reg(rd, cpu_dst);
4036 case 0x004: /* VIS I edge16cc */
4037 CHECK_FPU_FEATURE(dc, VIS1);
4038 gen_movl_reg_TN(rs1, cpu_src1);
4039 gen_movl_reg_TN(rs2, cpu_src2);
4040 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4041 gen_movl_TN_reg(rd, cpu_dst);
4043 case 0x005: /* VIS II edge16n */
4044 CHECK_FPU_FEATURE(dc, VIS2);
4045 gen_movl_reg_TN(rs1, cpu_src1);
4046 gen_movl_reg_TN(rs2, cpu_src2);
4047 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4048 gen_movl_TN_reg(rd, cpu_dst);
4050 case 0x006: /* VIS I edge16lcc */
4051 CHECK_FPU_FEATURE(dc, VIS1);
4052 gen_movl_reg_TN(rs1, cpu_src1);
4053 gen_movl_reg_TN(rs2, cpu_src2);
4054 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4055 gen_movl_TN_reg(rd, cpu_dst);
4057 case 0x007: /* VIS II edge16ln */
4058 CHECK_FPU_FEATURE(dc, VIS2);
4059 gen_movl_reg_TN(rs1, cpu_src1);
4060 gen_movl_reg_TN(rs2, cpu_src2);
4061 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4062 gen_movl_TN_reg(rd, cpu_dst);
4064 case 0x008: /* VIS I edge32cc */
4065 CHECK_FPU_FEATURE(dc, VIS1);
4066 gen_movl_reg_TN(rs1, cpu_src1);
4067 gen_movl_reg_TN(rs2, cpu_src2);
4068 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4069 gen_movl_TN_reg(rd, cpu_dst);
4071 case 0x009: /* VIS II edge32n */
4072 CHECK_FPU_FEATURE(dc, VIS2);
4073 gen_movl_reg_TN(rs1, cpu_src1);
4074 gen_movl_reg_TN(rs2, cpu_src2);
4075 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4076 gen_movl_TN_reg(rd, cpu_dst);
4078 case 0x00a: /* VIS I edge32lcc */
4079 CHECK_FPU_FEATURE(dc, VIS1);
4080 gen_movl_reg_TN(rs1, cpu_src1);
4081 gen_movl_reg_TN(rs2, cpu_src2);
4082 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4083 gen_movl_TN_reg(rd, cpu_dst);
4085 case 0x00b: /* VIS II edge32ln */
4086 CHECK_FPU_FEATURE(dc, VIS2);
4087 gen_movl_reg_TN(rs1, cpu_src1);
4088 gen_movl_reg_TN(rs2, cpu_src2);
4089 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4090 gen_movl_TN_reg(rd, cpu_dst);
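/* The eight edge* cases above differ only in the gen_edge
   parameters: element width (8/16/32), whether condition codes are
   set (the cc variants), and whether the little-endian mask is
   produced (the l variants). */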
4092 case 0x010: /* VIS I array8 */
4093 CHECK_FPU_FEATURE(dc, VIS1);
4094 cpu_src1 = get_src1(insn, cpu_src1);
4095 gen_movl_reg_TN(rs2, cpu_src2);
4096 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4097 gen_movl_TN_reg(rd, cpu_dst);
4099 case 0x012: /* VIS I array16 */
4100 CHECK_FPU_FEATURE(dc, VIS1);
4101 cpu_src1 = get_src1(insn, cpu_src1);
4102 gen_movl_reg_TN(rs2, cpu_src2);
4103 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4104 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4105 gen_movl_TN_reg(rd, cpu_dst);
4107 case 0x014: /* VIS I array32 */
4108 CHECK_FPU_FEATURE(dc, VIS1);
4109 cpu_src1 = get_src1(insn, cpu_src1);
4110 gen_movl_reg_TN(rs2, cpu_src2);
4111 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4112 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4113 gen_movl_TN_reg(rd, cpu_dst);
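/* array16 and array32 reuse the array8 helper and simply scale the
   resulting blocked-array address by the element size (<< 1 and
   << 2 respectively). */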
4115 case 0x018: /* VIS I alignaddr */
4116 CHECK_FPU_FEATURE(dc, VIS1);
4117 cpu_src1 = get_src1(insn, cpu_src1);
4118 gen_movl_reg_TN(rs2, cpu_src2);
4119 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4120 gen_movl_TN_reg(rd, cpu_dst);
4122 case 0x01a: /* VIS I alignaddrl */
4123 CHECK_FPU_FEATURE(dc, VIS1);
4124 cpu_src1 = get_src1(insn, cpu_src1);
4125 gen_movl_reg_TN(rs2, cpu_src2);
4126 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4127 gen_movl_TN_reg(rd, cpu_dst);
4129 case 0x019: /* VIS II bmask */
4130 CHECK_FPU_FEATURE(dc, VIS2);
4131 cpu_src1 = get_src1(insn, cpu_src1);
4132 cpu_src2 = get_src2(insn, cpu_src2);
4133 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4134 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4135 gen_movl_TN_reg(rd, cpu_dst);
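/* bmask deposits the computed mask into the upper half of %gsr
   (bits 63:32), where the bshuffle implementation later reads it. */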
4137 case 0x020: /* VIS I fcmple16 */
4138 CHECK_FPU_FEATURE(dc, VIS1);
4139 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4140 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4141 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4142 gen_movl_TN_reg(rd, cpu_dst);
4144 case 0x022: /* VIS I fcmpne16 */
4145 CHECK_FPU_FEATURE(dc, VIS1);
4146 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4147 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4148 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4149 gen_movl_TN_reg(rd, cpu_dst);
4151 case 0x024: /* VIS I fcmple32 */
4152 CHECK_FPU_FEATURE(dc, VIS1);
4153 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4154 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4155 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4156 gen_movl_TN_reg(rd, cpu_dst);
4158 case 0x026: /* VIS I fcmpne32 */
4159 CHECK_FPU_FEATURE(dc, VIS1);
4160 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4161 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4162 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4163 gen_movl_TN_reg(rd, cpu_dst);
4165 case 0x028: /* VIS I fcmpgt16 */
4166 CHECK_FPU_FEATURE(dc, VIS1);
4167 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4168 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4169 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4170 gen_movl_TN_reg(rd, cpu_dst);
4172 case 0x02a: /* VIS I fcmpeq16 */
4173 CHECK_FPU_FEATURE(dc, VIS1);
4174 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4175 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4176 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4177 gen_movl_TN_reg(rd, cpu_dst);
4179 case 0x02c: /* VIS I fcmpgt32 */
4180 CHECK_FPU_FEATURE(dc, VIS1);
4181 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4182 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4183 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4184 gen_movl_TN_reg(rd, cpu_dst);
4186 case 0x02e: /* VIS I fcmpeq32 */
4187 CHECK_FPU_FEATURE(dc, VIS1);
4188 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4189 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4190 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4191 gen_movl_TN_reg(rd, cpu_dst);
4193 case 0x031: /* VIS I fmul8x16 */
4194 CHECK_FPU_FEATURE(dc, VIS1);
4195 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4197 case 0x033: /* VIS I fmul8x16au */
4198 CHECK_FPU_FEATURE(dc, VIS1);
4199 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4201 case 0x035: /* VIS I fmul8x16al */
4202 CHECK_FPU_FEATURE(dc, VIS1);
4203 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4205 case 0x036: /* VIS I fmul8sux16 */
4206 CHECK_FPU_FEATURE(dc, VIS1);
4207 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4209 case 0x037: /* VIS I fmul8ulx16 */
4210 CHECK_FPU_FEATURE(dc, VIS1);
4211 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4213 case 0x038: /* VIS I fmuld8sux16 */
4214 CHECK_FPU_FEATURE(dc, VIS1);
4215 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4217 case 0x039: /* VIS I fmuld8ulx16 */
4218 CHECK_FPU_FEATURE(dc, VIS1);
4219 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4221 case 0x03a: /* VIS I fpack32 */
4222 CHECK_FPU_FEATURE(dc, VIS1);
4223 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4225 case 0x03b: /* VIS I fpack16 */
4226 CHECK_FPU_FEATURE(dc, VIS1);
4227 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4228 cpu_dst_32 = gen_dest_fpr_F();
4229 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4230 gen_store_fpr_F(dc, rd, cpu_dst_32);
4232 case 0x03d: /* VIS I fpackfix */
4233 CHECK_FPU_FEATURE(dc, VIS1);
4234 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4235 cpu_dst_32 = gen_dest_fpr_F();
4236 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4237 gen_store_fpr_F(dc, rd, cpu_dst_32);
4239 case 0x03e: /* VIS I pdist */
4240 CHECK_FPU_FEATURE(dc, VIS1);
4241 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4243 case 0x048: /* VIS I faligndata */
4244 CHECK_FPU_FEATURE(dc, VIS1);
4245 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4247 case 0x04b: /* VIS I fpmerge */
4248 CHECK_FPU_FEATURE(dc, VIS1);
4249 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4251 case 0x04c: /* VIS II bshuffle */
4252 CHECK_FPU_FEATURE(dc, VIS2);
4253 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4255 case 0x04d: /* VIS I fexpand */
4256 CHECK_FPU_FEATURE(dc, VIS1);
4257 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4259 case 0x050: /* VIS I fpadd16 */
4260 CHECK_FPU_FEATURE(dc, VIS1);
4261 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4263 case 0x051: /* VIS I fpadd16s */
4264 CHECK_FPU_FEATURE(dc, VIS1);
4265 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4267 case 0x052: /* VIS I fpadd32 */
4268 CHECK_FPU_FEATURE(dc, VIS1);
4269 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4271 case 0x053: /* VIS I fpadd32s */
4272 CHECK_FPU_FEATURE(dc, VIS1);
4273 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4275 case 0x054: /* VIS I fpsub16 */
4276 CHECK_FPU_FEATURE(dc, VIS1);
4277 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4279 case 0x055: /* VIS I fpsub16s */
4280 CHECK_FPU_FEATURE(dc, VIS1);
4281 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4283 case 0x056: /* VIS I fpsub32 */
4284 CHECK_FPU_FEATURE(dc, VIS1);
4285 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4287 case 0x057: /* VIS I fpsub32s */
4288 CHECK_FPU_FEATURE(dc, VIS1);
4289 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4291 case 0x060: /* VIS I fzero */
4292 CHECK_FPU_FEATURE(dc, VIS1);
4293 cpu_dst_64 = gen_dest_fpr_D();
4294 tcg_gen_movi_i64(cpu_dst_64, 0);
4295 gen_store_fpr_D(dc, rd, cpu_dst_64);
4297 case 0x061: /* VIS I fzeros */
4298 CHECK_FPU_FEATURE(dc, VIS1);
4299 cpu_dst_32 = gen_dest_fpr_F();
4300 tcg_gen_movi_i32(cpu_dst_32, 0);
4301 gen_store_fpr_F(dc, rd, cpu_dst_32);
4303 case 0x062: /* VIS I fnor */
4304 CHECK_FPU_FEATURE(dc, VIS1);
4305 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4307 case 0x063: /* VIS I fnors */
4308 CHECK_FPU_FEATURE(dc, VIS1);
4309 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4311 case 0x064: /* VIS I fandnot2 */
4312 CHECK_FPU_FEATURE(dc, VIS1);
4313 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4315 case 0x065: /* VIS I fandnot2s */
4316 CHECK_FPU_FEATURE(dc, VIS1);
4317 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4319 case 0x066: /* VIS I fnot2 */
4320 CHECK_FPU_FEATURE(dc, VIS1);
4321 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4323 case 0x067: /* VIS I fnot2s */
4324 CHECK_FPU_FEATURE(dc, VIS1);
4325 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4327 case 0x068: /* VIS I fandnot1 */
4328 CHECK_FPU_FEATURE(dc, VIS1);
4329 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4331 case 0x069: /* VIS I fandnot1s */
4332 CHECK_FPU_FEATURE(dc, VIS1);
4333 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4335 case 0x06a: /* VIS I fnot1 */
4336 CHECK_FPU_FEATURE(dc, VIS1);
4337 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4339 case 0x06b: /* VIS I fnot1s */
4340 CHECK_FPU_FEATURE(dc, VIS1);
4341 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4343 case 0x06c: /* VIS I fxor */
4344 CHECK_FPU_FEATURE(dc, VIS1);
4345 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4347 case 0x06d: /* VIS I fxors */
4348 CHECK_FPU_FEATURE(dc, VIS1);
4349 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4351 case 0x06e: /* VIS I fnand */
4352 CHECK_FPU_FEATURE(dc, VIS1);
4353 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4355 case 0x06f: /* VIS I fnands */
4356 CHECK_FPU_FEATURE(dc, VIS1);
4357 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4359 case 0x070: /* VIS I fand */
4360 CHECK_FPU_FEATURE(dc, VIS1);
4361 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4363 case 0x071: /* VIS I fands */
4364 CHECK_FPU_FEATURE(dc, VIS1);
4365 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4367 case 0x072: /* VIS I fxnor */
4368 CHECK_FPU_FEATURE(dc, VIS1);
4369 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4371 case 0x073: /* VIS I fxnors */
4372 CHECK_FPU_FEATURE(dc, VIS1);
4373 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4375 case 0x074: /* VIS I fsrc1 */
4376 CHECK_FPU_FEATURE(dc, VIS1);
4377 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4378 gen_store_fpr_D(dc, rd, cpu_src1_64);
4380 case 0x075: /* VIS I fsrc1s */
4381 CHECK_FPU_FEATURE(dc, VIS1);
4382 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4383 gen_store_fpr_F(dc, rd, cpu_src1_32);
4385 case 0x076: /* VIS I fornot2 */
4386 CHECK_FPU_FEATURE(dc, VIS1);
4387 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4389 case 0x077: /* VIS I fornot2s */
4390 CHECK_FPU_FEATURE(dc, VIS1);
4391 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4393 case 0x078: /* VIS I fsrc2 */
4394 CHECK_FPU_FEATURE(dc, VIS1);
4395 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4396 gen_store_fpr_D(dc, rd, cpu_src1_64);
4398 case 0x079: /* VIS I fsrc2s */
4399 CHECK_FPU_FEATURE(dc, VIS1);
4400 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4401 gen_store_fpr_F(dc, rd, cpu_src1_32);
4403 case 0x07a: /* VIS I fornot1 */
4404 CHECK_FPU_FEATURE(dc, VIS1);
4405 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4407 case 0x07b: /* VIS I fornot1s */
4408 CHECK_FPU_FEATURE(dc, VIS1);
4409 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4411 case 0x07c: /* VIS I for */
4412 CHECK_FPU_FEATURE(dc, VIS1);
4413 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4415 case 0x07d: /* VIS I fors */
4416 CHECK_FPU_FEATURE(dc, VIS1);
4417 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4419 case 0x07e: /* VIS I fone */
4420 CHECK_FPU_FEATURE(dc, VIS1);
4421 cpu_dst_64 = gen_dest_fpr_D();
4422 tcg_gen_movi_i64(cpu_dst_64, -1);
4423 gen_store_fpr_D(dc, rd, cpu_dst_64);
4425 case 0x07f: /* VIS I fones */
4426 CHECK_FPU_FEATURE(dc, VIS1);
4427 cpu_dst_32 = gen_dest_fpr_F();
4428 tcg_gen_movi_i32(cpu_dst_32, -1);
4429 gen_store_fpr_F(dc, rd, cpu_dst_32);
4431 case 0x080: /* VIS I shutdown */
4432 case 0x081: /* VIS II siam */
4441 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4442 #ifdef TARGET_SPARC64
4447 #ifdef TARGET_SPARC64
4448 } else if (xop == 0x39) { /* V9 return */
4452 cpu_src1 = get_src1(insn, cpu_src1);
4453 if (IS_IMM) { /* immediate */
4454 simm = GET_FIELDs(insn, 19, 31);
4455 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4456 } else { /* register */
4457 rs2 = GET_FIELD(insn, 27, 31);
4458 if (rs2) {
4459     gen_movl_reg_TN(rs2, cpu_src2);
4460     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4461 } else
4462     tcg_gen_mov_tl(cpu_dst, cpu_src1);
4464 gen_helper_restore(cpu_env);
4466 r_const = tcg_const_i32(3);
4467 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4468 tcg_temp_free_i32(r_const);
4469 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4470 dc->npc = DYNAMIC_PC;
4474 cpu_src1 = get_src1(insn, cpu_src1);
4475 if (IS_IMM) { /* immediate */
4476 simm = GET_FIELDs(insn, 19, 31);
4477 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4478 } else { /* register */
4479 rs2 = GET_FIELD(insn, 27, 31);
4480 if (rs2) {
4481     gen_movl_reg_TN(rs2, cpu_src2);
4482     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4483 } else
4484     tcg_gen_mov_tl(cpu_dst, cpu_src1);
4487 case 0x38: /* jmpl */
4492 r_pc = tcg_const_tl(dc->pc);
4493 gen_movl_TN_reg(rd, r_pc);
4494 tcg_temp_free(r_pc);
4496 r_const = tcg_const_i32(3);
4497 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4498 tcg_temp_free_i32(r_const);
4499 gen_address_mask(dc, cpu_dst);
4500 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4501 dc->npc = DYNAMIC_PC;
4504 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4505 case 0x39: /* rett, V9 return */
4509 if (!supervisor(dc))
4510     goto priv_insn;
4512 r_const = tcg_const_i32(3);
4513 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4514 tcg_temp_free_i32(r_const);
4515 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4516 dc->npc = DYNAMIC_PC;
4517 gen_helper_rett(cpu_env);
4521 case 0x3b: /* flush */
4522 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4523     goto unimp_flush;
4524 /* nop */
4525 break;
4526 case 0x3c: /* save */
4528 gen_helper_save(cpu_env);
4529 gen_movl_TN_reg(rd, cpu_dst);
4531 case 0x3d: /* restore */
4533 gen_helper_restore(cpu_env);
4534 gen_movl_TN_reg(rd, cpu_dst);
4536 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4537 case 0x3e: /* V9 done/retry */
4541 if (!supervisor(dc))
4542     goto priv_insn;
4543 dc->npc = DYNAMIC_PC;
4544 dc->pc = DYNAMIC_PC;
4545 gen_helper_done(cpu_env);
4548 if (!supervisor(dc))
4549     goto priv_insn;
4550 dc->npc = DYNAMIC_PC;
4551 dc->pc = DYNAMIC_PC;
4552 gen_helper_retry(cpu_env);
4567 case 3: /* load/store instructions */
4569 unsigned int xop = GET_FIELD(insn, 7, 12);
4571 /* flush pending conditional evaluations before exposing
4572    cpu state */
4573 if (dc->cc_op != CC_OP_FLAGS) {
4574 dc->cc_op = CC_OP_FLAGS;
4575 gen_helper_compute_psr(cpu_env);
4577 cpu_src1 = get_src1(insn, cpu_src1);
4578 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4579 rs2 = GET_FIELD(insn, 27, 31);
4580 gen_movl_reg_TN(rs2, cpu_src2);
4581 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4582 } else if (IS_IMM) { /* immediate */
4583 simm = GET_FIELDs(insn, 19, 31);
4584 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4585 } else { /* register */
4586     rs2 = GET_FIELD(insn, 27, 31);
4587     if (rs2 != 0) {
4588         gen_movl_reg_TN(rs2, cpu_src2);
4589         tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4590     } else
4591         tcg_gen_mov_tl(cpu_addr, cpu_src1);
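/* Effective address: rs1 + sign_ext(simm13) or rs1 + rs2 (with
   rs2 = %g0 collapsing to a plain copy of rs1).  casa/casxa are the
   exception handled above: they take the address from rs1 alone and
   use rs2 as the comparison value. */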
4593 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4594 (xop > 0x17 && xop <= 0x1d ) ||
4595 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4597 case 0x0: /* ld, V9 lduw, load unsigned word */
4598 gen_address_mask(dc, cpu_addr);
4599 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4601 case 0x1: /* ldub, load unsigned byte */
4602 gen_address_mask(dc, cpu_addr);
4603 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4605 case 0x2: /* lduh, load unsigned halfword */
4606 gen_address_mask(dc, cpu_addr);
4607 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4609 case 0x3: /* ldd, load double word */
4610     if (rd & 1)
4611         goto illegal_insn;
4616     r_const = tcg_const_i32(7);
4617 /* XXX remove alignment check */
4618 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4619 tcg_temp_free_i32(r_const);
4620 gen_address_mask(dc, cpu_addr);
4621 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4622 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4623 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4624 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4625 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4626 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4627 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
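/* ldd fills the even/odd register pair: the low 32 bits of the
   64-bit load go to r[rd + 1] and the high 32 bits to r[rd] (left
   in cpu_val and written back below), matching big-endian memory
   order. */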
4630 case 0x9: /* ldsb, load signed byte */
4631 gen_address_mask(dc, cpu_addr);
4632 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4634 case 0xa: /* ldsh, load signed halfword */
4635 gen_address_mask(dc, cpu_addr);
4636 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4638 case 0xd: /* ldstub -- XXX: should be atomically */
4642 gen_address_mask(dc, cpu_addr);
4643 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4644 r_const = tcg_const_tl(0xff);
4645 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4646 tcg_temp_free(r_const);
4649 case 0x0f: /* swap, swap register with memory. Also
4650               atomically */
4651 CHECK_IU_FEATURE(dc, SWAP);
4652 gen_movl_reg_TN(rd, cpu_val);
4653 gen_address_mask(dc, cpu_addr);
4654 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4655 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4656 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4658 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4659 case 0x10: /* lda, V9 lduwa, load word alternate */
4660 #ifndef TARGET_SPARC64
4663 if (!supervisor(dc))
4664     goto priv_insn;
4667 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4669 case 0x11: /* lduba, load unsigned byte alternate */
4670 #ifndef TARGET_SPARC64
4673 if (!supervisor(dc))
4674     goto priv_insn;
4677 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4679 case 0x12: /* lduha, load unsigned halfword alternate */
4680 #ifndef TARGET_SPARC64
4683 if (!supervisor(dc))
4684     goto priv_insn;
4687 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4689 case 0x13: /* ldda, load double word alternate */
4690 #ifndef TARGET_SPARC64
4693 if (!supervisor(dc))
4694     goto priv_insn;
4699 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4701 case 0x19: /* ldsba, load signed byte alternate */
4702 #ifndef TARGET_SPARC64
4705 if (!supervisor(dc))
4706     goto priv_insn;
4709 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4711 case 0x1a: /* ldsha, load signed halfword alternate */
4712 #ifndef TARGET_SPARC64
4715 if (!supervisor(dc))
4716     goto priv_insn;
4719 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4721 case 0x1d: /* ldstuba -- XXX: should be atomically */
4722 #ifndef TARGET_SPARC64
4725 if (!supervisor(dc))
4726     goto priv_insn;
4729 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4731 case 0x1f: /* swapa, swap reg with alt. memory. Also
4732               atomically */
4733 CHECK_IU_FEATURE(dc, SWAP);
4734 #ifndef TARGET_SPARC64
4737 if (!supervisor(dc))
4738     goto priv_insn;
4741 gen_movl_reg_TN(rd, cpu_val);
4742 gen_swap_asi(cpu_val, cpu_addr, insn);
4745 #ifndef TARGET_SPARC64
4746 case 0x30: /* ldc */
4747 case 0x31: /* ldcsr */
4748 case 0x33: /* lddc */
4752 #ifdef TARGET_SPARC64
4753 case 0x08: /* V9 ldsw */
4754 gen_address_mask(dc, cpu_addr);
4755 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4757 case 0x0b: /* V9 ldx */
4758 gen_address_mask(dc, cpu_addr);
4759 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4761 case 0x18: /* V9 ldswa */
4763 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4765 case 0x1b: /* V9 ldxa */
4767 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4769 case 0x2d: /* V9 prefetch, no effect */
4771 case 0x30: /* V9 ldfa */
4772 if (gen_trap_ifnofpu(dc)) {
4773     goto jmp_insn;
4774 }
4776 gen_ldf_asi(cpu_addr, insn, 4, rd);
4777 gen_update_fprs_dirty(rd);
4779 case 0x33: /* V9 lddfa */
4780 if (gen_trap_ifnofpu(dc)) {
4781     goto jmp_insn;
4782 }
4784 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4785 gen_update_fprs_dirty(DFPREG(rd));
4787 case 0x3d: /* V9 prefetcha, no effect */
4789 case 0x32: /* V9 ldqfa */
4790 CHECK_FPU_FEATURE(dc, FLOAT128);
4791 if (gen_trap_ifnofpu(dc)) {
4792     goto jmp_insn;
4793 }
4795 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4796 gen_update_fprs_dirty(QFPREG(rd));
4802 gen_movl_TN_reg(rd, cpu_val);
4803 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4806 } else if (xop >= 0x20 && xop < 0x24) {
4807 if (gen_trap_ifnofpu(dc)) {
4808     goto jmp_insn;
4809 }
4811 switch (xop) {
4812 case 0x20: /* ldf, load fpreg */
4813 gen_address_mask(dc, cpu_addr);
4814 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4815 cpu_dst_32 = gen_dest_fpr_F();
4816 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4817 gen_store_fpr_F(dc, rd, cpu_dst_32);
4819 case 0x21: /* ldfsr, V9 ldxfsr */
4820 #ifdef TARGET_SPARC64
4821 gen_address_mask(dc, cpu_addr);
4823 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4824 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4826 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4827 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4828 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4832 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4833 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4837 case 0x22: /* ldqf, load quad fpreg */
4841 CHECK_FPU_FEATURE(dc, FLOAT128);
4842 r_const = tcg_const_i32(dc->mem_idx);
4843 gen_address_mask(dc, cpu_addr);
4844 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4845 tcg_temp_free_i32(r_const);
4846 gen_op_store_QT0_fpr(QFPREG(rd));
4847 gen_update_fprs_dirty(QFPREG(rd));
4850 case 0x23: /* lddf, load double fpreg */
4851 gen_address_mask(dc, cpu_addr);
4852 cpu_dst_64 = gen_dest_fpr_D();
4853 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4854 gen_store_fpr_D(dc, rd, cpu_dst_64);
4859 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4860 xop == 0xe || xop == 0x1e) {
4861 gen_movl_reg_TN(rd, cpu_val);
4863 case 0x4: /* st, store word */
4864 gen_address_mask(dc, cpu_addr);
4865 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4867 case 0x5: /* stb, store byte */
4868 gen_address_mask(dc, cpu_addr);
4869 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4871 case 0x6: /* sth, store halfword */
4872 gen_address_mask(dc, cpu_addr);
4873 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4875 case 0x7: /* std, store double word */
4876     if (rd & 1)
4877         goto illegal_insn;
4882     gen_address_mask(dc, cpu_addr);
4883 r_const = tcg_const_i32(7);
4884 /* XXX remove alignment check */
4885 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4886 tcg_temp_free_i32(r_const);
4887 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4888 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4889 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
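/* std mirrors ldd: r[rd] supplies the high word and r[rd + 1] the
   low word, concatenated into a single 64-bit store. */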
4892 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4893 case 0x14: /* sta, V9 stwa, store word alternate */
4894 #ifndef TARGET_SPARC64
4897 if (!supervisor(dc))
4898     goto priv_insn;
4901 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4902 dc->npc = DYNAMIC_PC;
4904 case 0x15: /* stba, store byte alternate */
4905 #ifndef TARGET_SPARC64
4908 if (!supervisor(dc))
4909     goto priv_insn;
4912 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4913 dc->npc = DYNAMIC_PC;
4915 case 0x16: /* stha, store halfword alternate */
4916 #ifndef TARGET_SPARC64
4919 if (!supervisor(dc))
4920     goto priv_insn;
4923 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4924 dc->npc = DYNAMIC_PC;
4926 case 0x17: /* stda, store double word alternate */
4927 #ifndef TARGET_SPARC64
4930 if (!supervisor(dc))
4931     goto priv_insn;
4937 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4941 #ifdef TARGET_SPARC64
4942 case 0x0e: /* V9 stx */
4943 gen_address_mask(dc, cpu_addr);
4944 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4946 case 0x1e: /* V9 stxa */
4948 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4949 dc->npc = DYNAMIC_PC;
4955 } else if (xop > 0x23 && xop < 0x28) {
4956 if (gen_trap_ifnofpu(dc)) {
4957     goto jmp_insn;
4958 }
4960 switch (xop) {
4961 case 0x24: /* stf, store fpreg */
4962 gen_address_mask(dc, cpu_addr);
4963 cpu_src1_32 = gen_load_fpr_F(dc, rd);
4964 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
4965 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4967 case 0x25: /* stfsr, V9 stxfsr */
4968 #ifdef TARGET_SPARC64
4969 gen_address_mask(dc, cpu_addr);
4970 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
4972 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4974 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4976 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
4977 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4981 #ifdef TARGET_SPARC64
4982 /* V9 stqf, store quad fpreg */
4986 CHECK_FPU_FEATURE(dc, FLOAT128);
4987 gen_op_load_fpr_QT0(QFPREG(rd));
4988 r_const = tcg_const_i32(dc->mem_idx);
4989 gen_address_mask(dc, cpu_addr);
4990 gen_helper_stqf(cpu_env, cpu_addr, r_const);
4991 tcg_temp_free_i32(r_const);
4994 #else /* !TARGET_SPARC64 */
4995 /* stdfq, store floating point queue */
4996 #if defined(CONFIG_USER_ONLY)
4999 if (!supervisor(dc))
5000     goto priv_insn;
5001 if (gen_trap_ifnofpu(dc)) {
5002     goto jmp_insn;
5003 }
5004 goto nfq_insn;
5005 #endif
5007 case 0x27: /* stdf, store double fpreg */
5008 gen_address_mask(dc, cpu_addr);
5009 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5010 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5015 } else if (xop > 0x33 && xop < 0x3f) {
5018 #ifdef TARGET_SPARC64
5019 case 0x34: /* V9 stfa */
5020 if (gen_trap_ifnofpu(dc)) {
5021     goto jmp_insn;
5022 }
5023 gen_stf_asi(cpu_addr, insn, 4, rd);
5025 case 0x36: /* V9 stqfa */
5029 CHECK_FPU_FEATURE(dc, FLOAT128);
5030 if (gen_trap_ifnofpu(dc)) {
5031     goto jmp_insn;
5032 }
5033 r_const = tcg_const_i32(7);
5034 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5035 tcg_temp_free_i32(r_const);
5036 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5039 case 0x37: /* V9 stdfa */
5040 if (gen_trap_ifnofpu(dc)) {
5041     goto jmp_insn;
5042 }
5043 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5045 case 0x3c: /* V9 casa */
5046 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5047 gen_movl_TN_reg(rd, cpu_val);
5049 case 0x3e: /* V9 casxa */
5050 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5051 gen_movl_TN_reg(rd, cpu_val);
5054 case 0x34: /* stc */
5055 case 0x35: /* stcsr */
5056 case 0x36: /* stdcq */
5057 case 0x37: /* stdc */
5068 /* default case for non jump instructions */
5069 if (dc->npc == DYNAMIC_PC) {
5070 dc->pc = DYNAMIC_PC;
5071 gen_op_next_insn();
5072 } else if (dc->npc == JUMP_PC) {
5073 /* we can do a static jump */
5074 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5075 dc->is_br = 1;
5076 } else {
5077     dc->pc = dc->npc;
5078     dc->npc = dc->npc + 4;
5079 }
5087 r_const = tcg_const_i32(TT_ILL_INSN);
5088 gen_helper_raise_exception(cpu_env, r_const);
5089 tcg_temp_free_i32(r_const);
5098 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5099 gen_helper_raise_exception(cpu_env, r_const);
5100 tcg_temp_free_i32(r_const);
5104 #if !defined(CONFIG_USER_ONLY)
5110 r_const = tcg_const_i32(TT_PRIV_INSN);
5111 gen_helper_raise_exception(cpu_env, r_const);
5112 tcg_temp_free_i32(r_const);
5119 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5122 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5125 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5129 #ifndef TARGET_SPARC64
5135 r_const = tcg_const_i32(TT_NCP_INSN);
5136 gen_helper_raise_exception(cpu_env, r_const);
5137 tcg_temp_free_i32(r_const);
5143 tcg_temp_free(cpu_tmp1);
5144 tcg_temp_free(cpu_tmp2);
5145 if (dc->n_t32 != 0) {
5146     int i;
5147     for (i = dc->n_t32 - 1; i >= 0; --i) {
5148         tcg_temp_free_i32(dc->t32[i]);
5149     }
5150 }
5151 }
5154 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
5155 int spc, CPUSPARCState *env)
5157 target_ulong pc_start, last_pc;
5158 uint16_t *gen_opc_end;
5159 DisasContext dc1, *dc = &dc1;
5160 CPUBreakpoint *bp;
5161 int j, lj = -1;
5162 int num_insns;
5163 int max_insns;
5164 unsigned int insn;
5166 memset(dc, 0, sizeof(DisasContext));
5167 dc->tb = tb;
5168 pc_start = tb->pc;
5169 dc->pc = pc_start;
5170 last_pc = dc->pc;
5171 dc->npc = (target_ulong) tb->cs_base;
5172 dc->cc_op = CC_OP_DYNAMIC;
5173 dc->mem_idx = cpu_mmu_index(env);
5174 dc->def = env->def;
5175 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5176 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5177 dc->singlestep = (env->singlestep_enabled || singlestep);
5178 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5180 num_insns = 0;
5181 max_insns = tb->cflags & CF_COUNT_MASK;
5182 if (max_insns == 0)
5183     max_insns = CF_COUNT_MASK;
5184 gen_icount_start();
5185 do {
5186 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5187 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5188 if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start) {
                        save_state(dc);
                    }
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
5199 qemu_log("Search PC...\n");
5200 j = gen_opc_ptr - gen_opc_buf;
5204 gen_opc_instr_start[lj++] = 0;
5205 gen_opc_pc[lj] = dc->pc;
5206 gen_opc_npc[lj] = dc->npc;
5207 gen_opc_instr_start[lj] = 1;
5208 gen_opc_icount[lj] = num_insns;
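        /* The gen_opc_* side tables record, for each generated opcode
           index, the guest pc/npc and the number of instructions executed
           so far; restore_state_to_opc() consults them to rebuild precise
           guest state when a fault lands in the middle of a TB. */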
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }
        last_pc = dc->pc;
        insn = cpu_ldl_code(env, dc->pc);
5216 cpu_tmp0 = tcg_temp_new();
5217 cpu_tmp32 = tcg_temp_new_i32();
5218 cpu_tmp64 = tcg_temp_new_i64();
5219 cpu_dst = tcg_temp_new();
5220 cpu_val = tcg_temp_new();
5221 cpu_addr = tcg_temp_new();
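        /* Fresh temporaries are allocated for every instruction and freed
           again right after decode; per-instruction lifetimes keep TCG's
           liveness analysis simple and prevent temps from leaking across
           loop iterations. */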
        disas_sparc_insn(dc, insn);
        num_insns++;
5226 tcg_temp_free(cpu_addr);
5227 tcg_temp_free(cpu_val);
5228 tcg_temp_free(cpu_dst);
5229 tcg_temp_free_i64(cpu_tmp64);
5230 tcg_temp_free_i32(cpu_tmp32);
        tcg_temp_free(cpu_tmp0);

        if (dc->is_br) {
            break;
        }
5235 /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4)) {
            break;
        }
5238 /* if we reach a page boundary, we stop generation so that the
5239 PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0) {
            break;
        }
5242 /* if single step mode, we generate only one instruction and
5243 generate an exception */
        if (dc->singlestep) {
            break;
        }
5247 } while ((gen_opc_ptr < gen_opc_end) &&
5248 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5249 num_insns < max_insns);
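    /* Translation stops when the opcode buffer is close to full, when the
       decoder has consumed nearly a guest page (the 32-byte margin leaves
       room for a final instruction's worth of ops), or when the icount
       budget for this TB is spent. */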
 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5258 /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
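    /* gen_goto_tb() emits a patchable direct jump, so once both successor
       TBs exist they can be chained without returning to the main loop;
       tcg_gen_exit_tb(0) instead forces a return to the loop so the
       (dynamic) pc/npc pair is looked up afresh. */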
5268 gen_icount_end(tb, num_insns);
5269 *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j) {
            gen_opc_instr_start[lj++] = 0;
        }
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
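    /* In search mode (spc != 0) the generated code itself is thrown away;
       only the side tables and the pending conditional-branch targets are
       kept so the caller can reconstruct pc/npc at any opcode index. */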
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
5308 static const char * const gregnames[8] = {
        NULL, /* %g0 is hardwired to zero and never allocated */
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
5318 static const char * const fregnames[32] = {
5319 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5320 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5321 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5322 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUSPARCState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUSPARCState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUSPARCState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUSPARCState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
                                      "hver");
5357 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5358 offsetof(CPUSPARCState, ssr), "ssr");
5359 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5360 offsetof(CPUSPARCState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUSPARCState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUSPARCState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
                                     "npc");
5387 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5388 #ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++) {
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUSPARCState, gregs[i]),
                                              gregnames[i]);
        }
        for (i = 0; i < TARGET_DPREGS; i++) {
            cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                                offsetof(CPUSPARCState, fpr[i]),
                                                fregnames[i]);
        }
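        /* %g0 always reads as zero, so no TCG global is created for it and
           the gregs loop above deliberately starts at index 1. */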
5403 /* register helpers */
#define GEN_HELPER 2
#include "helper.h"
    }
}
void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
5414 npc = gen_opc_npc[pc_pos];
    if (npc == DYNAMIC_PC) {
        /* dynamic NPC: already stored */
    } else if (npc == JUMP_PC) {
5418 /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }
5428 /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr(env);
    }
}
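/* Condition codes are evaluated lazily: cc_op plus the cc_src operands
   describe how to recompute the flags, and helper_compute_psr() folds
   them into env->psr (leaving cc_op at CC_OP_FLAGS) before any code
   outside the translator inspects the guest flags. */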