5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
/* Sentinel values stored in dc->pc / dc->npc when the PC is not known at
   translation time; real guest addresses are never this small. */
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
41 /* global register indexes */
/* TCG global variables mirroring SPARC architectural state in
   CPUSPARCState: lazy condition-code sources, PSR, FSR, PC/NPC and the
   eight global registers. */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 #ifndef CONFIG_USER_ONLY
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
/* xcc flags, %asi, %fprs, timer-compare and hypervisor registers --
   presumably guarded by TARGET_SPARC64; the #ifdef lines are missing
   from this extract, TODO confirm against the full file. */
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
61 /* local register indexes (only used inside old micro ops) */
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
/* Per-generated-op npc and jump-target side tables, presumably used when
   restoring CPU state from a translation block -- confirm against
   gen_intermediate_code / restore code (not visible here). */
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
71 #include "gen-icount.h"
/* Per-translation-block disassembly state.  NOTE(review): the struct is
   truncated in this extract; more members exist in the full file. */
73 typedef struct DisasContext {
74 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
80 int address_mask_32bit; /* nonzero: mask addresses to 32 bits (see AM_CHECK) */
82 uint32_t cc_op; /* current CC operation */
83 struct TranslationBlock *tb; /* TB currently being translated */
96 // This function uses non-native bit order
/* Extract bits [FROM..TO] of X where bit 0 is the most-significant bit,
   as in instruction-format diagrams. */
97 #define GET_FIELD(X, FROM, TO) \
98 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
100 // This function uses the order in the manuals, i.e. bit 0 is 2^0
101 #define GET_FIELD_SP(X, FROM, TO) \
102 GET_FIELD(X, 31 - (TO), 31 - (FROM))
/* Signed variants: extract the field, then sign-extend to its width. */
104 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
105 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* Map an architectural double/quad FP register number to the cpu_fpr[]
   numbering; on sparc64 bit 0 of the encoding selects the upper bank. */
107 #ifdef TARGET_SPARC64
108 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
109 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
111 #define DFPREG(r) (r & 0x1e)
112 #define QFPREG(r) (r & 0x1c)
115 #define UA2005_HTRAP_MASK 0xff
116 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low 'len' bits of x.  NOTE(review): as shown this
   shifts by 'len' rather than (32 - len); the extract appears to be
   missing a line such as "len = 32 - len;" before the return -- confirm
   against the full source. */
118 static int sign_extend(int x, int len)
121 return (x << len) >> len;
/* Bit 13 of the instruction word selects the immediate operand form. */
124 #define IS_IMM (insn & (1<<13))
/* Mark the FP register bank containing rd as dirty in %fprs (bit 1 for
   regs < 32, bit 2 otherwise -- presumably FPRS.DL/DU); no-op unless
   TARGET_SPARC64. */
126 static inline void gen_update_fprs_dirty(int rd)
128 #if defined(TARGET_SPARC64)
129 tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
133 /* floating point registers moves */
/* Read single-precision FP register 'src'.  cpu_fpr[] packs an even/odd
   pair of 32-bit registers into each 64-bit TCG global, so the value is
   one half of cpu_fpr[src / 2]. */
134 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
136 #if TCG_TARGET_REG_BITS == 32
/* 32-bit host: the i64 global is physically a low/high i32 pair that
   can be handed out directly.  Which parity maps to which half is
   decided by if/else lines missing from this extract -- do not assume. */
138 return TCGV_LOW(cpu_fpr[src / 2]);
140 return TCGV_HIGH(cpu_fpr[src / 2]);
/* 64-bit host: one parity is returned by reinterpreting the i64 handle
   as i32 ... */
144 return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
/* ... the other is extracted by shifting the upper word into a temp
   that must outlive the op, so it is tracked in dc->t32[]. */
146 TCGv_i32 ret = tcg_temp_local_new_i32();
147 TCGv_i64 t = tcg_temp_new_i64();
149 tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
150 tcg_gen_trunc_i64_i32(ret, t);
151 tcg_temp_free_i64(t);
153 dc->t32[dc->n_t32++] = ret;
154 assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));
/* Write v into single-precision FP register 'dst', then mark %fprs
   dirty. */
161 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
163 #if TCG_TARGET_REG_BITS == 32
165 tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
167 tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
/* 64-bit host: deposit the 32-bit value into the correct half of the
   pair -- even registers occupy bits [63:32], odd registers [31:0]
   (visible from the deposit offset below). */
170 TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
171 tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
172 (dst & 1 ? 0 : 32), 32);
174 gen_update_fprs_dirty(dst);
/* Return a temp suitable to receive a single-precision result; the body
   is missing from this extract. */
177 static TCGv_i32 gen_dest_fpr_F(void)
/* A double-precision register pair maps 1:1 onto a cpu_fpr[] element,
   so loads hand back the global itself (caller must not free it). */
182 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
185 return cpu_fpr[src / 2];
/* Store a 64-bit value into double register 'dst' and mark %fprs dirty. */
188 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
191 tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
192 gen_update_fprs_dirty(dst);
/* Return a temp suitable to receive a double-precision result; the body
   is missing from this extract. */
195 static TCGv_i64 gen_dest_fpr_D(void)
/* Quad-precision (128-bit) values are staged through env->qt0 / env->qt1
   as two 64-bit halves (CPU_QuadU.ll.upper / .lower) for helper calls. */
200 static void gen_op_load_fpr_QT0(unsigned int src)
202 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
203 offsetof(CPU_QuadU, ll.upper));
204 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
205 offsetof(CPU_QuadU, ll.lower));
/* Same as above, staging the second operand into env->qt1. */
208 static void gen_op_load_fpr_QT1(unsigned int src)
210 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
211 offsetof(CPU_QuadU, ll.upper));
212 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
213 offsetof(CPU_QuadU, ll.lower));
/* Copy a quad result back from env->qt0 into the FP register file. */
216 static void gen_op_store_QT0_fpr(unsigned int dst)
218 tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
219 offsetof(CPU_QuadU, ll.upper));
220 tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
221 offsetof(CPU_QuadU, ll.lower));
224 #ifdef TARGET_SPARC64
/* Register-to-register move of a quad FP value: copy both 64-bit halves
   of the pair, then mark %fprs dirty. */
225 static void gen_move_Q(unsigned int rd, unsigned int rs)
230 tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
231 tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
232 gen_update_fprs_dirty(rd);
/* Privilege tests: user-only emulation is never supervisor/hypervisor;
   system emulation derives the level from the MMU index in use. */
237 #ifdef CONFIG_USER_ONLY
238 #define supervisor(dc) 0
239 #ifdef TARGET_SPARC64
240 #define hypervisor(dc) 0
243 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
244 #ifdef TARGET_SPARC64
245 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
/* AM_CHECK: whether addresses must be masked to 32 bits -- presumably
   the PSTATE.AM bit on sparc64 (per-dc flag), always true for 32-bit
   targets; TODO confirm. */
250 #ifdef TARGET_SPARC64
252 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
254 #define AM_CHECK(dc) (1)
/* Truncate a computed address to 32 bits when address masking applies.
   On sparc64 this is conditional on AM_CHECK(dc); the if line is missing
   from this extract.  32-bit targets need no masking. */
258 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
260 #ifdef TARGET_SPARC64
262 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
/* Copy architectural register 'reg' into tn: %g0 reads as zero, the
   other globals come from TCG globals, and windowed registers (>= 8)
   are loaded through the current-window pointer cpu_regwptr.  The
   selecting if/else-if lines are missing from this extract. */
266 static inline void gen_movl_reg_TN(int reg, TCGv tn)
269 tcg_gen_movi_tl(tn, 0);
271 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
273 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Store tn into architectural register 'reg'; writes to %g0 are
   presumably discarded by a branch missing from this extract. */
277 static inline void gen_movl_TN_reg(int reg, TCGv tn)
282 tcg_gen_mov_tl(cpu_gregs[reg], tn);
284 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Emit an exit to (pc, npc).  When both targets fall on the same guest
   page as the current TB, chain directly via goto_tb/exit_tb; otherwise
   just set pc/npc and take an ordinary exit.  NOTE(review): the tb
   assignment, the closing of the condition and the else-branch exit are
   missing from this extract. */
288 static inline void gen_goto_tb(DisasContext *s, int tb_num,
289 target_ulong pc, target_ulong npc)
291 TranslationBlock *tb;
294 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
295 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
297 /* jump to same page: we can use a direct jump */
298 tcg_gen_goto_tb(tb_num);
299 tcg_gen_movi_tl(cpu_pc, pc);
300 tcg_gen_movi_tl(cpu_npc, npc);
301 tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
303 /* jump to another page: currently not optimized */
304 tcg_gen_movi_tl(cpu_pc, pc);
305 tcg_gen_movi_tl(cpu_npc, npc);
/* Extract one PSR condition-code bit into 'reg' as 0 or 1:
   widen the 32-bit flags word, shift the bit down, mask to bit 0. */
311 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
313 tcg_gen_extu_i32_tl(reg, src);
314 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
315 tcg_gen_andi_tl(reg, reg, 0x1);
/* Zero flag (Z). */
318 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
320 tcg_gen_extu_i32_tl(reg, src);
321 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
322 tcg_gen_andi_tl(reg, reg, 0x1);
/* Overflow flag (V). */
325 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
327 tcg_gen_extu_i32_tl(reg, src);
328 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
329 tcg_gen_andi_tl(reg, reg, 0x1);
/* Carry flag (C). */
332 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
334 tcg_gen_extu_i32_tl(reg, src);
335 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
336 tcg_gen_andi_tl(reg, reg, 0x1);
/* Add with immediate, recording operands in cpu_cc_src/cpu_cc_src2 and
   the result in cpu_cc_dst so condition codes can be derived lazily. */
339 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
341 tcg_gen_mov_tl(cpu_cc_src, src1);
342 tcg_gen_movi_tl(cpu_cc_src2, src2);
343 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
344 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Register-register variant of the above. */
347 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
349 tcg_gen_mov_tl(cpu_cc_src, src1);
350 tcg_gen_mov_tl(cpu_cc_src2, src2);
351 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
352 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Recompute the 32-bit carry-out of a previously latched add as the
   unsigned comparison (cc_dst < cc_src); caller frees the returned temp.
   On 64-bit targets the latched values are truncated to 32 bits first. */
355 static TCGv_i32 gen_add32_carry32(void)
357 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
359 /* Carry is computed from a previous add: (dst < src) */
360 #if TARGET_LONG_BITS == 64
361 cc_src1_32 = tcg_temp_new_i32();
362 cc_src2_32 = tcg_temp_new_i32();
363 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
364 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
366 cc_src1_32 = cpu_cc_dst;
367 cc_src2_32 = cpu_cc_src;
370 carry_32 = tcg_temp_new_i32();
371 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
373 #if TARGET_LONG_BITS == 64
374 tcg_temp_free_i32(cc_src1_32);
375 tcg_temp_free_i32(cc_src2_32);
/* Borrow-out of a previously latched subtract: (src1 < src2) unsigned. */
381 static TCGv_i32 gen_sub32_carry32(void)
383 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
385 /* Carry is computed from a previous borrow: (src1 < src2) */
386 #if TARGET_LONG_BITS == 64
387 cc_src1_32 = tcg_temp_new_i32();
388 cc_src2_32 = tcg_temp_new_i32();
389 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
390 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
392 cc_src1_32 = cpu_cc_src;
393 cc_src2_32 = cpu_cc_src2;
396 carry_32 = tcg_temp_new_i32();
397 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
399 #if TARGET_LONG_BITS == 64
400 tcg_temp_free_i32(cc_src1_32);
401 tcg_temp_free_i32(cc_src2_32);
/* Emit ADDX (add with carry): dst = src1 + src2 + C.  The carry is
   obtained by the cheapest route available for the current dc->cc_op --
   the switch statement and its case labels are missing from this
   extract, so the branches below appear unlabeled. */
407 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
408 TCGv src2, int update_cc)
416 /* Carry is known to be zero. Fall back to plain ADD. */
418 gen_op_add_cc(dst, src1, src2);
420 tcg_gen_add_tl(dst, src1, src2);
427 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
429 /* For 32-bit hosts, we can re-use the host's hardware carry
430 generation by using an ADD2 opcode. We discard the low
431 part of the output. Ideally we'd combine this operation
432 with the add that generated the carry in the first place. */
433 TCGv dst_low = tcg_temp_new();
434 tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
435 cpu_cc_src, src1, cpu_cc_src2, src2);
436 tcg_temp_free(dst_low);
/* Carry left over from an earlier add / subtract respectively. */
440 carry_32 = gen_add32_carry32();
446 carry_32 = gen_sub32_carry32();
450 /* We need external help to produce the carry. */
451 carry_32 = tcg_temp_new_i32();
452 gen_helper_compute_C_icc(carry_32, cpu_env);
/* Widen the 32-bit carry to a target_long before adding it in. */
456 #if TARGET_LONG_BITS == 64
457 carry = tcg_temp_new();
458 tcg_gen_extu_i32_i64(carry, carry_32);
463 tcg_gen_add_tl(dst, src1, src2);
464 tcg_gen_add_tl(dst, dst, carry);
466 tcg_temp_free_i32(carry_32);
467 #if TARGET_LONG_BITS == 64
468 tcg_temp_free(carry);
471 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
/* When the caller wants flags, latch operands/result and switch the
   lazy condition-code state to CC_OP_ADDX. */
475 tcg_gen_mov_tl(cpu_cc_src, src1);
476 tcg_gen_mov_tl(cpu_cc_src2, src2);
477 tcg_gen_mov_tl(cpu_cc_dst, dst);
478 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
479 dc->cc_op = CC_OP_ADDX;
/* Subtract-immediate with flag latching.  The first branch treats the
   operation as a plain move with CC_OP_LOGIC flags -- presumably guarded
   by "if (src2 == 0)", a line missing from this extract. */
483 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
485 tcg_gen_mov_tl(cpu_cc_src, src1);
486 tcg_gen_movi_tl(cpu_cc_src2, src2);
488 tcg_gen_mov_tl(cpu_cc_dst, src1);
489 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
490 dc->cc_op = CC_OP_LOGIC;
492 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
493 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
494 dc->cc_op = CC_OP_SUB;
496 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Register-register subtract, latching operands and result for lazy
   condition-code evaluation. */
499 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
501 tcg_gen_mov_tl(cpu_cc_src, src1);
502 tcg_gen_mov_tl(cpu_cc_src2, src2);
503 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
504 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Emit SUBX (subtract with borrow): dst = src1 - src2 - C.  Mirrors
   gen_op_addx_int; the switch on dc->cc_op and its case labels are
   missing from this extract. */
507 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
508 TCGv src2, int update_cc)
516 /* Carry is known to be zero. Fall back to plain SUB. */
518 gen_op_sub_cc(dst, src1, src2);
520 tcg_gen_sub_tl(dst, src1, src2);
/* Carry left over from an earlier add. */
527 carry_32 = gen_add32_carry32();
533 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
535 /* For 32-bit hosts, we can re-use the host's hardware carry
536 generation by using a SUB2 opcode. We discard the low
537 part of the output. Ideally we'd combine this operation
538 with the add that generated the carry in the first place. */
539 TCGv dst_low = tcg_temp_new();
540 tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
541 cpu_cc_src, src1, cpu_cc_src2, src2);
542 tcg_temp_free(dst_low);
/* Borrow left over from an earlier subtract. */
546 carry_32 = gen_sub32_carry32();
550 /* We need external help to produce the carry. */
551 carry_32 = tcg_temp_new_i32();
552 gen_helper_compute_C_icc(carry_32, cpu_env);
/* Widen the carry and subtract it from the difference. */
556 #if TARGET_LONG_BITS == 64
557 carry = tcg_temp_new();
558 tcg_gen_extu_i32_i64(carry, carry_32);
563 tcg_gen_sub_tl(dst, src1, src2);
564 tcg_gen_sub_tl(dst, dst, carry);
566 tcg_temp_free_i32(carry_32);
567 #if TARGET_LONG_BITS == 64
568 tcg_temp_free(carry);
571 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
/* Latch state for lazy flags and mark the op as CC_OP_SUBX. */
575 tcg_gen_mov_tl(cpu_cc_src, src1);
576 tcg_gen_mov_tl(cpu_cc_src2, src2);
577 tcg_gen_mov_tl(cpu_cc_dst, dst);
578 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
579 dc->cc_op = CC_OP_SUBX;
/* One step of the SPARC MULScc multiply-step instruction: conditionally
   add src2 depending on %y bit 0, shift %y right inserting cc_src bit 0,
   and shift cc_src right inserting N ^ V of the current flags. */
583 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
587 r_temp = tcg_temp_new();
/* If %y bit 0 is clear the addend becomes zero via movcond. */
593 zero = tcg_const_tl(0);
594 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
595 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
596 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
597 tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
602 // env->y = (b2 << 31) | (env->y >> 1);
603 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
604 tcg_gen_shli_tl(r_temp, r_temp, 31);
605 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
606 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
607 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
608 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
/* b1 = N ^ V: the bit shifted into the top of the partial product. */
611 gen_mov_reg_N(cpu_tmp0, cpu_psr);
612 gen_mov_reg_V(r_temp, cpu_psr);
613 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
614 tcg_temp_free(r_temp);
616 // T0 = (b1 << 31) | (T0 >> 1);
618 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
619 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
620 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
/* Final add, latched for lazy condition-code evaluation. */
622 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
624 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* 32 x 32 -> 64 multiply: truncate both operands to 32 bits, widen
   (signed or unsigned per sign_ext), multiply as i64, store the high
   32 bits in %y and the low word in dst.  The if/else around the two
   widening forms is missing from this extract. */
627 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
629 TCGv_i32 r_src1, r_src2;
630 TCGv_i64 r_temp, r_temp2;
632 r_src1 = tcg_temp_new_i32();
633 r_src2 = tcg_temp_new_i32();
635 tcg_gen_trunc_tl_i32(r_src1, src1);
636 tcg_gen_trunc_tl_i32(r_src2, src2);
638 r_temp = tcg_temp_new_i64();
639 r_temp2 = tcg_temp_new_i64();
642 tcg_gen_ext_i32_i64(r_temp, r_src2);
643 tcg_gen_ext_i32_i64(r_temp2, r_src1);
645 tcg_gen_extu_i32_i64(r_temp, r_src2);
646 tcg_gen_extu_i32_i64(r_temp2, r_src1);
649 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
/* High half of the product goes to the %y register. */
651 tcg_gen_shri_i64(r_temp, r_temp2, 32);
652 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
653 tcg_temp_free_i64(r_temp);
654 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
656 tcg_gen_trunc_i64_tl(dst, r_temp2);
658 tcg_temp_free_i64(r_temp2);
660 tcg_temp_free_i32(r_src1);
661 tcg_temp_free_i32(r_src2);
/* UMUL: unsigned-widening multiply. */
664 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
666 /* zero-extend truncated operands before multiplication */
667 gen_op_multiply(dst, src1, src2, 0);
/* SMUL: signed-widening multiply. */
670 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
672 /* sign-extend truncated operands before multiplication */
673 gen_op_multiply(dst, src1, src2, 1);
/* Integer condition evaluators: each sets dst to 0/1 according to a
   SPARC branch condition computed from the N/Z/V/C bits of 'src'. */
/* ba: always. */
677 static inline void gen_op_eval_ba(TCGv dst)
679 tcg_gen_movi_tl(dst, 1);
/* be: Z */
683 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
685 gen_mov_reg_Z(dst, src);
/* ble: Z | (N ^ V) */
689 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
691 gen_mov_reg_N(cpu_tmp0, src);
692 gen_mov_reg_V(dst, src);
693 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
694 gen_mov_reg_Z(cpu_tmp0, src);
695 tcg_gen_or_tl(dst, dst, cpu_tmp0);
/* bl: N ^ V */
699 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
701 gen_mov_reg_V(cpu_tmp0, src);
702 gen_mov_reg_N(dst, src);
703 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
/* bleu: C | Z */
707 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
709 gen_mov_reg_Z(cpu_tmp0, src);
710 gen_mov_reg_C(dst, src);
711 tcg_gen_or_tl(dst, dst, cpu_tmp0);
/* bcs: C */
715 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
717 gen_mov_reg_C(dst, src);
/* bvs: V */
721 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
723 gen_mov_reg_V(dst, src);
/* bn: never. */
727 static inline void gen_op_eval_bn(TCGv dst)
729 tcg_gen_movi_tl(dst, 0);
/* bneg: N */
733 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
735 gen_mov_reg_N(dst, src);
/* bne: !Z */
739 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
741 gen_mov_reg_Z(dst, src);
742 tcg_gen_xori_tl(dst, dst, 0x1);
/* bg: !(Z | (N ^ V)) */
746 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
748 gen_mov_reg_N(cpu_tmp0, src);
749 gen_mov_reg_V(dst, src);
750 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
751 gen_mov_reg_Z(cpu_tmp0, src);
752 tcg_gen_or_tl(dst, dst, cpu_tmp0);
753 tcg_gen_xori_tl(dst, dst, 0x1);
/* bge: !(N ^ V) */
757 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
759 gen_mov_reg_V(cpu_tmp0, src);
760 gen_mov_reg_N(dst, src);
761 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
762 tcg_gen_xori_tl(dst, dst, 0x1);
/* bgu: !(C | Z) */
766 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
768 gen_mov_reg_Z(cpu_tmp0, src);
769 gen_mov_reg_C(dst, src);
770 tcg_gen_or_tl(dst, dst, cpu_tmp0);
771 tcg_gen_xori_tl(dst, dst, 0x1);
/* bcc: !C */
775 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
777 gen_mov_reg_C(dst, src);
778 tcg_gen_xori_tl(dst, dst, 0x1);
/* bpos: !N */
782 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
784 gen_mov_reg_N(dst, src);
785 tcg_gen_xori_tl(dst, dst, 0x1);
/* bvc: !V */
789 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
791 gen_mov_reg_V(dst, src);
792 tcg_gen_xori_tl(dst, dst, 0x1);
796 FPSR bit field FCC1 | FCC0:
/* Extract the low bit (FCC0) of a 2-bit FSR condition field into reg;
   fcc_offset selects which of the FCC fields is read. */
802 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
803 unsigned int fcc_offset)
805 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
806 tcg_gen_andi_tl(reg, reg, 0x1);
/* Extract the high bit (FCC1) of the same 2-bit field. */
809 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
810 unsigned int fcc_offset)
812 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
813 tcg_gen_andi_tl(reg, reg, 0x1);
/* Floating-point condition evaluators over the 2-bit FCC field
   (bits FCC1:FCC0); each sets dst to 0/1. */
/* fbne: FCC != 0, i.e. FCC0 | FCC1 */
817 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
818 unsigned int fcc_offset)
820 gen_mov_reg_FCC0(dst, src, fcc_offset);
821 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
822 tcg_gen_or_tl(dst, dst, cpu_tmp0);
825 // 1 or 2: FCC0 ^ FCC1
826 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
827 unsigned int fcc_offset)
829 gen_mov_reg_FCC0(dst, src, fcc_offset);
830 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
831 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
/* fbul: FCC0 (as emitted here; a second term may be missing from this
   extract -- confirm). */
835 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
836 unsigned int fcc_offset)
838 gen_mov_reg_FCC0(dst, src, fcc_offset);
/* fbl: FCC0 & !FCC1 */
842 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
843 unsigned int fcc_offset)
845 gen_mov_reg_FCC0(dst, src, fcc_offset);
846 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
847 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
848 tcg_gen_and_tl(dst, dst, cpu_tmp0);
/* fbug: FCC1 (as emitted here; see note on fbul above). */
852 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
853 unsigned int fcc_offset)
855 gen_mov_reg_FCC1(dst, src, fcc_offset);
/* fbg: !FCC0 & FCC1 */
859 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
860 unsigned int fcc_offset)
862 gen_mov_reg_FCC0(dst, src, fcc_offset);
863 tcg_gen_xori_tl(dst, dst, 0x1);
864 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
865 tcg_gen_and_tl(dst, dst, cpu_tmp0);
/* fbu: FCC0 & FCC1 (unordered) */
869 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
870 unsigned int fcc_offset)
872 gen_mov_reg_FCC0(dst, src, fcc_offset);
873 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
874 tcg_gen_and_tl(dst, dst, cpu_tmp0);
/* fbe: !(FCC0 | FCC1) */
878 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
879 unsigned int fcc_offset)
881 gen_mov_reg_FCC0(dst, src, fcc_offset);
882 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
883 tcg_gen_or_tl(dst, dst, cpu_tmp0);
884 tcg_gen_xori_tl(dst, dst, 0x1);
887 // 0 or 3: !(FCC0 ^ FCC1)
888 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
889 unsigned int fcc_offset)
891 gen_mov_reg_FCC0(dst, src, fcc_offset);
892 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
893 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
894 tcg_gen_xori_tl(dst, dst, 0x1);
/* fbge: !FCC0 (as emitted here; see note on fbul above). */
898 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
899 unsigned int fcc_offset)
901 gen_mov_reg_FCC0(dst, src, fcc_offset);
902 tcg_gen_xori_tl(dst, dst, 0x1);
905 // !1: !(FCC0 & !FCC1)
906 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
907 unsigned int fcc_offset)
909 gen_mov_reg_FCC0(dst, src, fcc_offset);
910 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
911 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
912 tcg_gen_and_tl(dst, dst, cpu_tmp0);
913 tcg_gen_xori_tl(dst, dst, 0x1);
/* fble: !FCC1 (as emitted here; see note on fbul above). */
917 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
918 unsigned int fcc_offset)
920 gen_mov_reg_FCC1(dst, src, fcc_offset);
921 tcg_gen_xori_tl(dst, dst, 0x1);
924 // !2: !(!FCC0 & FCC1)
925 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
926 unsigned int fcc_offset)
928 gen_mov_reg_FCC0(dst, src, fcc_offset);
929 tcg_gen_xori_tl(dst, dst, 0x1);
930 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
931 tcg_gen_and_tl(dst, dst, cpu_tmp0);
932 tcg_gen_xori_tl(dst, dst, 0x1);
935 // !3: !(FCC0 & FCC1)
936 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
937 unsigned int fcc_offset)
939 gen_mov_reg_FCC0(dst, src, fcc_offset);
940 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
941 tcg_gen_and_tl(dst, dst, cpu_tmp0);
942 tcg_gen_xori_tl(dst, dst, 0x1);
/* Two-way conditional exit: if r_cond is zero go to (pc2, pc2+4),
   otherwise fall through to (pc1, pc1+4).  The gen_set_label call for l1
   is missing from this extract. */
945 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
946 target_ulong pc2, TCGv r_cond)
950 l1 = gen_new_label();
952 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
954 gen_goto_tb(dc, 0, pc1, pc1 + 4);
957 gen_goto_tb(dc, 1, pc2, pc2 + 4);
/* Branch-with-annul: taken path executes the delay slot at pc2 then
   branches to pc1; not-taken path skips the delay slot entirely
   (pc2+4, pc2+8).  The gen_set_label call is missing from this extract. */
960 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
961 target_ulong pc2, TCGv r_cond)
965 l1 = gen_new_label();
967 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
969 gen_goto_tb(dc, 0, pc2, pc1);
972 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Resolve a pending JUMP_PC: select npc from jump_pc[0]/[1] based on
   the saved condition value in cpu_cond. */
975 static inline void gen_generic_branch(DisasContext *dc)
977 TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
978 TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
979 TCGv zero = tcg_const_tl(0);
981 tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
988 /* call this function before using the condition register as it may
989 have been set for a jump */
990 static inline void flush_cond(DisasContext *dc)
992 if (dc->npc == JUMP_PC) {
993 gen_generic_branch(dc);
994 dc->npc = DYNAMIC_PC;
/* Materialize dc->npc into the cpu_npc global, resolving a pending
   JUMP_PC first; a known-constant npc is written directly. */
998 static inline void save_npc(DisasContext *dc)
1000 if (dc->npc == JUMP_PC) {
1001 gen_generic_branch(dc);
1002 dc->npc = DYNAMIC_PC;
1003 } else if (dc->npc != DYNAMIC_PC) {
1004 tcg_gen_movi_tl(cpu_npc, dc->npc);
/* Write pc to the global and force lazy flags to concrete PSR form
   before exposing CPU state (e.g. to a helper that may raise an
   exception).  A save_npc call is presumably part of this function but
   missing from this extract. */
1008 static inline void save_state(DisasContext *dc)
1010 tcg_gen_movi_tl(cpu_pc, dc->pc);
1011 /* flush pending conditional evaluations before exposing cpu state */
1012 if (dc->cc_op != CC_OP_FLAGS) {
1013 dc->cc_op = CC_OP_FLAGS;
1014 gen_helper_compute_psr(cpu_env);
/* pc <- npc, handling the dynamic cases; the final else branch (both
   values static) is missing from this extract. */
1019 static inline void gen_mov_pc_npc(DisasContext *dc)
1021 if (dc->npc == JUMP_PC) {
1022 gen_generic_branch(dc);
1023 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1024 dc->pc = DYNAMIC_PC;
1025 } else if (dc->npc == DYNAMIC_PC) {
1026 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1027 dc->pc = DYNAMIC_PC;
/* Advance the runtime pc/npc pair by one instruction. */
1033 static inline void gen_op_next_insn(void)
1035 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1036 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
/* Release the temps held by a DisasCompare; presumably the frees are
   guarded by the cmp->g1/g2 "global" flags on lines missing from this
   extract. */
1039 static void free_compare(DisasCompare *cmp)
1042 tcg_temp_free(cmp->c1);
1045 tcg_temp_free(cmp->c2);
/* Build a DisasCompare for integer condition 'cond' on icc/xcc.  The
   result is produced as a boolean in a fresh temp (c1 != 0).  Non-FLAGS
   cc_op states are first folded into a concrete PSR.  NOTE(review): the
   switch statement and its case labels are missing from this extract,
   so the evaluator calls below appear unlabeled. */
1049 static void gen_compare(DisasCompare *cmp, unsigned int cc, unsigned int cond,
1055 /* For now we still generate a straight boolean result. */
1056 cmp->cond = TCG_COND_NE;
1057 cmp->is_bool = true;
1058 cmp->g1 = cmp->g2 = false;
1059 cmp->c1 = r_dst = tcg_temp_new();
1060 cmp->c2 = tcg_const_tl(0);
1062 #ifdef TARGET_SPARC64
/* Force lazy flags into PSR form before reading them. */
1070 switch (dc->cc_op) {
1074 gen_helper_compute_psr(cpu_env);
1075 dc->cc_op = CC_OP_FLAGS;
1080 gen_op_eval_bn(r_dst);
1083 gen_op_eval_be(r_dst, r_src);
1086 gen_op_eval_ble(r_dst, r_src);
1089 gen_op_eval_bl(r_dst, r_src);
1092 gen_op_eval_bleu(r_dst, r_src);
1095 gen_op_eval_bcs(r_dst, r_src);
1098 gen_op_eval_bneg(r_dst, r_src);
1101 gen_op_eval_bvs(r_dst, r_src);
1104 gen_op_eval_ba(r_dst);
1107 gen_op_eval_bne(r_dst, r_src);
1110 gen_op_eval_bg(r_dst, r_src);
1113 gen_op_eval_bge(r_dst, r_src);
1116 gen_op_eval_bgu(r_dst, r_src);
1119 gen_op_eval_bcc(r_dst, r_src);
1122 gen_op_eval_bpos(r_dst, r_src);
1125 gen_op_eval_bvc(r_dst, r_src);
/* Build a DisasCompare for floating-point condition 'cond' on FCC field
   'cc', reading cpu_fsr at the computed offset.  As in gen_compare, the
   result is a boolean temp; the switch/case labels and the offset
   computation are missing from this extract. */
1130 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1132 unsigned int offset;
1135 /* For now we still generate a straight boolean result. */
1136 cmp->cond = TCG_COND_NE;
1137 cmp->is_bool = true;
1138 cmp->g1 = cmp->g2 = false;
1139 cmp->c1 = r_dst = tcg_temp_new();
1140 cmp->c2 = tcg_const_tl(0);
1160 gen_op_eval_bn(r_dst);
1163 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1166 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1169 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1172 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1175 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1178 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1181 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1184 gen_op_eval_ba(r_dst);
1187 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1190 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1193 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1196 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1199 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1202 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1205 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
/* Evaluate an integer condition straight into a 0/1 value in r_dst,
   using gen_compare and freeing the DisasCompare afterwards (the
   free_compare call and the is_bool test are missing from this extract). */
1210 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1214 gen_compare(&cmp, cc, cond, dc);
1216 /* The interface is to return a boolean in r_dst. */
1218 tcg_gen_mov_tl(r_dst, cmp.c1);
1220 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
/* Same, for floating-point conditions via gen_fcompare. */
1226 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1229 gen_fcompare(&cmp, cc, cond);
1231 /* The interface is to return a boolean in r_dst. */
1233 tcg_gen_mov_tl(r_dst, cmp.c1);
1235 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1241 #ifdef TARGET_SPARC64
/* TCG conditions for the sparc64 register-contents branch encodings;
   the table entries are missing from this extract. */
1243 static const int gen_tcg_cond_reg[8] = {
/* Build a DisasCompare that tests r_reg against zero.  The condition is
   inverted here -- presumably because the caller branches on the
   opposite sense; c1 assignment lines are missing from this extract. */
1254 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1256 cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1257 cmp->is_bool = false;
1261 cmp->c2 = tcg_const_tl(0);
/* Evaluate a register-contents condition into a 0/1 value in r_dst. */
1264 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1267 gen_compare_reg(&cmp, cond, r_src);
1269 /* The interface is to return a boolean in r_dst. */
1270 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
/* Translate a Bicc/BPcc branch: cond 0x0 = never, 0x8 = always, other
   values are conditional.  'a' is the annul bit.  Conditional branches
   without annul defer the decision via JUMP_PC / jump_pc[].  Several
   if/else lines and the function tail are missing from this extract. */
1276 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1278 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1279 target_ulong target = dc->pc + offset;
1281 #ifdef TARGET_SPARC64
1282 if (unlikely(AM_CHECK(dc))) {
1283 target &= 0xffffffffULL;
1287 /* unconditional not taken */
/* With annul set the delay slot is skipped as well. */
1289 dc->pc = dc->npc + 4;
1290 dc->npc = dc->pc + 4;
1293 dc->npc = dc->pc + 4;
1295 } else if (cond == 0x8) {
1296 /* unconditional taken */
1299 dc->npc = dc->pc + 4;
1303 tcg_gen_mov_tl(cpu_pc, cpu_npc);
/* Conditional: evaluate into cpu_cond; annulled branches are emitted
   immediately, otherwise record both possible targets for JUMP_PC. */
1307 gen_cond(cpu_cond, cc, cond, dc);
1309 gen_branch_a(dc, target, dc->npc, cpu_cond);
1313 dc->jump_pc[0] = target;
1314 if (unlikely(dc->npc == DYNAMIC_PC)) {
1315 dc->jump_pc[1] = DYNAMIC_PC;
1316 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1318 dc->jump_pc[1] = dc->npc + 4;
/* Translate an FBfcc/FBPfcc floating-point branch; identical structure
   to do_branch but the condition comes from the FSR via gen_fcond.
   Several if/else lines and the function tail are missing from this
   extract. */
1325 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1327 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1328 target_ulong target = dc->pc + offset;
1330 #ifdef TARGET_SPARC64
1331 if (unlikely(AM_CHECK(dc))) {
1332 target &= 0xffffffffULL;
1336 /* unconditional not taken */
1338 dc->pc = dc->npc + 4;
1339 dc->npc = dc->pc + 4;
1342 dc->npc = dc->pc + 4;
1344 } else if (cond == 0x8) {
1345 /* unconditional taken */
1348 dc->npc = dc->pc + 4;
1352 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1356 gen_fcond(cpu_cond, cc, cond);
1358 gen_branch_a(dc, target, dc->npc, cpu_cond);
1362 dc->jump_pc[0] = target;
1363 if (unlikely(dc->npc == DYNAMIC_PC)) {
1364 dc->jump_pc[1] = DYNAMIC_PC;
1365 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1367 dc->jump_pc[1] = dc->npc + 4;
1374 #ifdef TARGET_SPARC64
/* Translate a sparc64 branch-on-register-contents (BPr): no never/
   always encodings, so the condition is always evaluated.  Some lines
   and the function tail are missing from this extract. */
1375 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1378 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1379 target_ulong target = dc->pc + offset;
1381 if (unlikely(AM_CHECK(dc))) {
1382 target &= 0xffffffffULL;
1385 gen_cond_reg(cpu_cond, cond, r_reg);
1387 gen_branch_a(dc, target, dc->npc, cpu_cond);
1391 dc->jump_pc[0] = target;
1392 if (unlikely(dc->npc == DYNAMIC_PC)) {
1393 dc->jump_pc[1] = DYNAMIC_PC;
1394 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1396 dc->jump_pc[1] = dc->npc + 4;
/* FP compare dispatchers: on sparc64 the fccno argument selects one of
   four FCC fields, each with its own helper; the switch/case labels are
   missing from this extract.  The later single-FCC variants (presumably
   the pre-v9 #else branch) ignore fccno. */
1402 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1406 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1409 gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1412 gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1415 gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1420 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1424 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1427 gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1430 gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1433 gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
/* Quad compares take their operands from the env->qt0/qt1 staging area. */
1438 static inline void gen_op_fcmpq(int fccno)
1442 gen_helper_fcmpq(cpu_env);
1445 gen_helper_fcmpq_fcc1(cpu_env);
1448 gen_helper_fcmpq_fcc2(cpu_env);
1451 gen_helper_fcmpq_fcc3(cpu_env);
/* 'e' variants: compare signaling on unordered (fcmpes/fcmped/fcmpeq). */
1456 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1460 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1463 gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1466 gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1469 gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1474 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1478 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1481 gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1484 gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1487 gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1492 static inline void gen_op_fcmpeq(int fccno)
1496 gen_helper_fcmpeq(cpu_env);
1499 gen_helper_fcmpeq_fcc1(cpu_env);
1502 gen_helper_fcmpeq_fcc2(cpu_env);
1505 gen_helper_fcmpeq_fcc3(cpu_env);
/* Single-FCC variants: fccno is unused, only one condition field exists. */
1512 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1514 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1517 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1519 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1522 static inline void gen_op_fcmpq(int fccno)
1524 gen_helper_fcmpq(cpu_env);
1527 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1529 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1532 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1534 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1537 static inline void gen_op_fcmpeq(int fccno)
1539 gen_helper_fcmpeq(cpu_env);
/* Raise a floating-point trap: set the FSR trap-type field to fsr_flags
   and raise TT_FP_EXCP through the exception helper. */
1543 static inline void gen_op_fpexception_im(int fsr_flags)
1547 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1548 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1549 r_const = tcg_const_i32(TT_FP_EXCP);
1550 gen_helper_raise_exception(cpu_env, r_const);
1551 tcg_temp_free_i32(r_const);
/* If the FPU is disabled, raise TT_NFPU_INSN and report it; returns
   nonzero when the trap was emitted -- the return statements and the
   save_state call are missing from this extract, TODO confirm. */
1554 static int gen_trap_ifnofpu(DisasContext *dc)
1556 #if !defined(CONFIG_USER_ONLY)
1557 if (!dc->fpu_enabled) {
1561 r_const = tcg_const_i32(TT_NFPU_INSN);
1562 gen_helper_raise_exception(cpu_env, r_const);
1563 tcg_temp_free_i32(r_const);
/* Clear accumulated IEEE exception bits and the FSR trap-type field. */
1571 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1573 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Single-precision FP op wrappers.  Each loads the source float
   register(s), allocates a fresh destination temp, invokes the
   generator callback, and stores the result to %f[rd].
   gen_fop_* forms pass cpu_env so the helper can touch FP state;
   gen_ne_fop_* ("no exception") forms call a pure two/three-operand
   generator without cpu_env. */
1576 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1577 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1581 src = gen_load_fpr_F(dc, rs);
1582 dst = gen_dest_fpr_F();
1584 gen(dst, cpu_env, src);
1586 gen_store_fpr_F(dc, rd, dst);
1589 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1590 void (*gen)(TCGv_i32, TCGv_i32))
1594 src = gen_load_fpr_F(dc, rs);
1595 dst = gen_dest_fpr_F();
1599 gen_store_fpr_F(dc, rd, dst);
/* Two-source variant used for fadds/fsubs/fmuls/fdivs etc. */
1602 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1603 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1605 TCGv_i32 dst, src1, src2;
1607 src1 = gen_load_fpr_F(dc, rs1);
1608 src2 = gen_load_fpr_F(dc, rs2);
1609 dst = gen_dest_fpr_F();
1611 gen(dst, cpu_env, src1, src2);
1613 gen_store_fpr_F(dc, rd, dst);
/* Two-source, no-env variant: only needed by sparc64 (VIS) ops. */
1616 #ifdef TARGET_SPARC64
1617 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1618 void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1620 TCGv_i32 dst, src1, src2;
1622 src1 = gen_load_fpr_F(dc, rs1);
1623 src2 = gen_load_fpr_F(dc, rs2);
1624 dst = gen_dest_fpr_F();
1626 gen(dst, src1, src2);
1628 gen_store_fpr_F(dc, rd, dst);
/* Double-precision FP op wrappers: same load/gen/store pattern as the
   single-precision family above, but on 64-bit FP register pairs. */
1632 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1633 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1637 src = gen_load_fpr_D(dc, rs);
1638 dst = gen_dest_fpr_D();
1640 gen(dst, cpu_env, src);
1642 gen_store_fpr_D(dc, rd, dst);
1645 #ifdef TARGET_SPARC64
1646 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1647 void (*gen)(TCGv_i64, TCGv_i64))
1651 src = gen_load_fpr_D(dc, rs);
1652 dst = gen_dest_fpr_D();
1656 gen_store_fpr_D(dc, rd, dst);
1660 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1661 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1663 TCGv_i64 dst, src1, src2;
1665 src1 = gen_load_fpr_D(dc, rs1);
1666 src2 = gen_load_fpr_D(dc, rs2);
1667 dst = gen_dest_fpr_D();
1669 gen(dst, cpu_env, src1, src2);
1671 gen_store_fpr_D(dc, rd, dst);
1674 #ifdef TARGET_SPARC64
1675 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1676 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1678 TCGv_i64 dst, src1, src2;
1680 src1 = gen_load_fpr_D(dc, rs1);
1681 src2 = gen_load_fpr_D(dc, rs2);
1682 dst = gen_dest_fpr_D();
1684 gen(dst, src1, src2);
1686 gen_store_fpr_D(dc, rd, dst);
/* Variant that additionally feeds %gsr to the generator (used by VIS
   ops whose behavior depends on the Graphics Status Register). */
1689 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1690 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1692 TCGv_i64 dst, src1, src2;
1694 src1 = gen_load_fpr_D(dc, rs1);
1695 src2 = gen_load_fpr_D(dc, rs2);
1696 dst = gen_dest_fpr_D();
1698 gen(dst, cpu_gsr, src1, src2);
1700 gen_store_fpr_D(dc, rd, dst);
/* Three-source variant: the destination register rd is also read as
   src0, so the op can accumulate into it. */
1703 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1704 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1706 TCGv_i64 dst, src0, src1, src2;
1708 src1 = gen_load_fpr_D(dc, rs1);
1709 src2 = gen_load_fpr_D(dc, rs2);
1710 src0 = gen_load_fpr_D(dc, rd);
1711 dst = gen_dest_fpr_D();
1713 gen(dst, src0, src1, src2);
1715 gen_store_fpr_D(dc, rd, dst);
/* Quad-precision op wrappers.  128-bit values do not fit in a TCGv,
   so operands are staged through the QT0/QT1 temporaries in the env:
   load QT1 (and QT0 for two-operand ops), call the helper, then store
   QT0 back to the destination quad register and mark it dirty. */
1719 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1720 void (*gen)(TCGv_ptr))
1722 gen_op_load_fpr_QT1(QFPREG(rs));
1726 gen_op_store_QT0_fpr(QFPREG(rd));
1727 gen_update_fprs_dirty(QFPREG(rd));
1730 #ifdef TARGET_SPARC64
1731 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1732 void (*gen)(TCGv_ptr))
1734 gen_op_load_fpr_QT1(QFPREG(rs));
1738 gen_op_store_QT0_fpr(QFPREG(rd));
1739 gen_update_fprs_dirty(QFPREG(rd));
1743 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1744 void (*gen)(TCGv_ptr))
1746 gen_op_load_fpr_QT0(QFPREG(rs1));
1747 gen_op_load_fpr_QT1(QFPREG(rs2));
1751 gen_op_store_QT0_fpr(QFPREG(rd));
1752 gen_update_fprs_dirty(QFPREG(rd));
/* Mixed-precision wrappers. */
/* Two single-precision sources producing a double result (fsmuld). */
1755 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1756 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1759 TCGv_i32 src1, src2;
1761 src1 = gen_load_fpr_F(dc, rs1);
1762 src2 = gen_load_fpr_F(dc, rs2);
1763 dst = gen_dest_fpr_D();
1765 gen(dst, cpu_env, src1, src2);
1767 gen_store_fpr_D(dc, rd, dst);
/* Two double sources producing a quad result (fdmulq); the helper
   leaves the 128-bit result in QT0, which is stored back below. */
1770 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1771 void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1773 TCGv_i64 src1, src2;
1775 src1 = gen_load_fpr_D(dc, rs1);
1776 src2 = gen_load_fpr_D(dc, rs2);
1778 gen(cpu_env, src1, src2);
1780 gen_op_store_QT0_fpr(QFPREG(rd));
1781 gen_update_fprs_dirty(QFPREG(rd));
/* Format-conversion wrappers (single/double/quad in and out).  All
   follow the load / gen / store pattern; quad operands go through the
   QT0/QT1 env temporaries as in the gen_fop_QQ family. */
1784 #ifdef TARGET_SPARC64
1785 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1786 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1791 src = gen_load_fpr_F(dc, rs);
1792 dst = gen_dest_fpr_D();
1794 gen(dst, cpu_env, src);
1796 gen_store_fpr_D(dc, rd, dst);
/* NOTE(review): despite the "ne" name this variant still passes
   cpu_env to the generator; the difference from gen_fop_DF is in
   lines elided from this view -- confirm against the full source. */
1800 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1801 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1806 src = gen_load_fpr_F(dc, rs);
1807 dst = gen_dest_fpr_D();
1809 gen(dst, cpu_env, src);
1811 gen_store_fpr_D(dc, rd, dst);
/* Double source, single result (fdtos, fdtoi, fxtos). */
1814 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1815 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1820 src = gen_load_fpr_D(dc, rs);
1821 dst = gen_dest_fpr_F();
1823 gen(dst, cpu_env, src);
1825 gen_store_fpr_F(dc, rd, dst);
/* Quad source, single result (fqtos, fqtoi): input via QT1. */
1828 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1829 void (*gen)(TCGv_i32, TCGv_ptr))
1833 gen_op_load_fpr_QT1(QFPREG(rs));
1834 dst = gen_dest_fpr_F();
1838 gen_store_fpr_F(dc, rd, dst);
/* Quad source, double result (fqtod, fqtox): input via QT1. */
1841 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1842 void (*gen)(TCGv_i64, TCGv_ptr))
1846 gen_op_load_fpr_QT1(QFPREG(rs));
1847 dst = gen_dest_fpr_D();
1851 gen_store_fpr_D(dc, rd, dst);
/* Single source, quad result (fitoq, fstoq): result via QT0. */
1854 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1855 void (*gen)(TCGv_ptr, TCGv_i32))
1859 src = gen_load_fpr_F(dc, rs);
1863 gen_op_store_QT0_fpr(QFPREG(rd));
1864 gen_update_fprs_dirty(QFPREG(rd));
/* Double source, quad result (fdtoq, fxtoq): result via QT0. */
1867 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1868 void (*gen)(TCGv_ptr, TCGv_i64))
1872 src = gen_load_fpr_D(dc, rs);
1876 gen_op_store_QT0_fpr(QFPREG(rd));
1877 gen_update_fprs_dirty(QFPREG(rd));
/* SPARC64 ASI (Address Space Identifier) access helpers. */
/* Materialize the ASI for this insn as a TCGv_i32: either a copy of
   the %asi register (register-indirect form) or the immediate ASI
   field from bits 19..26 of the instruction word. */
1881 #ifdef TARGET_SPARC64
1882 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1888 r_asi = tcg_temp_new_i32();
1889 tcg_gen_mov_i32(r_asi, cpu_asi);
1891 asi = GET_FIELD(insn, 19, 26);
1892 r_asi = tcg_const_i32(asi);
/* Load from an alternate address space: defer to the ld_asi helper
   with explicit size and signedness operands; the temps are freed
   right after the call. */
1897 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1900 TCGv_i32 r_asi, r_size, r_sign;
1902 r_asi = gen_get_asi(insn, addr);
1903 r_size = tcg_const_i32(size);
1904 r_sign = tcg_const_i32(sign);
1905 gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
1906 tcg_temp_free_i32(r_sign);
1907 tcg_temp_free_i32(r_size);
1908 tcg_temp_free_i32(r_asi);
/* Store to an alternate address space. */
1911 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1913 TCGv_i32 r_asi, r_size;
1915 r_asi = gen_get_asi(insn, addr);
1916 r_size = tcg_const_i32(size);
1917 gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
1918 tcg_temp_free_i32(r_size);
1919 tcg_temp_free_i32(r_asi);
/* FP load from an alternate space: the helper writes straight into
   the FP register file, so only the register number rd is passed. */
1922 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1924 TCGv_i32 r_asi, r_size, r_rd;
1926 r_asi = gen_get_asi(insn, addr);
1927 r_size = tcg_const_i32(size);
1928 r_rd = tcg_const_i32(rd);
1929 gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
1930 tcg_temp_free_i32(r_rd);
1931 tcg_temp_free_i32(r_size);
1932 tcg_temp_free_i32(r_asi);
/* FP store to an alternate space (mirror of gen_ldf_asi). */
1935 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1937 TCGv_i32 r_asi, r_size, r_rd;
1939 r_asi = gen_get_asi(insn, addr);
1940 r_size = tcg_const_i32(size);
1941 r_rd = tcg_const_i32(rd);
1942 gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
1943 tcg_temp_free_i32(r_rd);
1944 tcg_temp_free_i32(r_size);
1945 tcg_temp_free_i32(r_asi);
/* SWAP on an alternate space: read the old 32-bit value into
   cpu_tmp64, write dst to memory with the same ASI/size, then return
   the old value (truncated) in dst.  Not atomic at TCG level; atomic
   semantics, if any, live in the helpers. */
1948 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1950 TCGv_i32 r_asi, r_size, r_sign;
1952 r_asi = gen_get_asi(insn, addr);
1953 r_size = tcg_const_i32(4);
1954 r_sign = tcg_const_i32(0);
1955 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
1956 tcg_temp_free_i32(r_sign);
/* r_asi/r_size are reused for the store before being freed. */
1957 gen_helper_st_asi(cpu_env, addr, dst, r_asi, r_size);
1958 tcg_temp_free_i32(r_size);
1959 tcg_temp_free_i32(r_asi);
1960 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* LDDA: the helper loads the 128-bit pair and writes both halves of
   the destination register pair itself. */
1963 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1965 TCGv_i32 r_asi, r_rd;
1967 r_asi = gen_get_asi(insn, addr);
1968 r_rd = tcg_const_i32(rd);
1969 gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
1970 tcg_temp_free_i32(r_rd);
1971 tcg_temp_free_i32(r_asi);
/* STDA: concatenate r[rd+1] (low) with hi into one 64-bit value and
   store it with a single 8-byte ASI store. */
1974 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1976 TCGv_i32 r_asi, r_size;
1978 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1979 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1980 r_asi = gen_get_asi(insn, addr);
1981 r_size = tcg_const_i32(8);
1982 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
1983 tcg_temp_free_i32(r_size);
1984 tcg_temp_free_i32(r_asi);
/* CASA: compare-and-swap word.  r[rd] is the compare value; the old
   memory value is returned in dst. */
1987 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1993 r_val1 = tcg_temp_new();
1994 gen_movl_reg_TN(rd, r_val1);
1995 r_asi = gen_get_asi(insn, addr);
1996 gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
1997 tcg_temp_free_i32(r_asi);
1998 tcg_temp_free(r_val1);
/* CASXA: 64-bit compare-and-swap; compare value staged in cpu_tmp64. */
2001 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2006 gen_movl_reg_TN(rd, cpu_tmp64);
2007 r_asi = gen_get_asi(insn, addr);
2008 gen_helper_casx_asi(dst, cpu_env, addr, cpu_tmp64, val2, r_asi);
2009 tcg_temp_free_i32(r_asi);
/* sparc32 system-mode versions of the ASI access helpers.  The ASI is
   always the immediate field (bits 19..26); there is no %asi register
   on pre-V9.  NOTE(review): these free i32 temps with the unsuffixed
   tcg_temp_free() -- that works here only because TCGv == TCGv_i32
   when TARGET_LONG_BITS is 32; confirm if this branch can be built
   otherwise. */
2012 #elif !defined(CONFIG_USER_ONLY)
/* Load via ASI: helper returns 64 bits in cpu_tmp64, truncated to the
   target width at the end. */
2014 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2017 TCGv_i32 r_asi, r_size, r_sign;
2019 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2020 r_size = tcg_const_i32(size);
2021 r_sign = tcg_const_i32(sign);
2022 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2023 tcg_temp_free(r_sign);
2024 tcg_temp_free(r_size);
2025 tcg_temp_free(r_asi);
2026 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* Store via ASI: widen src to 64 bits first, as the helper takes i64. */
2029 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2031 TCGv_i32 r_asi, r_size;
2033 tcg_gen_extu_tl_i64(cpu_tmp64, src);
2034 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2035 r_size = tcg_const_i32(size);
2036 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2037 tcg_temp_free(r_size);
2038 tcg_temp_free(r_asi);
/* SWAP via ASI: load old word into cpu_tmp64, store dst, then return
   the old value in dst. */
2041 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2043 TCGv_i32 r_asi, r_size, r_sign;
2046 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2047 r_size = tcg_const_i32(4);
2048 r_sign = tcg_const_i32(0);
2049 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2050 tcg_temp_free(r_sign);
2051 r_val = tcg_temp_new_i64();
2052 tcg_gen_extu_tl_i64(r_val, dst);
2053 gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2054 tcg_temp_free_i64(r_val);
2055 tcg_temp_free(r_size);
2056 tcg_temp_free(r_asi);
2057 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* sparc32 LDDA via ASI: one 8-byte helper load into cpu_tmp64, then
   split it -- low word to r[rd+1], high word to r[rd] (via hi). */
2060 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2062 TCGv_i32 r_asi, r_size, r_sign;
2064 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2065 r_size = tcg_const_i32(8);
2066 r_sign = tcg_const_i32(0);
2067 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2068 tcg_temp_free(r_sign);
2069 tcg_temp_free(r_size);
2070 tcg_temp_free(r_asi);
2071 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
2072 gen_movl_TN_reg(rd + 1, cpu_tmp0);
2073 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2074 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2075 gen_movl_TN_reg(rd, hi);
/* sparc32 STDA via ASI: concatenate r[rd+1] (low) with hi and emit a
   single 8-byte store. */
2078 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2080 TCGv_i32 r_asi, r_size;
2082 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2083 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2084 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2085 r_size = tcg_const_i32(8);
2086 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2087 tcg_temp_free(r_size);
2088 tcg_temp_free(r_asi);
/* LDSTUB via ASI (shared by sparc64 and sparc32 system mode): load
   the byte, then store 0xff back -- not atomic at TCG level. */
2092 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2093 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2096 TCGv_i32 r_asi, r_size;
2098 gen_ld_asi(dst, addr, insn, 1, 0);
2100 r_val = tcg_const_i64(0xffULL);
2101 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2102 r_size = tcg_const_i32(1);
2103 gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2104 tcg_temp_free_i32(r_size);
2105 tcg_temp_free_i32(r_asi);
2106 tcg_temp_free_i64(r_val);
/* Resolve the rs1 operand of an instruction to a TCGv.  %g0 reads as
   constant zero; globals %g1-%g7 map to the dedicated cpu_gregs TCG
   globals; windowed registers (>= 8) are loaded from the register
   window via cpu_regwptr into the caller-provided scratch "def". */
2110 static inline TCGv get_src1(unsigned int insn, TCGv def)
2115 rs1 = GET_FIELD(insn, 13, 17);
2117 tcg_gen_movi_tl(def, 0);
2118 } else if (rs1 < 8) {
2119 r_rs1 = cpu_gregs[rs1];
2121 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
/* Resolve the second operand: an immediate (sign-extended simm13) when
   the i bit is set, otherwise register rs2 with the same %g0/global/
   windowed handling as get_src1. */
2126 static inline TCGv get_src2(unsigned int insn, TCGv def)
2130 if (IS_IMM) { /* immediate */
2131 target_long simm = GET_FIELDs(insn, 19, 31);
2132 tcg_gen_movi_tl(def, simm);
2133 } else { /* register */
2134 unsigned int rs2 = GET_FIELD(insn, 27, 31);
2136 tcg_gen_movi_tl(def, 0);
2137 } else if (rs2 < 8) {
2138 r_rs2 = cpu_gregs[rs2];
2140 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
/* V9 conditional FP moves (FMOVcc). */
/* fmovs: fold the 64-bit comparison down to a 32-bit condition value
   c32, then use movcond_i32 to pick between the new value (rs) and
   the current destination (rd). */
2146 #ifdef TARGET_SPARC64
2147 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2149 TCGv_i32 c32, zero, dst, s1, s2;
2151 /* We have two choices here: extend the 32 bit data and use movcond_i64,
2152 or fold the comparison down to 32 bits and use movcond_i32. Choose
2154 c32 = tcg_temp_new_i32();
2156 tcg_gen_trunc_i64_i32(c32, cmp->c1);
/* General case: materialize the condition as 0/1 in 64 bits, then
   truncate to 32. */
2158 TCGv_i64 c64 = tcg_temp_new_i64();
2159 tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2160 tcg_gen_trunc_i64_i32(c32, c64);
2161 tcg_temp_free_i64(c64);
2164 s1 = gen_load_fpr_F(dc, rs);
2165 s2 = gen_load_fpr_F(dc, rd);
2166 dst = gen_dest_fpr_F();
2167 zero = tcg_const_i32(0);
2169 tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2171 tcg_temp_free_i32(c32);
2172 tcg_temp_free_i32(zero);
2173 gen_store_fpr_F(dc, rd, dst);
/* fmovd: the comparison is already 64-bit, so movcond_i64 directly
   selects between the source and the current destination. */
2176 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2178 TCGv_i64 dst = gen_dest_fpr_D();
2179 tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2180 gen_load_fpr_D(dc, rs),
2181 gen_load_fpr_D(dc, rd));
2182 gen_store_fpr_D(dc, rd, dst);
/* fmovq: two movcond_i64 on the register-pair halves; both use the
   same condition, so the pair moves (or stays) as a unit. */
2185 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2187 int qd = QFPREG(rd);
2188 int qs = QFPREG(rs);
2190 tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2191 cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2192 tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2193 cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2195 gen_update_fprs_dirty(qd);
/* Compute a pointer to the current trap-state entry:
   r_tsptr = &env->ts[env->tl & MAXTL_MASK].
   Done at translation time as TCG ops because env->tl is only known
   at run time. */
2198 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2200 TCGv_i32 r_tl = tcg_temp_new_i32();
2202 /* load env->tl into r_tl */
2203 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2205 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2206 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2208 /* calculate offset to current trap state from env->ts, reuse r_tl */
2209 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2210 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2212 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
/* Widen the i32 byte offset to pointer width before the pointer add. */
2214 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2215 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2216 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2217 tcg_temp_free_ptr(r_tl_tmp);
2220 tcg_temp_free_i32(r_tl);
/* VIS EDGE instruction family: compute the edge mask for a partial
   store at the start/end of a block, for element width 8/16/32,
   left or right variant; when cc is set, also set the integer
   condition codes as for subcc(s1, s2). */
2223 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2224 int width, bool cc, bool left)
2226 TCGv lo1, lo2, t1, t2;
2227 uint64_t amask, tabl, tabr;
2228 int shift, imask, omask;
/* Optional condition-code update: record a SUB for lazy evaluation. */
2231 tcg_gen_mov_tl(cpu_cc_src, s1);
2232 tcg_gen_mov_tl(cpu_cc_src2, s2);
2233 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2234 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2235 dc->cc_op = CC_OP_SUB;
2238 /* Theory of operation: there are two tables, left and right (not to
2239 be confused with the left and right versions of the opcode). These
2240 are indexed by the low 3 bits of the inputs. To make things "easy",
2241 these tables are loaded into two constants, TABL and TABR below.
2242 The operation index = (input & imask) << shift calculates the index
2243 into the constant, while val = (table >> index) & omask calculates
2244 the value we're looking for. */
/* width == 8: full 8-bit masks, one table byte per alignment.
   (shift/imask/omask assignments are on lines elided from this view.) */
2251 tabl = 0x80c0e0f0f8fcfeffULL;
2252 tabr = 0xff7f3f1f0f070301ULL;
2254 tabl = 0x0103070f1f3f7fffULL;
2255 tabr = 0xfffefcf8f0e0c080ULL;
/* width == 32: 2-bit masks packed into small constants. */
2275 tabl = (2 << 2) | 3;
2276 tabr = (3 << 2) | 1;
2278 tabl = (1 << 2) | 3;
2279 tabr = (3 << 2) | 2;
/* Look up the left mask (from s1) and right mask (from s2). */
2286 lo1 = tcg_temp_new();
2287 lo2 = tcg_temp_new();
2288 tcg_gen_andi_tl(lo1, s1, imask);
2289 tcg_gen_andi_tl(lo2, s2, imask);
2290 tcg_gen_shli_tl(lo1, lo1, shift);
2291 tcg_gen_shli_tl(lo2, lo2, shift);
2293 t1 = tcg_const_tl(tabl);
2294 t2 = tcg_const_tl(tabr);
2295 tcg_gen_shr_tl(lo1, t1, lo1);
2296 tcg_gen_shr_tl(lo2, t2, lo2);
2297 tcg_gen_andi_tl(dst, lo1, omask);
2298 tcg_gen_andi_tl(lo2, lo2, omask);
/* Compare the addresses ignoring the low bits (amask). */
2302 amask &= 0xffffffffULL;
/* NOTE(review): this masks s1/s2 in place; s1/s2 may alias register
   TCGvs from the caller -- confirm callers pass scratch copies. */
2304 tcg_gen_andi_tl(s1, s1, amask);
2305 tcg_gen_andi_tl(s2, s2, amask);
2307 /* We want to compute
2308 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2309 We've already done dst = lo1, so this reduces to
2310 dst &= (s1 == s2 ? -1 : lo2)
/* Branchless: neg(setcond) gives all-ones when equal, which ORed into
   lo2 makes the final AND a no-op in that case. */
2315 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2316 tcg_gen_neg_tl(t1, t1);
2317 tcg_gen_or_tl(lo2, lo2, t1);
2318 tcg_gen_and_tl(dst, dst, lo2);
/* VIS ALIGNADDR: dst = (s1 + s2) & ~7, and deposit the low 3 bits of
   the sum (negated for the "little" variant) into GSR.align. */
2326 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2328 TCGv tmp = tcg_temp_new();
2330 tcg_gen_add_tl(tmp, s1, s2);
2331 tcg_gen_andi_tl(dst, tmp, -8);
/* alignaddrl variant: store the two's complement of the offset. */
2333 tcg_gen_neg_tl(tmp, tmp);
2335 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
/* VIS FALIGNDATA: concatenate s1:s2 and extract 8 bytes starting at
   the byte offset held in GSR.align. */
2340 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2344 t1 = tcg_temp_new();
2345 t2 = tcg_temp_new();
2346 shift = tcg_temp_new();
/* Byte offset -> bit shift. */
2348 tcg_gen_andi_tl(shift, gsr, 7);
2349 tcg_gen_shli_tl(shift, shift, 3);
2350 tcg_gen_shl_tl(t1, s1, shift);
2352 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2353 shift of (up to 63) followed by a constant shift of 1. */
2354 tcg_gen_xori_tl(shift, shift, 63);
2355 tcg_gen_shr_tl(t2, s2, shift);
2356 tcg_gen_shri_tl(t2, t2, 1);
2358 tcg_gen_or_tl(dst, t1, t2);
2362 tcg_temp_free(shift);
/* Bail out of instruction translation (continuation elided from this
   view, presumably a goto to the illegal/unimplemented label) when the
   CPU model lacks the named integer-unit feature... */
2366 #define CHECK_IU_FEATURE(dc, FEATURE) \
2367 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
/* ...or the named FPU feature. */
2369 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2370 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2373 /* before an instruction, dc->pc must be static */
2374 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2376 unsigned int opc, rs1, rs2, rd;
2377 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2378 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2379 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2382 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2383 tcg_gen_debug_insn_start(dc->pc);
2386 opc = GET_FIELD(insn, 0, 1);
2388 rd = GET_FIELD(insn, 2, 6);
2390 cpu_tmp1 = cpu_src1 = tcg_temp_new();
2391 cpu_tmp2 = cpu_src2 = tcg_temp_new();
2394 case 0: /* branches/sethi */
2396 unsigned int xop = GET_FIELD(insn, 7, 9);
2399 #ifdef TARGET_SPARC64
2400 case 0x1: /* V9 BPcc */
2404 target = GET_FIELD_SP(insn, 0, 18);
2405 target = sign_extend(target, 19);
2407 cc = GET_FIELD_SP(insn, 20, 21);
2409 do_branch(dc, target, insn, 0);
2411 do_branch(dc, target, insn, 1);
2416 case 0x3: /* V9 BPr */
2418 target = GET_FIELD_SP(insn, 0, 13) |
2419 (GET_FIELD_SP(insn, 20, 21) << 14);
2420 target = sign_extend(target, 16);
2422 cpu_src1 = get_src1(insn, cpu_src1);
2423 do_branch_reg(dc, target, insn, cpu_src1);
2426 case 0x5: /* V9 FBPcc */
2428 int cc = GET_FIELD_SP(insn, 20, 21);
2429 if (gen_trap_ifnofpu(dc)) {
2432 target = GET_FIELD_SP(insn, 0, 18);
2433 target = sign_extend(target, 19);
2435 do_fbranch(dc, target, insn, cc);
2439 case 0x7: /* CBN+x */
2444 case 0x2: /* BN+x */
2446 target = GET_FIELD(insn, 10, 31);
2447 target = sign_extend(target, 22);
2449 do_branch(dc, target, insn, 0);
2452 case 0x6: /* FBN+x */
2454 if (gen_trap_ifnofpu(dc)) {
2457 target = GET_FIELD(insn, 10, 31);
2458 target = sign_extend(target, 22);
2460 do_fbranch(dc, target, insn, 0);
2463 case 0x4: /* SETHI */
2465 uint32_t value = GET_FIELD(insn, 10, 31);
2468 r_const = tcg_const_tl(value << 10);
2469 gen_movl_TN_reg(rd, r_const);
2470 tcg_temp_free(r_const);
2473 case 0x0: /* UNIMPL */
2482 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2485 r_const = tcg_const_tl(dc->pc);
2486 gen_movl_TN_reg(15, r_const);
2487 tcg_temp_free(r_const);
2490 #ifdef TARGET_SPARC64
2491 if (unlikely(AM_CHECK(dc))) {
2492 target &= 0xffffffffULL;
2498 case 2: /* FPU & Logical Operations */
2500 unsigned int xop = GET_FIELD(insn, 7, 12);
2501 if (xop == 0x3a) { /* generate trap */
2502 int cond = GET_FIELD(insn, 3, 6);
2514 /* Conditional trap. */
2516 #ifdef TARGET_SPARC64
2518 int cc = GET_FIELD_SP(insn, 11, 12);
2520 gen_compare(&cmp, 0, cond, dc);
2521 } else if (cc == 2) {
2522 gen_compare(&cmp, 1, cond, dc);
2527 gen_compare(&cmp, 0, cond, dc);
2529 l1 = gen_new_label();
2530 tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2531 cmp.c1, cmp.c2, l1);
2535 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2536 ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2538 /* Don't use the normal temporaries, as they may well have
2539 gone out of scope with the branch above. While we're
2540 doing that we might as well pre-truncate to 32-bit. */
2541 trap = tcg_temp_new_i32();
2543 rs1 = GET_FIELD_SP(insn, 14, 18);
2545 rs2 = GET_FIELD_SP(insn, 0, 6);
2547 tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2548 /* Signal that the trap value is fully constant. */
2551 TCGv t1 = tcg_temp_new();
2552 gen_movl_reg_TN(rs1, t1);
2553 tcg_gen_trunc_tl_i32(trap, t1);
2555 tcg_gen_addi_i32(trap, trap, rs2);
2558 TCGv t1 = tcg_temp_new();
2559 TCGv t2 = tcg_temp_new();
2560 rs2 = GET_FIELD_SP(insn, 0, 4);
2561 gen_movl_reg_TN(rs1, t1);
2562 gen_movl_reg_TN(rs2, t2);
2563 tcg_gen_add_tl(t1, t1, t2);
2564 tcg_gen_trunc_tl_i32(trap, t1);
2569 tcg_gen_andi_i32(trap, trap, mask);
2570 tcg_gen_addi_i32(trap, trap, TT_TRAP);
2573 gen_helper_raise_exception(cpu_env, trap);
2574 tcg_temp_free_i32(trap);
2577 /* An unconditional trap ends the TB. */
2581 /* A conditional trap falls through to the next insn. */
2585 } else if (xop == 0x28) {
2586 rs1 = GET_FIELD(insn, 13, 17);
2589 #ifndef TARGET_SPARC64
2590 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2591 manual, rdy on the microSPARC
2593 case 0x0f: /* stbar in the SPARCv8 manual,
2594 rdy on the microSPARC II */
2595 case 0x10 ... 0x1f: /* implementation-dependent in the
2596 SPARCv8 manual, rdy on the
2599 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2602 /* Read Asr17 for a Leon3 monoprocessor */
2603 r_const = tcg_const_tl((1 << 8)
2604 | (dc->def->nwindows - 1));
2605 gen_movl_TN_reg(rd, r_const);
2606 tcg_temp_free(r_const);
2610 gen_movl_TN_reg(rd, cpu_y);
2612 #ifdef TARGET_SPARC64
2613 case 0x2: /* V9 rdccr */
2614 gen_helper_compute_psr(cpu_env);
2615 gen_helper_rdccr(cpu_dst, cpu_env);
2616 gen_movl_TN_reg(rd, cpu_dst);
2618 case 0x3: /* V9 rdasi */
2619 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2620 gen_movl_TN_reg(rd, cpu_dst);
2622 case 0x4: /* V9 rdtick */
2626 r_tickptr = tcg_temp_new_ptr();
2627 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2628 offsetof(CPUSPARCState, tick));
2629 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2630 tcg_temp_free_ptr(r_tickptr);
2631 gen_movl_TN_reg(rd, cpu_dst);
2634 case 0x5: /* V9 rdpc */
2638 if (unlikely(AM_CHECK(dc))) {
2639 r_const = tcg_const_tl(dc->pc & 0xffffffffULL);
2641 r_const = tcg_const_tl(dc->pc);
2643 gen_movl_TN_reg(rd, r_const);
2644 tcg_temp_free(r_const);
2647 case 0x6: /* V9 rdfprs */
2648 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2649 gen_movl_TN_reg(rd, cpu_dst);
2651 case 0xf: /* V9 membar */
2652 break; /* no effect */
2653 case 0x13: /* Graphics Status */
2654 if (gen_trap_ifnofpu(dc)) {
2657 gen_movl_TN_reg(rd, cpu_gsr);
2659 case 0x16: /* Softint */
2660 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2661 gen_movl_TN_reg(rd, cpu_dst);
2663 case 0x17: /* Tick compare */
2664 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2666 case 0x18: /* System tick */
2670 r_tickptr = tcg_temp_new_ptr();
2671 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2672 offsetof(CPUSPARCState, stick));
2673 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2674 tcg_temp_free_ptr(r_tickptr);
2675 gen_movl_TN_reg(rd, cpu_dst);
2678 case 0x19: /* System tick compare */
2679 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2681 case 0x10: /* Performance Control */
2682 case 0x11: /* Performance Instrumentation Counter */
2683 case 0x12: /* Dispatch Control */
2684 case 0x14: /* Softint set, WO */
2685 case 0x15: /* Softint clear, WO */
2690 #if !defined(CONFIG_USER_ONLY)
2691 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2692 #ifndef TARGET_SPARC64
2693 if (!supervisor(dc))
2695 gen_helper_compute_psr(cpu_env);
2696 dc->cc_op = CC_OP_FLAGS;
2697 gen_helper_rdpsr(cpu_dst, cpu_env);
2699 CHECK_IU_FEATURE(dc, HYPV);
2700 if (!hypervisor(dc))
2702 rs1 = GET_FIELD(insn, 13, 17);
2705 // gen_op_rdhpstate();
2708 // gen_op_rdhtstate();
2711 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2714 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2717 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2719 case 31: // hstick_cmpr
2720 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2726 gen_movl_TN_reg(rd, cpu_dst);
2728 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2729 if (!supervisor(dc))
2731 #ifdef TARGET_SPARC64
2732 rs1 = GET_FIELD(insn, 13, 17);
2738 r_tsptr = tcg_temp_new_ptr();
2739 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2740 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2741 offsetof(trap_state, tpc));
2742 tcg_temp_free_ptr(r_tsptr);
2749 r_tsptr = tcg_temp_new_ptr();
2750 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2751 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2752 offsetof(trap_state, tnpc));
2753 tcg_temp_free_ptr(r_tsptr);
2760 r_tsptr = tcg_temp_new_ptr();
2761 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2762 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2763 offsetof(trap_state, tstate));
2764 tcg_temp_free_ptr(r_tsptr);
2771 r_tsptr = tcg_temp_new_ptr();
2772 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2773 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2774 offsetof(trap_state, tt));
2775 tcg_temp_free_ptr(r_tsptr);
2776 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2783 r_tickptr = tcg_temp_new_ptr();
2784 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2785 offsetof(CPUSPARCState, tick));
2786 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2787 gen_movl_TN_reg(rd, cpu_tmp0);
2788 tcg_temp_free_ptr(r_tickptr);
2792 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2795 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2796 offsetof(CPUSPARCState, pstate));
2797 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2800 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2801 offsetof(CPUSPARCState, tl));
2802 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2805 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2806 offsetof(CPUSPARCState, psrpil));
2807 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2810 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2813 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2814 offsetof(CPUSPARCState, cansave));
2815 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2817 case 11: // canrestore
2818 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2819 offsetof(CPUSPARCState, canrestore));
2820 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2822 case 12: // cleanwin
2823 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2824 offsetof(CPUSPARCState, cleanwin));
2825 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2827 case 13: // otherwin
2828 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2829 offsetof(CPUSPARCState, otherwin));
2830 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2833 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2834 offsetof(CPUSPARCState, wstate));
2835 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2837 case 16: // UA2005 gl
2838 CHECK_IU_FEATURE(dc, GL);
2839 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2840 offsetof(CPUSPARCState, gl));
2841 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2843 case 26: // UA2005 strand status
2844 CHECK_IU_FEATURE(dc, HYPV);
2845 if (!hypervisor(dc))
2847 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2850 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2857 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2859 gen_movl_TN_reg(rd, cpu_tmp0);
2861 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2862 #ifdef TARGET_SPARC64
2864 gen_helper_flushw(cpu_env);
2866 if (!supervisor(dc))
2868 gen_movl_TN_reg(rd, cpu_tbr);
2872 } else if (xop == 0x34) { /* FPU Operations */
2873 if (gen_trap_ifnofpu(dc)) {
2876 gen_op_clear_ieee_excp_and_FTT();
2877 rs1 = GET_FIELD(insn, 13, 17);
2878 rs2 = GET_FIELD(insn, 27, 31);
2879 xop = GET_FIELD(insn, 18, 26);
2882 case 0x1: /* fmovs */
2883 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2884 gen_store_fpr_F(dc, rd, cpu_src1_32);
2886 case 0x5: /* fnegs */
2887 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2889 case 0x9: /* fabss */
2890 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2892 case 0x29: /* fsqrts */
2893 CHECK_FPU_FEATURE(dc, FSQRT);
2894 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2896 case 0x2a: /* fsqrtd */
2897 CHECK_FPU_FEATURE(dc, FSQRT);
2898 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2900 case 0x2b: /* fsqrtq */
2901 CHECK_FPU_FEATURE(dc, FLOAT128);
2902 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2904 case 0x41: /* fadds */
2905 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2907 case 0x42: /* faddd */
2908 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2910 case 0x43: /* faddq */
2911 CHECK_FPU_FEATURE(dc, FLOAT128);
2912 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2914 case 0x45: /* fsubs */
2915 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2917 case 0x46: /* fsubd */
2918 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2920 case 0x47: /* fsubq */
2921 CHECK_FPU_FEATURE(dc, FLOAT128);
2922 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2924 case 0x49: /* fmuls */
2925 CHECK_FPU_FEATURE(dc, FMUL);
2926 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2928 case 0x4a: /* fmuld */
2929 CHECK_FPU_FEATURE(dc, FMUL);
2930 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2932 case 0x4b: /* fmulq */
2933 CHECK_FPU_FEATURE(dc, FLOAT128);
2934 CHECK_FPU_FEATURE(dc, FMUL);
2935 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2937 case 0x4d: /* fdivs */
2938 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2940 case 0x4e: /* fdivd */
2941 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2943 case 0x4f: /* fdivq */
2944 CHECK_FPU_FEATURE(dc, FLOAT128);
2945 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2947 case 0x69: /* fsmuld */
2948 CHECK_FPU_FEATURE(dc, FSMULD);
2949 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2951 case 0x6e: /* fdmulq */
2952 CHECK_FPU_FEATURE(dc, FLOAT128);
2953 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2955 case 0xc4: /* fitos */
2956 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2958 case 0xc6: /* fdtos */
2959 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2961 case 0xc7: /* fqtos */
2962 CHECK_FPU_FEATURE(dc, FLOAT128);
2963 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2965 case 0xc8: /* fitod */
2966 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2968 case 0xc9: /* fstod */
2969 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2971 case 0xcb: /* fqtod */
2972 CHECK_FPU_FEATURE(dc, FLOAT128);
2973 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2975 case 0xcc: /* fitoq */
2976 CHECK_FPU_FEATURE(dc, FLOAT128);
2977 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2979 case 0xcd: /* fstoq */
2980 CHECK_FPU_FEATURE(dc, FLOAT128);
2981 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2983 case 0xce: /* fdtoq */
2984 CHECK_FPU_FEATURE(dc, FLOAT128);
2985 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2987 case 0xd1: /* fstoi */
2988 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2990 case 0xd2: /* fdtoi */
2991 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2993 case 0xd3: /* fqtoi */
2994 CHECK_FPU_FEATURE(dc, FLOAT128);
2995 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2997 #ifdef TARGET_SPARC64
2998 case 0x2: /* V9 fmovd */
2999 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3000 gen_store_fpr_D(dc, rd, cpu_src1_64);
3002 case 0x3: /* V9 fmovq */
3003 CHECK_FPU_FEATURE(dc, FLOAT128);
3004 gen_move_Q(rd, rs2);
3006 case 0x6: /* V9 fnegd */
3007 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3009 case 0x7: /* V9 fnegq */
3010 CHECK_FPU_FEATURE(dc, FLOAT128);
3011 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3013 case 0xa: /* V9 fabsd */
3014 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3016 case 0xb: /* V9 fabsq */
3017 CHECK_FPU_FEATURE(dc, FLOAT128);
3018 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3020 case 0x81: /* V9 fstox */
3021 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3023 case 0x82: /* V9 fdtox */
3024 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3026 case 0x83: /* V9 fqtox */
3027 CHECK_FPU_FEATURE(dc, FLOAT128);
3028 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3030 case 0x84: /* V9 fxtos */
3031 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3033 case 0x88: /* V9 fxtod */
3034 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3036 case 0x8c: /* V9 fxtoq */
3037 CHECK_FPU_FEATURE(dc, FLOAT128);
3038 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3044 } else if (xop == 0x35) { /* FPU Operations */
3045 #ifdef TARGET_SPARC64
3048 if (gen_trap_ifnofpu(dc)) {
3051 gen_op_clear_ieee_excp_and_FTT();
3052 rs1 = GET_FIELD(insn, 13, 17);
3053 rs2 = GET_FIELD(insn, 27, 31);
3054 xop = GET_FIELD(insn, 18, 26);
3057 #ifdef TARGET_SPARC64
3061 cond = GET_FIELD_SP(insn, 14, 17); \
3062 cpu_src1 = get_src1(insn, cpu_src1); \
3063 gen_compare_reg(&cmp, cond, cpu_src1); \
3064 gen_fmov##sz(dc, &cmp, rd, rs2); \
3065 free_compare(&cmp); \
3068 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3071 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3074 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3075 CHECK_FPU_FEATURE(dc, FLOAT128);
3082 #ifdef TARGET_SPARC64
3083 #define FMOVCC(fcc, sz) \
3086 cond = GET_FIELD_SP(insn, 14, 17); \
3087 gen_fcompare(&cmp, fcc, cond); \
3088 gen_fmov##sz(dc, &cmp, rd, rs2); \
3089 free_compare(&cmp); \
3092 case 0x001: /* V9 fmovscc %fcc0 */
3095 case 0x002: /* V9 fmovdcc %fcc0 */
3098 case 0x003: /* V9 fmovqcc %fcc0 */
3099 CHECK_FPU_FEATURE(dc, FLOAT128);
3102 case 0x041: /* V9 fmovscc %fcc1 */
3105 case 0x042: /* V9 fmovdcc %fcc1 */
3108 case 0x043: /* V9 fmovqcc %fcc1 */
3109 CHECK_FPU_FEATURE(dc, FLOAT128);
3112 case 0x081: /* V9 fmovscc %fcc2 */
3115 case 0x082: /* V9 fmovdcc %fcc2 */
3118 case 0x083: /* V9 fmovqcc %fcc2 */
3119 CHECK_FPU_FEATURE(dc, FLOAT128);
3122 case 0x0c1: /* V9 fmovscc %fcc3 */
3125 case 0x0c2: /* V9 fmovdcc %fcc3 */
3128 case 0x0c3: /* V9 fmovqcc %fcc3 */
3129 CHECK_FPU_FEATURE(dc, FLOAT128);
3133 #define FMOVCC(xcc, sz) \
3136 cond = GET_FIELD_SP(insn, 14, 17); \
3137 gen_compare(&cmp, xcc, cond, dc); \
3138 gen_fmov##sz(dc, &cmp, rd, rs2); \
3139 free_compare(&cmp); \
3142 case 0x101: /* V9 fmovscc %icc */
3145 case 0x102: /* V9 fmovdcc %icc */
3148 case 0x103: /* V9 fmovqcc %icc */
3149 CHECK_FPU_FEATURE(dc, FLOAT128);
3152 case 0x181: /* V9 fmovscc %xcc */
3155 case 0x182: /* V9 fmovdcc %xcc */
3158 case 0x183: /* V9 fmovqcc %xcc */
3159 CHECK_FPU_FEATURE(dc, FLOAT128);
3164 case 0x51: /* fcmps, V9 %fcc */
3165 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3166 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3167 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3169 case 0x52: /* fcmpd, V9 %fcc */
3170 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3171 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3172 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3174 case 0x53: /* fcmpq, V9 %fcc */
3175 CHECK_FPU_FEATURE(dc, FLOAT128);
3176 gen_op_load_fpr_QT0(QFPREG(rs1));
3177 gen_op_load_fpr_QT1(QFPREG(rs2));
3178 gen_op_fcmpq(rd & 3);
3180 case 0x55: /* fcmpes, V9 %fcc */
3181 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3182 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3183 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3185 case 0x56: /* fcmped, V9 %fcc */
3186 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3187 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3188 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3190 case 0x57: /* fcmpeq, V9 %fcc */
3191 CHECK_FPU_FEATURE(dc, FLOAT128);
3192 gen_op_load_fpr_QT0(QFPREG(rs1));
3193 gen_op_load_fpr_QT1(QFPREG(rs2));
3194 gen_op_fcmpeq(rd & 3);
3199 } else if (xop == 0x2) {
3202 rs1 = GET_FIELD(insn, 13, 17);
3204 // or %g0, x, y -> mov T0, x; mov y, T0
3205 if (IS_IMM) { /* immediate */
3208 simm = GET_FIELDs(insn, 19, 31);
3209 r_const = tcg_const_tl(simm);
3210 gen_movl_TN_reg(rd, r_const);
3211 tcg_temp_free(r_const);
3212 } else { /* register */
3213 rs2 = GET_FIELD(insn, 27, 31);
3214 gen_movl_reg_TN(rs2, cpu_dst);
3215 gen_movl_TN_reg(rd, cpu_dst);
3218 cpu_src1 = get_src1(insn, cpu_src1);
3219 if (IS_IMM) { /* immediate */
3220 simm = GET_FIELDs(insn, 19, 31);
3221 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3222 gen_movl_TN_reg(rd, cpu_dst);
3223 } else { /* register */
3224 // or x, %g0, y -> mov T1, x; mov y, T1
3225 rs2 = GET_FIELD(insn, 27, 31);
3227 gen_movl_reg_TN(rs2, cpu_src2);
3228 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3229 gen_movl_TN_reg(rd, cpu_dst);
3231 gen_movl_TN_reg(rd, cpu_src1);
3234 #ifdef TARGET_SPARC64
3235 } else if (xop == 0x25) { /* sll, V9 sllx */
3236 cpu_src1 = get_src1(insn, cpu_src1);
3237 if (IS_IMM) { /* immediate */
3238 simm = GET_FIELDs(insn, 20, 31);
3239 if (insn & (1 << 12)) {
3240 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3242 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3244 } else { /* register */
3245 rs2 = GET_FIELD(insn, 27, 31);
3246 gen_movl_reg_TN(rs2, cpu_src2);
3247 if (insn & (1 << 12)) {
3248 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3250 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3252 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3254 gen_movl_TN_reg(rd, cpu_dst);
3255 } else if (xop == 0x26) { /* srl, V9 srlx */
3256 cpu_src1 = get_src1(insn, cpu_src1);
3257 if (IS_IMM) { /* immediate */
3258 simm = GET_FIELDs(insn, 20, 31);
3259 if (insn & (1 << 12)) {
3260 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3262 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3263 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3265 } else { /* register */
3266 rs2 = GET_FIELD(insn, 27, 31);
3267 gen_movl_reg_TN(rs2, cpu_src2);
3268 if (insn & (1 << 12)) {
3269 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3270 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3272 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3273 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3274 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3277 gen_movl_TN_reg(rd, cpu_dst);
3278 } else if (xop == 0x27) { /* sra, V9 srax */
3279 cpu_src1 = get_src1(insn, cpu_src1);
3280 if (IS_IMM) { /* immediate */
3281 simm = GET_FIELDs(insn, 20, 31);
3282 if (insn & (1 << 12)) {
3283 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3285 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3286 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3287 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3289 } else { /* register */
3290 rs2 = GET_FIELD(insn, 27, 31);
3291 gen_movl_reg_TN(rs2, cpu_src2);
3292 if (insn & (1 << 12)) {
3293 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3294 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3296 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3297 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3298 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3299 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3302 gen_movl_TN_reg(rd, cpu_dst);
3304 } else if (xop < 0x36) {
3306 cpu_src1 = get_src1(insn, cpu_src1);
3307 cpu_src2 = get_src2(insn, cpu_src2);
3308 switch (xop & ~0x10) {
3311 simm = GET_FIELDs(insn, 19, 31);
3313 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3314 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3315 dc->cc_op = CC_OP_ADD;
3317 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3321 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3322 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3323 dc->cc_op = CC_OP_ADD;
3325 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3331 simm = GET_FIELDs(insn, 19, 31);
3332 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3334 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3337 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3338 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3339 dc->cc_op = CC_OP_LOGIC;
3344 simm = GET_FIELDs(insn, 19, 31);
3345 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3347 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3350 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3351 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3352 dc->cc_op = CC_OP_LOGIC;
3357 simm = GET_FIELDs(insn, 19, 31);
3358 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3360 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3363 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3364 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3365 dc->cc_op = CC_OP_LOGIC;
3370 simm = GET_FIELDs(insn, 19, 31);
3372 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3374 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3378 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3379 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3380 dc->cc_op = CC_OP_SUB;
3382 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3386 case 0x5: /* andn */
3388 simm = GET_FIELDs(insn, 19, 31);
3389 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3391 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3394 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3395 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3396 dc->cc_op = CC_OP_LOGIC;
3401 simm = GET_FIELDs(insn, 19, 31);
3402 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3404 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3407 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3408 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3409 dc->cc_op = CC_OP_LOGIC;
3412 case 0x7: /* xorn */
3414 simm = GET_FIELDs(insn, 19, 31);
3415 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3417 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3418 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3421 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3422 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3423 dc->cc_op = CC_OP_LOGIC;
3426 case 0x8: /* addx, V9 addc */
3427 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3430 #ifdef TARGET_SPARC64
3431 case 0x9: /* V9 mulx */
3433 simm = GET_FIELDs(insn, 19, 31);
3434 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3436 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3440 case 0xa: /* umul */
3441 CHECK_IU_FEATURE(dc, MUL);
3442 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3444 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3445 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3446 dc->cc_op = CC_OP_LOGIC;
3449 case 0xb: /* smul */
3450 CHECK_IU_FEATURE(dc, MUL);
3451 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3453 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3454 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3455 dc->cc_op = CC_OP_LOGIC;
3458 case 0xc: /* subx, V9 subc */
3459 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3462 #ifdef TARGET_SPARC64
3463 case 0xd: /* V9 udivx */
3464 gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3467 case 0xe: /* udiv */
3468 CHECK_IU_FEATURE(dc, DIV);
3470 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3472 dc->cc_op = CC_OP_DIV;
3474 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3478 case 0xf: /* sdiv */
3479 CHECK_IU_FEATURE(dc, DIV);
3481 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3483 dc->cc_op = CC_OP_DIV;
3485 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3492 gen_movl_TN_reg(rd, cpu_dst);
3494 cpu_src1 = get_src1(insn, cpu_src1);
3495 cpu_src2 = get_src2(insn, cpu_src2);
3497 case 0x20: /* taddcc */
3498 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3499 gen_movl_TN_reg(rd, cpu_dst);
3500 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3501 dc->cc_op = CC_OP_TADD;
3503 case 0x21: /* tsubcc */
3504 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3505 gen_movl_TN_reg(rd, cpu_dst);
3506 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3507 dc->cc_op = CC_OP_TSUB;
3509 case 0x22: /* taddcctv */
3510 gen_helper_taddcctv(cpu_dst, cpu_env,
3511 cpu_src1, cpu_src2);
3512 gen_movl_TN_reg(rd, cpu_dst);
3513 dc->cc_op = CC_OP_TADDTV;
3515 case 0x23: /* tsubcctv */
3516 gen_helper_tsubcctv(cpu_dst, cpu_env,
3517 cpu_src1, cpu_src2);
3518 gen_movl_TN_reg(rd, cpu_dst);
3519 dc->cc_op = CC_OP_TSUBTV;
3521 case 0x24: /* mulscc */
3522 gen_helper_compute_psr(cpu_env);
3523 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3524 gen_movl_TN_reg(rd, cpu_dst);
3525 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3526 dc->cc_op = CC_OP_ADD;
3528 #ifndef TARGET_SPARC64
3529 case 0x25: /* sll */
3530 if (IS_IMM) { /* immediate */
3531 simm = GET_FIELDs(insn, 20, 31);
3532 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3533 } else { /* register */
3534 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3535 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3537 gen_movl_TN_reg(rd, cpu_dst);
3539 case 0x26: /* srl */
3540 if (IS_IMM) { /* immediate */
3541 simm = GET_FIELDs(insn, 20, 31);
3542 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3543 } else { /* register */
3544 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3545 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3547 gen_movl_TN_reg(rd, cpu_dst);
3549 case 0x27: /* sra */
3550 if (IS_IMM) { /* immediate */
3551 simm = GET_FIELDs(insn, 20, 31);
3552 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3553 } else { /* register */
3554 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3555 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3557 gen_movl_TN_reg(rd, cpu_dst);
3564 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3565 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3567 #ifndef TARGET_SPARC64
3568 case 0x01 ... 0x0f: /* undefined in the
3572 case 0x10 ... 0x1f: /* implementation-dependent
3578 case 0x2: /* V9 wrccr */
3579 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3580 gen_helper_wrccr(cpu_env, cpu_dst);
3581 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3582 dc->cc_op = CC_OP_FLAGS;
3584 case 0x3: /* V9 wrasi */
3585 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3586 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3587 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3589 case 0x6: /* V9 wrfprs */
3590 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3591 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3597 case 0xf: /* V9 sir, nop if user */
3598 #if !defined(CONFIG_USER_ONLY)
3599 if (supervisor(dc)) {
3604 case 0x13: /* Graphics Status */
3605 if (gen_trap_ifnofpu(dc)) {
3608 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3610 case 0x14: /* Softint set */
3611 if (!supervisor(dc))
3613 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3614 gen_helper_set_softint(cpu_env, cpu_tmp64);
3616 case 0x15: /* Softint clear */
3617 if (!supervisor(dc))
3619 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3620 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3622 case 0x16: /* Softint write */
3623 if (!supervisor(dc))
3625 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3626 gen_helper_write_softint(cpu_env, cpu_tmp64);
3628 case 0x17: /* Tick compare */
3629 #if !defined(CONFIG_USER_ONLY)
3630 if (!supervisor(dc))
3636 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3638 r_tickptr = tcg_temp_new_ptr();
3639 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3640 offsetof(CPUSPARCState, tick));
3641 gen_helper_tick_set_limit(r_tickptr,
3643 tcg_temp_free_ptr(r_tickptr);
3646 case 0x18: /* System tick */
3647 #if !defined(CONFIG_USER_ONLY)
3648 if (!supervisor(dc))
3654 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3656 r_tickptr = tcg_temp_new_ptr();
3657 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3658 offsetof(CPUSPARCState, stick));
3659 gen_helper_tick_set_count(r_tickptr,
3661 tcg_temp_free_ptr(r_tickptr);
3664 case 0x19: /* System tick compare */
3665 #if !defined(CONFIG_USER_ONLY)
3666 if (!supervisor(dc))
3672 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3674 r_tickptr = tcg_temp_new_ptr();
3675 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3676 offsetof(CPUSPARCState, stick));
3677 gen_helper_tick_set_limit(r_tickptr,
3679 tcg_temp_free_ptr(r_tickptr);
3683 case 0x10: /* Performance Control */
3684 case 0x11: /* Performance Instrumentation
3686 case 0x12: /* Dispatch Control */
3693 #if !defined(CONFIG_USER_ONLY)
3694 case 0x31: /* wrpsr, V9 saved, restored */
3696 if (!supervisor(dc))
3698 #ifdef TARGET_SPARC64
3701 gen_helper_saved(cpu_env);
3704 gen_helper_restored(cpu_env);
3706 case 2: /* UA2005 allclean */
3707 case 3: /* UA2005 otherw */
3708 case 4: /* UA2005 normalw */
3709 case 5: /* UA2005 invalw */
3715 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3716 gen_helper_wrpsr(cpu_env, cpu_dst);
3717 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3718 dc->cc_op = CC_OP_FLAGS;
3726 case 0x32: /* wrwim, V9 wrpr */
3728 if (!supervisor(dc))
3730 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3731 #ifdef TARGET_SPARC64
3737 r_tsptr = tcg_temp_new_ptr();
3738 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3739 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3740 offsetof(trap_state, tpc));
3741 tcg_temp_free_ptr(r_tsptr);
3748 r_tsptr = tcg_temp_new_ptr();
3749 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3750 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3751 offsetof(trap_state, tnpc));
3752 tcg_temp_free_ptr(r_tsptr);
3759 r_tsptr = tcg_temp_new_ptr();
3760 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3761 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3762 offsetof(trap_state,
3764 tcg_temp_free_ptr(r_tsptr);
3771 r_tsptr = tcg_temp_new_ptr();
3772 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3773 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3774 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3775 offsetof(trap_state, tt));
3776 tcg_temp_free_ptr(r_tsptr);
3783 r_tickptr = tcg_temp_new_ptr();
3784 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3785 offsetof(CPUSPARCState, tick));
3786 gen_helper_tick_set_count(r_tickptr,
3788 tcg_temp_free_ptr(r_tickptr);
3792 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3796 TCGv r_tmp = tcg_temp_local_new();
3798 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3800 gen_helper_wrpstate(cpu_env, r_tmp);
3801 tcg_temp_free(r_tmp);
3802 dc->npc = DYNAMIC_PC;
3807 TCGv r_tmp = tcg_temp_local_new();
3809 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3811 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3812 tcg_temp_free(r_tmp);
3813 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3814 offsetof(CPUSPARCState, tl));
3815 dc->npc = DYNAMIC_PC;
3819 gen_helper_wrpil(cpu_env, cpu_tmp0);
3822 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3825 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3826 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3827 offsetof(CPUSPARCState,
3830 case 11: // canrestore
3831 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3832 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3833 offsetof(CPUSPARCState,
3836 case 12: // cleanwin
3837 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3838 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3839 offsetof(CPUSPARCState,
3842 case 13: // otherwin
3843 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3844 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3845 offsetof(CPUSPARCState,
3849 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3850 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3851 offsetof(CPUSPARCState,
3854 case 16: // UA2005 gl
3855 CHECK_IU_FEATURE(dc, GL);
3856 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3857 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3858 offsetof(CPUSPARCState, gl));
3860 case 26: // UA2005 strand status
3861 CHECK_IU_FEATURE(dc, HYPV);
3862 if (!hypervisor(dc))
3864 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3870 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3871 if (dc->def->nwindows != 32)
3872 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3873 (1 << dc->def->nwindows) - 1);
3874 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3878 case 0x33: /* wrtbr, UA2005 wrhpr */
3880 #ifndef TARGET_SPARC64
3881 if (!supervisor(dc))
3883 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3885 CHECK_IU_FEATURE(dc, HYPV);
3886 if (!hypervisor(dc))
3888 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3891 // XXX gen_op_wrhpstate();
3898 // XXX gen_op_wrhtstate();
3901 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3904 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3906 case 31: // hstick_cmpr
3910 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3911 r_tickptr = tcg_temp_new_ptr();
3912 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3913 offsetof(CPUSPARCState, hstick));
3914 gen_helper_tick_set_limit(r_tickptr,
3916 tcg_temp_free_ptr(r_tickptr);
3919 case 6: // hver readonly
3927 #ifdef TARGET_SPARC64
3928 case 0x2c: /* V9 movcc */
3930 int cc = GET_FIELD_SP(insn, 11, 12);
3931 int cond = GET_FIELD_SP(insn, 14, 17);
3934 if (insn & (1 << 18)) {
3936 gen_compare(&cmp, 0, cond, dc);
3937 } else if (cc == 2) {
3938 gen_compare(&cmp, 1, cond, dc);
3943 gen_fcompare(&cmp, cc, cond);
3946 /* The get_src2 above loaded the normal 13-bit
3947 immediate field, not the 11-bit field we have
3948 in movcc. But it did handle the reg case. */
3950 simm = GET_FIELD_SPs(insn, 0, 10);
3951 tcg_gen_movi_tl(cpu_src2, simm);
3954 gen_movl_reg_TN(rd, cpu_dst);
3955 tcg_gen_movcond_tl(cmp.cond, cpu_dst,
3959 gen_movl_TN_reg(rd, cpu_dst);
3962 case 0x2d: /* V9 sdivx */
3963 gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3964 gen_movl_TN_reg(rd, cpu_dst);
3966 case 0x2e: /* V9 popc */
3968 cpu_src2 = get_src2(insn, cpu_src2);
3969 gen_helper_popc(cpu_dst, cpu_src2);
3970 gen_movl_TN_reg(rd, cpu_dst);
3972 case 0x2f: /* V9 movr */
3974 int cond = GET_FIELD_SP(insn, 10, 12);
3977 gen_compare_reg(&cmp, cond, cpu_src1);
3979 /* The get_src2 above loaded the normal 13-bit
3980 immediate field, not the 10-bit field we have
3981 in movr. But it did handle the reg case. */
3983 simm = GET_FIELD_SPs(insn, 0, 9);
3984 tcg_gen_movi_tl(cpu_src2, simm);
3987 gen_movl_reg_TN(rd, cpu_dst);
3988 tcg_gen_movcond_tl(cmp.cond, cpu_dst,
3992 gen_movl_TN_reg(rd, cpu_dst);
4000 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4001 #ifdef TARGET_SPARC64
4002 int opf = GET_FIELD_SP(insn, 5, 13);
4003 rs1 = GET_FIELD(insn, 13, 17);
4004 rs2 = GET_FIELD(insn, 27, 31);
4005 if (gen_trap_ifnofpu(dc)) {
4010 case 0x000: /* VIS I edge8cc */
4011 CHECK_FPU_FEATURE(dc, VIS1);
4012 gen_movl_reg_TN(rs1, cpu_src1);
4013 gen_movl_reg_TN(rs2, cpu_src2);
4014 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4015 gen_movl_TN_reg(rd, cpu_dst);
4017 case 0x001: /* VIS II edge8n */
4018 CHECK_FPU_FEATURE(dc, VIS2);
4019 gen_movl_reg_TN(rs1, cpu_src1);
4020 gen_movl_reg_TN(rs2, cpu_src2);
4021 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4022 gen_movl_TN_reg(rd, cpu_dst);
4024 case 0x002: /* VIS I edge8lcc */
4025 CHECK_FPU_FEATURE(dc, VIS1);
4026 gen_movl_reg_TN(rs1, cpu_src1);
4027 gen_movl_reg_TN(rs2, cpu_src2);
4028 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4029 gen_movl_TN_reg(rd, cpu_dst);
4031 case 0x003: /* VIS II edge8ln */
4032 CHECK_FPU_FEATURE(dc, VIS2);
4033 gen_movl_reg_TN(rs1, cpu_src1);
4034 gen_movl_reg_TN(rs2, cpu_src2);
4035 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4036 gen_movl_TN_reg(rd, cpu_dst);
4038 case 0x004: /* VIS I edge16cc */
4039 CHECK_FPU_FEATURE(dc, VIS1);
4040 gen_movl_reg_TN(rs1, cpu_src1);
4041 gen_movl_reg_TN(rs2, cpu_src2);
4042 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4043 gen_movl_TN_reg(rd, cpu_dst);
4045 case 0x005: /* VIS II edge16n */
4046 CHECK_FPU_FEATURE(dc, VIS2);
4047 gen_movl_reg_TN(rs1, cpu_src1);
4048 gen_movl_reg_TN(rs2, cpu_src2);
4049 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4050 gen_movl_TN_reg(rd, cpu_dst);
4052 case 0x006: /* VIS I edge16lcc */
4053 CHECK_FPU_FEATURE(dc, VIS1);
4054 gen_movl_reg_TN(rs1, cpu_src1);
4055 gen_movl_reg_TN(rs2, cpu_src2);
4056 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4057 gen_movl_TN_reg(rd, cpu_dst);
4059 case 0x007: /* VIS II edge16ln */
4060 CHECK_FPU_FEATURE(dc, VIS2);
4061 gen_movl_reg_TN(rs1, cpu_src1);
4062 gen_movl_reg_TN(rs2, cpu_src2);
4063 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4064 gen_movl_TN_reg(rd, cpu_dst);
4066 case 0x008: /* VIS I edge32cc */
4067 CHECK_FPU_FEATURE(dc, VIS1);
4068 gen_movl_reg_TN(rs1, cpu_src1);
4069 gen_movl_reg_TN(rs2, cpu_src2);
4070 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4071 gen_movl_TN_reg(rd, cpu_dst);
4073 case 0x009: /* VIS II edge32n */
4074 CHECK_FPU_FEATURE(dc, VIS2);
4075 gen_movl_reg_TN(rs1, cpu_src1);
4076 gen_movl_reg_TN(rs2, cpu_src2);
4077 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4078 gen_movl_TN_reg(rd, cpu_dst);
4080 case 0x00a: /* VIS I edge32lcc */
4081 CHECK_FPU_FEATURE(dc, VIS1);
4082 gen_movl_reg_TN(rs1, cpu_src1);
4083 gen_movl_reg_TN(rs2, cpu_src2);
4084 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4085 gen_movl_TN_reg(rd, cpu_dst);
4087 case 0x00b: /* VIS II edge32ln */
4088 CHECK_FPU_FEATURE(dc, VIS2);
4089 gen_movl_reg_TN(rs1, cpu_src1);
4090 gen_movl_reg_TN(rs2, cpu_src2);
4091 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4092 gen_movl_TN_reg(rd, cpu_dst);
4094 case 0x010: /* VIS I array8 */
4095 CHECK_FPU_FEATURE(dc, VIS1);
4096 cpu_src1 = get_src1(insn, cpu_src1);
4097 gen_movl_reg_TN(rs2, cpu_src2);
4098 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4099 gen_movl_TN_reg(rd, cpu_dst);
4101 case 0x012: /* VIS I array16 */
4102 CHECK_FPU_FEATURE(dc, VIS1);
4103 cpu_src1 = get_src1(insn, cpu_src1);
4104 gen_movl_reg_TN(rs2, cpu_src2);
4105 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4106 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4107 gen_movl_TN_reg(rd, cpu_dst);
4109 case 0x014: /* VIS I array32 */
4110 CHECK_FPU_FEATURE(dc, VIS1);
4111 cpu_src1 = get_src1(insn, cpu_src1);
4112 gen_movl_reg_TN(rs2, cpu_src2);
4113 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4114 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4115 gen_movl_TN_reg(rd, cpu_dst);
4117 case 0x018: /* VIS I alignaddr */
4118 CHECK_FPU_FEATURE(dc, VIS1);
4119 cpu_src1 = get_src1(insn, cpu_src1);
4120 gen_movl_reg_TN(rs2, cpu_src2);
4121 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4122 gen_movl_TN_reg(rd, cpu_dst);
4124 case 0x01a: /* VIS I alignaddrl */
4125 CHECK_FPU_FEATURE(dc, VIS1);
4126 cpu_src1 = get_src1(insn, cpu_src1);
4127 gen_movl_reg_TN(rs2, cpu_src2);
4128 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4129 gen_movl_TN_reg(rd, cpu_dst);
4131 case 0x019: /* VIS II bmask */
4132 CHECK_FPU_FEATURE(dc, VIS2);
4133 cpu_src1 = get_src1(insn, cpu_src1);
4134 cpu_src2 = get_src1(insn, cpu_src2);
4135 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4136 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4137 gen_movl_TN_reg(rd, cpu_dst);
4139 case 0x020: /* VIS I fcmple16 */
4140 CHECK_FPU_FEATURE(dc, VIS1);
4141 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4142 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4143 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4144 gen_movl_TN_reg(rd, cpu_dst);
4146 case 0x022: /* VIS I fcmpne16 */
4147 CHECK_FPU_FEATURE(dc, VIS1);
4148 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4149 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4150 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4151 gen_movl_TN_reg(rd, cpu_dst);
4153 case 0x024: /* VIS I fcmple32 */
4154 CHECK_FPU_FEATURE(dc, VIS1);
4155 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4156 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4157 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4158 gen_movl_TN_reg(rd, cpu_dst);
4160 case 0x026: /* VIS I fcmpne32 */
4161 CHECK_FPU_FEATURE(dc, VIS1);
4162 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4163 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4164 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4165 gen_movl_TN_reg(rd, cpu_dst);
4167 case 0x028: /* VIS I fcmpgt16 */
4168 CHECK_FPU_FEATURE(dc, VIS1);
4169 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4170 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4171 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4172 gen_movl_TN_reg(rd, cpu_dst);
4174 case 0x02a: /* VIS I fcmpeq16 */
4175 CHECK_FPU_FEATURE(dc, VIS1);
4176 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4177 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4178 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4179 gen_movl_TN_reg(rd, cpu_dst);
4181 case 0x02c: /* VIS I fcmpgt32 */
4182 CHECK_FPU_FEATURE(dc, VIS1);
4183 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4184 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4185 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4186 gen_movl_TN_reg(rd, cpu_dst);
4188 case 0x02e: /* VIS I fcmpeq32 */
4189 CHECK_FPU_FEATURE(dc, VIS1);
4190 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4191 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4192 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4193 gen_movl_TN_reg(rd, cpu_dst);
4195 case 0x031: /* VIS I fmul8x16 */
4196 CHECK_FPU_FEATURE(dc, VIS1);
4197 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4199 case 0x033: /* VIS I fmul8x16au */
4200 CHECK_FPU_FEATURE(dc, VIS1);
4201 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4203 case 0x035: /* VIS I fmul8x16al */
4204 CHECK_FPU_FEATURE(dc, VIS1);
4205 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4207 case 0x036: /* VIS I fmul8sux16 */
4208 CHECK_FPU_FEATURE(dc, VIS1);
4209 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4211 case 0x037: /* VIS I fmul8ulx16 */
4212 CHECK_FPU_FEATURE(dc, VIS1);
4213 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4215 case 0x038: /* VIS I fmuld8sux16 */
4216 CHECK_FPU_FEATURE(dc, VIS1);
4217 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4219 case 0x039: /* VIS I fmuld8ulx16 */
4220 CHECK_FPU_FEATURE(dc, VIS1);
4221 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4223 case 0x03a: /* VIS I fpack32 */
4224 CHECK_FPU_FEATURE(dc, VIS1);
4225 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4227 case 0x03b: /* VIS I fpack16 */
4228 CHECK_FPU_FEATURE(dc, VIS1);
4229 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4230 cpu_dst_32 = gen_dest_fpr_F();
4231 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4232 gen_store_fpr_F(dc, rd, cpu_dst_32);
4234 case 0x03d: /* VIS I fpackfix */
4235 CHECK_FPU_FEATURE(dc, VIS1);
4236 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4237 cpu_dst_32 = gen_dest_fpr_F();
4238 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4239 gen_store_fpr_F(dc, rd, cpu_dst_32);
4241 case 0x03e: /* VIS I pdist */
4242 CHECK_FPU_FEATURE(dc, VIS1);
4243 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4245 case 0x048: /* VIS I faligndata */
4246 CHECK_FPU_FEATURE(dc, VIS1);
4247 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4249 case 0x04b: /* VIS I fpmerge */
4250 CHECK_FPU_FEATURE(dc, VIS1);
4251 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4253 case 0x04c: /* VIS II bshuffle */
4254 CHECK_FPU_FEATURE(dc, VIS2);
4255 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4257 case 0x04d: /* VIS I fexpand */
4258 CHECK_FPU_FEATURE(dc, VIS1);
4259 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4261 case 0x050: /* VIS I fpadd16 */
4262 CHECK_FPU_FEATURE(dc, VIS1);
4263 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4265 case 0x051: /* VIS I fpadd16s */
4266 CHECK_FPU_FEATURE(dc, VIS1);
4267 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4269 case 0x052: /* VIS I fpadd32 */
4270 CHECK_FPU_FEATURE(dc, VIS1);
4271 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4273 case 0x053: /* VIS I fpadd32s */
4274 CHECK_FPU_FEATURE(dc, VIS1);
4275 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4277 case 0x054: /* VIS I fpsub16 */
4278 CHECK_FPU_FEATURE(dc, VIS1);
4279 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4281 case 0x055: /* VIS I fpsub16s */
4282 CHECK_FPU_FEATURE(dc, VIS1);
4283 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4285 case 0x056: /* VIS I fpsub32 */
4286 CHECK_FPU_FEATURE(dc, VIS1);
4287 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4289 case 0x057: /* VIS I fpsub32s */
4290 CHECK_FPU_FEATURE(dc, VIS1);
4291 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4293 case 0x060: /* VIS I fzero */
4294 CHECK_FPU_FEATURE(dc, VIS1);
4295 cpu_dst_64 = gen_dest_fpr_D();
4296 tcg_gen_movi_i64(cpu_dst_64, 0);
4297 gen_store_fpr_D(dc, rd, cpu_dst_64);
4299 case 0x061: /* VIS I fzeros */
4300 CHECK_FPU_FEATURE(dc, VIS1);
4301 cpu_dst_32 = gen_dest_fpr_F();
4302 tcg_gen_movi_i32(cpu_dst_32, 0);
4303 gen_store_fpr_F(dc, rd, cpu_dst_32);
4305 case 0x062: /* VIS I fnor */
4306 CHECK_FPU_FEATURE(dc, VIS1);
4307 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4309 case 0x063: /* VIS I fnors */
4310 CHECK_FPU_FEATURE(dc, VIS1);
4311 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4313 case 0x064: /* VIS I fandnot2 */
4314 CHECK_FPU_FEATURE(dc, VIS1);
4315 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4317 case 0x065: /* VIS I fandnot2s */
4318 CHECK_FPU_FEATURE(dc, VIS1);
4319 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4321 case 0x066: /* VIS I fnot2 */
4322 CHECK_FPU_FEATURE(dc, VIS1);
4323 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4325 case 0x067: /* VIS I fnot2s */
4326 CHECK_FPU_FEATURE(dc, VIS1);
4327 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4329 case 0x068: /* VIS I fandnot1 */
4330 CHECK_FPU_FEATURE(dc, VIS1);
4331 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4333 case 0x069: /* VIS I fandnot1s */
4334 CHECK_FPU_FEATURE(dc, VIS1);
4335 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4337 case 0x06a: /* VIS I fnot1 */
4338 CHECK_FPU_FEATURE(dc, VIS1);
4339 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4341 case 0x06b: /* VIS I fnot1s */
4342 CHECK_FPU_FEATURE(dc, VIS1);
4343 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4345 case 0x06c: /* VIS I fxor */
4346 CHECK_FPU_FEATURE(dc, VIS1);
4347 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4349 case 0x06d: /* VIS I fxors */
4350 CHECK_FPU_FEATURE(dc, VIS1);
4351 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4353 case 0x06e: /* VIS I fnand */
4354 CHECK_FPU_FEATURE(dc, VIS1);
4355 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4357 case 0x06f: /* VIS I fnands */
4358 CHECK_FPU_FEATURE(dc, VIS1);
4359 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4361 case 0x070: /* VIS I fand */
4362 CHECK_FPU_FEATURE(dc, VIS1);
4363 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4365 case 0x071: /* VIS I fands */
4366 CHECK_FPU_FEATURE(dc, VIS1);
4367 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4369 case 0x072: /* VIS I fxnor */
4370 CHECK_FPU_FEATURE(dc, VIS1);
4371 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4373 case 0x073: /* VIS I fxnors */
4374 CHECK_FPU_FEATURE(dc, VIS1);
4375 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4377 case 0x074: /* VIS I fsrc1 */
4378 CHECK_FPU_FEATURE(dc, VIS1);
4379 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4380 gen_store_fpr_D(dc, rd, cpu_src1_64);
4382 case 0x075: /* VIS I fsrc1s */
4383 CHECK_FPU_FEATURE(dc, VIS1);
4384 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4385 gen_store_fpr_F(dc, rd, cpu_src1_32);
4387 case 0x076: /* VIS I fornot2 */
4388 CHECK_FPU_FEATURE(dc, VIS1);
4389 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4391 case 0x077: /* VIS I fornot2s */
4392 CHECK_FPU_FEATURE(dc, VIS1);
4393 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4395 case 0x078: /* VIS I fsrc2 */
4396 CHECK_FPU_FEATURE(dc, VIS1);
4397 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4398 gen_store_fpr_D(dc, rd, cpu_src1_64);
4400 case 0x079: /* VIS I fsrc2s */
4401 CHECK_FPU_FEATURE(dc, VIS1);
4402 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4403 gen_store_fpr_F(dc, rd, cpu_src1_32);
4405 case 0x07a: /* VIS I fornot1 */
4406 CHECK_FPU_FEATURE(dc, VIS1);
4407 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4409 case 0x07b: /* VIS I fornot1s */
4410 CHECK_FPU_FEATURE(dc, VIS1);
4411 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4413 case 0x07c: /* VIS I for */
4414 CHECK_FPU_FEATURE(dc, VIS1);
4415 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4417 case 0x07d: /* VIS I fors */
4418 CHECK_FPU_FEATURE(dc, VIS1);
4419 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4421 case 0x07e: /* VIS I fone */
4422 CHECK_FPU_FEATURE(dc, VIS1);
4423 cpu_dst_64 = gen_dest_fpr_D();
4424 tcg_gen_movi_i64(cpu_dst_64, -1);
4425 gen_store_fpr_D(dc, rd, cpu_dst_64);
4427 case 0x07f: /* VIS I fones */
4428 CHECK_FPU_FEATURE(dc, VIS1);
4429 cpu_dst_32 = gen_dest_fpr_F();
4430 tcg_gen_movi_i32(cpu_dst_32, -1);
4431 gen_store_fpr_F(dc, rd, cpu_dst_32);
4433 case 0x080: /* VIS I shutdown */
4434 case 0x081: /* VIS II siam */
4443 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4444 #ifdef TARGET_SPARC64
4449 #ifdef TARGET_SPARC64
4450 } else if (xop == 0x39) { /* V9 return */
4454 cpu_src1 = get_src1(insn, cpu_src1);
4455 if (IS_IMM) { /* immediate */
4456 simm = GET_FIELDs(insn, 19, 31);
4457 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4458 } else { /* register */
4459 rs2 = GET_FIELD(insn, 27, 31);
4461 gen_movl_reg_TN(rs2, cpu_src2);
4462 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4464 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4466 gen_helper_restore(cpu_env);
4468 r_const = tcg_const_i32(3);
4469 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4470 tcg_temp_free_i32(r_const);
4471 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4472 dc->npc = DYNAMIC_PC;
4476 cpu_src1 = get_src1(insn, cpu_src1);
4477 if (IS_IMM) { /* immediate */
4478 simm = GET_FIELDs(insn, 19, 31);
4479 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4480 } else { /* register */
4481 rs2 = GET_FIELD(insn, 27, 31);
4483 gen_movl_reg_TN(rs2, cpu_src2);
4484 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4486 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4489 case 0x38: /* jmpl */
4494 r_pc = tcg_const_tl(dc->pc);
4495 gen_movl_TN_reg(rd, r_pc);
4496 tcg_temp_free(r_pc);
4498 r_const = tcg_const_i32(3);
4499 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4500 tcg_temp_free_i32(r_const);
4501 gen_address_mask(dc, cpu_dst);
4502 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4503 dc->npc = DYNAMIC_PC;
4506 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4507 case 0x39: /* rett, V9 return */
4511 if (!supervisor(dc))
4514 r_const = tcg_const_i32(3);
4515 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4516 tcg_temp_free_i32(r_const);
4517 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4518 dc->npc = DYNAMIC_PC;
4519 gen_helper_rett(cpu_env);
4523 case 0x3b: /* flush */
4524 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4528 case 0x3c: /* save */
4530 gen_helper_save(cpu_env);
4531 gen_movl_TN_reg(rd, cpu_dst);
4533 case 0x3d: /* restore */
4535 gen_helper_restore(cpu_env);
4536 gen_movl_TN_reg(rd, cpu_dst);
4538 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4539 case 0x3e: /* V9 done/retry */
4543 if (!supervisor(dc))
4545 dc->npc = DYNAMIC_PC;
4546 dc->pc = DYNAMIC_PC;
4547 gen_helper_done(cpu_env);
4550 if (!supervisor(dc))
4552 dc->npc = DYNAMIC_PC;
4553 dc->pc = DYNAMIC_PC;
4554 gen_helper_retry(cpu_env);
4569 case 3: /* load/store instructions */
4571 unsigned int xop = GET_FIELD(insn, 7, 12);
4573 /* flush pending conditional evaluations before exposing
4575 if (dc->cc_op != CC_OP_FLAGS) {
4576 dc->cc_op = CC_OP_FLAGS;
4577 gen_helper_compute_psr(cpu_env);
4579 cpu_src1 = get_src1(insn, cpu_src1);
4580 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4581 rs2 = GET_FIELD(insn, 27, 31);
4582 gen_movl_reg_TN(rs2, cpu_src2);
4583 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4584 } else if (IS_IMM) { /* immediate */
4585 simm = GET_FIELDs(insn, 19, 31);
4586 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4587 } else { /* register */
4588 rs2 = GET_FIELD(insn, 27, 31);
4590 gen_movl_reg_TN(rs2, cpu_src2);
4591 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4593 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4595 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4596 (xop > 0x17 && xop <= 0x1d ) ||
4597 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4599 case 0x0: /* ld, V9 lduw, load unsigned word */
4600 gen_address_mask(dc, cpu_addr);
4601 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4603 case 0x1: /* ldub, load unsigned byte */
4604 gen_address_mask(dc, cpu_addr);
4605 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4607 case 0x2: /* lduh, load unsigned halfword */
4608 gen_address_mask(dc, cpu_addr);
4609 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4611 case 0x3: /* ldd, load double word */
4618 r_const = tcg_const_i32(7);
4619 /* XXX remove alignment check */
4620 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4621 tcg_temp_free_i32(r_const);
4622 gen_address_mask(dc, cpu_addr);
4623 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4624 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4625 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4626 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4627 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4628 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4629 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4632 case 0x9: /* ldsb, load signed byte */
4633 gen_address_mask(dc, cpu_addr);
4634 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4636 case 0xa: /* ldsh, load signed halfword */
4637 gen_address_mask(dc, cpu_addr);
4638 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4640 case 0xd: /* ldstub -- XXX: should be atomically */
4644 gen_address_mask(dc, cpu_addr);
4645 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4646 r_const = tcg_const_tl(0xff);
4647 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4648 tcg_temp_free(r_const);
4651 case 0x0f: /* swap, swap register with memory. Also
4653 CHECK_IU_FEATURE(dc, SWAP);
4654 gen_movl_reg_TN(rd, cpu_val);
4655 gen_address_mask(dc, cpu_addr);
4656 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4657 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4658 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4660 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4661 case 0x10: /* lda, V9 lduwa, load word alternate */
4662 #ifndef TARGET_SPARC64
4665 if (!supervisor(dc))
4669 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4671 case 0x11: /* lduba, load unsigned byte alternate */
4672 #ifndef TARGET_SPARC64
4675 if (!supervisor(dc))
4679 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4681 case 0x12: /* lduha, load unsigned halfword alternate */
4682 #ifndef TARGET_SPARC64
4685 if (!supervisor(dc))
4689 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4691 case 0x13: /* ldda, load double word alternate */
4692 #ifndef TARGET_SPARC64
4695 if (!supervisor(dc))
4701 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4703 case 0x19: /* ldsba, load signed byte alternate */
4704 #ifndef TARGET_SPARC64
4707 if (!supervisor(dc))
4711 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4713 case 0x1a: /* ldsha, load signed halfword alternate */
4714 #ifndef TARGET_SPARC64
4717 if (!supervisor(dc))
4721 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4723 case 0x1d: /* ldstuba -- XXX: should be atomically */
4724 #ifndef TARGET_SPARC64
4727 if (!supervisor(dc))
4731 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4733 case 0x1f: /* swapa, swap reg with alt. memory. Also
4735 CHECK_IU_FEATURE(dc, SWAP);
4736 #ifndef TARGET_SPARC64
4739 if (!supervisor(dc))
4743 gen_movl_reg_TN(rd, cpu_val);
4744 gen_swap_asi(cpu_val, cpu_addr, insn);
4747 #ifndef TARGET_SPARC64
4748 case 0x30: /* ldc */
4749 case 0x31: /* ldcsr */
4750 case 0x33: /* lddc */
4754 #ifdef TARGET_SPARC64
4755 case 0x08: /* V9 ldsw */
4756 gen_address_mask(dc, cpu_addr);
4757 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4759 case 0x0b: /* V9 ldx */
4760 gen_address_mask(dc, cpu_addr);
4761 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4763 case 0x18: /* V9 ldswa */
4765 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4767 case 0x1b: /* V9 ldxa */
4769 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4771 case 0x2d: /* V9 prefetch, no effect */
4773 case 0x30: /* V9 ldfa */
4774 if (gen_trap_ifnofpu(dc)) {
4778 gen_ldf_asi(cpu_addr, insn, 4, rd);
4779 gen_update_fprs_dirty(rd);
4781 case 0x33: /* V9 lddfa */
4782 if (gen_trap_ifnofpu(dc)) {
4786 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4787 gen_update_fprs_dirty(DFPREG(rd));
4789 case 0x3d: /* V9 prefetcha, no effect */
4791 case 0x32: /* V9 ldqfa */
4792 CHECK_FPU_FEATURE(dc, FLOAT128);
4793 if (gen_trap_ifnofpu(dc)) {
4797 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4798 gen_update_fprs_dirty(QFPREG(rd));
4804 gen_movl_TN_reg(rd, cpu_val);
4805 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4808 } else if (xop >= 0x20 && xop < 0x24) {
4809 if (gen_trap_ifnofpu(dc)) {
4814 case 0x20: /* ldf, load fpreg */
4815 gen_address_mask(dc, cpu_addr);
4816 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4817 cpu_dst_32 = gen_dest_fpr_F();
4818 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4819 gen_store_fpr_F(dc, rd, cpu_dst_32);
4821 case 0x21: /* ldfsr, V9 ldxfsr */
4822 #ifdef TARGET_SPARC64
4823 gen_address_mask(dc, cpu_addr);
4825 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4826 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4828 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4829 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4830 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4834 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4835 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4839 case 0x22: /* ldqf, load quad fpreg */
4843 CHECK_FPU_FEATURE(dc, FLOAT128);
4844 r_const = tcg_const_i32(dc->mem_idx);
4845 gen_address_mask(dc, cpu_addr);
4846 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4847 tcg_temp_free_i32(r_const);
4848 gen_op_store_QT0_fpr(QFPREG(rd));
4849 gen_update_fprs_dirty(QFPREG(rd));
4852 case 0x23: /* lddf, load double fpreg */
4853 gen_address_mask(dc, cpu_addr);
4854 cpu_dst_64 = gen_dest_fpr_D();
4855 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4856 gen_store_fpr_D(dc, rd, cpu_dst_64);
4861 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4862 xop == 0xe || xop == 0x1e) {
4863 gen_movl_reg_TN(rd, cpu_val);
4865 case 0x4: /* st, store word */
4866 gen_address_mask(dc, cpu_addr);
4867 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4869 case 0x5: /* stb, store byte */
4870 gen_address_mask(dc, cpu_addr);
4871 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4873 case 0x6: /* sth, store halfword */
4874 gen_address_mask(dc, cpu_addr);
4875 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4877 case 0x7: /* std, store double word */
4884 gen_address_mask(dc, cpu_addr);
4885 r_const = tcg_const_i32(7);
4886 /* XXX remove alignment check */
4887 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4888 tcg_temp_free_i32(r_const);
4889 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4890 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4891 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4894 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4895 case 0x14: /* sta, V9 stwa, store word alternate */
4896 #ifndef TARGET_SPARC64
4899 if (!supervisor(dc))
4903 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4904 dc->npc = DYNAMIC_PC;
4906 case 0x15: /* stba, store byte alternate */
4907 #ifndef TARGET_SPARC64
4910 if (!supervisor(dc))
4914 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4915 dc->npc = DYNAMIC_PC;
4917 case 0x16: /* stha, store halfword alternate */
4918 #ifndef TARGET_SPARC64
4921 if (!supervisor(dc))
4925 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4926 dc->npc = DYNAMIC_PC;
4928 case 0x17: /* stda, store double word alternate */
4929 #ifndef TARGET_SPARC64
4932 if (!supervisor(dc))
4939 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4943 #ifdef TARGET_SPARC64
4944 case 0x0e: /* V9 stx */
4945 gen_address_mask(dc, cpu_addr);
4946 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4948 case 0x1e: /* V9 stxa */
4950 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4951 dc->npc = DYNAMIC_PC;
4957 } else if (xop > 0x23 && xop < 0x28) {
4958 if (gen_trap_ifnofpu(dc)) {
4963 case 0x24: /* stf, store fpreg */
4964 gen_address_mask(dc, cpu_addr);
4965 cpu_src1_32 = gen_load_fpr_F(dc, rd);
4966 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
4967 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4969 case 0x25: /* stfsr, V9 stxfsr */
4970 #ifdef TARGET_SPARC64
4971 gen_address_mask(dc, cpu_addr);
4972 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
4974 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4976 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4978 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
4979 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4983 #ifdef TARGET_SPARC64
4984 /* V9 stqf, store quad fpreg */
4988 CHECK_FPU_FEATURE(dc, FLOAT128);
4989 gen_op_load_fpr_QT0(QFPREG(rd));
4990 r_const = tcg_const_i32(dc->mem_idx);
4991 gen_address_mask(dc, cpu_addr);
4992 gen_helper_stqf(cpu_env, cpu_addr, r_const);
4993 tcg_temp_free_i32(r_const);
4996 #else /* !TARGET_SPARC64 */
4997 /* stdfq, store floating point queue */
4998 #if defined(CONFIG_USER_ONLY)
5001 if (!supervisor(dc))
5003 if (gen_trap_ifnofpu(dc)) {
5009 case 0x27: /* stdf, store double fpreg */
5010 gen_address_mask(dc, cpu_addr);
5011 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5012 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5017 } else if (xop > 0x33 && xop < 0x3f) {
5020 #ifdef TARGET_SPARC64
5021 case 0x34: /* V9 stfa */
5022 if (gen_trap_ifnofpu(dc)) {
5025 gen_stf_asi(cpu_addr, insn, 4, rd);
5027 case 0x36: /* V9 stqfa */
5031 CHECK_FPU_FEATURE(dc, FLOAT128);
5032 if (gen_trap_ifnofpu(dc)) {
5035 r_const = tcg_const_i32(7);
5036 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5037 tcg_temp_free_i32(r_const);
5038 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5041 case 0x37: /* V9 stdfa */
5042 if (gen_trap_ifnofpu(dc)) {
5045 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5047 case 0x3c: /* V9 casa */
5048 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5049 gen_movl_TN_reg(rd, cpu_val);
5051 case 0x3e: /* V9 casxa */
5052 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5053 gen_movl_TN_reg(rd, cpu_val);
5056 case 0x34: /* stc */
5057 case 0x35: /* stcsr */
5058 case 0x36: /* stdcq */
5059 case 0x37: /* stdc */
5070 /* default case for non jump instructions */
5071 if (dc->npc == DYNAMIC_PC) {
5072 dc->pc = DYNAMIC_PC;
5074 } else if (dc->npc == JUMP_PC) {
5075 /* we can do a static jump */
5076 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5080 dc->npc = dc->npc + 4;
5089 r_const = tcg_const_i32(TT_ILL_INSN);
5090 gen_helper_raise_exception(cpu_env, r_const);
5091 tcg_temp_free_i32(r_const);
5100 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5101 gen_helper_raise_exception(cpu_env, r_const);
5102 tcg_temp_free_i32(r_const);
5106 #if !defined(CONFIG_USER_ONLY)
5112 r_const = tcg_const_i32(TT_PRIV_INSN);
5113 gen_helper_raise_exception(cpu_env, r_const);
5114 tcg_temp_free_i32(r_const);
5121 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5124 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5127 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5131 #ifndef TARGET_SPARC64
5137 r_const = tcg_const_i32(TT_NCP_INSN);
5138 gen_helper_raise_exception(cpu_env, r_const);
5139 tcg_temp_free(r_const);
5145 tcg_temp_free(cpu_tmp1);
5146 tcg_temp_free(cpu_tmp2);
5147 if (dc->n_t32 != 0) {
5149 for (i = dc->n_t32 - 1; i >= 0; --i) {
5150 tcg_temp_free_i32(dc->t32[i]);
/* Core translation loop: decode guest SPARC instructions starting at
 * tb->pc and emit TCG ops until the TB must end (dynamic PC, page
 * boundary, op-buffer full, or instruction budget exhausted).
 * spc != 0 selects "search PC" mode (used by gen_intermediate_code_pc):
 * the gen_opc_* side tables are filled so that a host PC inside the
 * generated code can later be mapped back to a guest (pc, npc) pair by
 * restore_state_to_opc().
 * NOTE(review): several lines (braces and some statements) are missing
 * from this excerpt; the comments below cover only the visible code. */
5156 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
5157 int spc, CPUSPARCState *env)
5159 target_ulong pc_start, last_pc;
5160 uint16_t *gen_opc_end;
5161 DisasContext dc1, *dc = &dc1;
/* Start from a zeroed per-TB disassembly context. */
5168 memset(dc, 0, sizeof(DisasContext));
/* Because of SPARC delay slots the next PC is part of the CPU state:
 * it is carried in tb->cs_base. */
5173 dc->npc = (target_ulong) tb->cs_base;
5174 dc->cc_op = CC_OP_DYNAMIC;
5175 dc->mem_idx = cpu_mmu_index(env);
5177 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5178 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5179 dc->singlestep = (env->singlestep_enabled || singlestep);
5180 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
/* Instruction budget from the TB flags; a zero count presumably falls
 * back to the CF_COUNT_MASK maximum (the selecting line is elided). */
5183 max_insns = tb->cflags & CF_COUNT_MASK;
5185 max_insns = CF_COUNT_MASK;
/* If a breakpoint is set on the current guest PC, emit a debug trap
 * instead of translating further. */
5188 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5189 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5190 if (bp->pc == dc->pc) {
5191 if (dc->pc != pc_start)
5193 gen_helper_debug(cpu_env);
/* spc mode: record per-op guest PC/NPC and icount so the exception
 * machinery can recover the exact guest state later. */
5201 qemu_log("Search PC...\n");
5202 j = gen_opc_ptr - gen_opc_buf;
5206 gen_opc_instr_start[lj++] = 0;
5207 gen_opc_pc[lj] = dc->pc;
5208 gen_opc_npc[lj] = dc->npc;
5209 gen_opc_instr_start[lj] = 1;
5210 gen_opc_icount[lj] = num_insns;
/* Let the last instruction of an icount TB do I/O. */
5213 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5216 insn = cpu_ldl_code(env, dc->pc);
/* Per-instruction TCG temporaries consumed by disas_sparc_insn();
 * allocated fresh and freed again immediately after decode. */
5218 cpu_tmp0 = tcg_temp_new();
5219 cpu_tmp32 = tcg_temp_new_i32();
5220 cpu_tmp64 = tcg_temp_new_i64();
5221 cpu_dst = tcg_temp_new();
5222 cpu_val = tcg_temp_new();
5223 cpu_addr = tcg_temp_new();
5225 disas_sparc_insn(dc, insn);
5228 tcg_temp_free(cpu_addr);
5229 tcg_temp_free(cpu_val);
5230 tcg_temp_free(cpu_dst);
5231 tcg_temp_free_i64(cpu_tmp64);
5232 tcg_temp_free_i32(cpu_tmp32);
5233 tcg_temp_free(cpu_tmp0);
5237 /* if the next PC is different, we abort now */
5238 if (dc->pc != (last_pc + 4))
5240 /* if we reach a page boundary, we stop generation so that the
5241 PC of a TT_TFAULT exception is always in the right page */
5242 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5244 /* if single step mode, we generate only one instruction and
5245 generate an exception */
5246 if (dc->singlestep) {
/* Loop bounds: op buffer space, a safety margin before the next guest
 * page, and the icount budget. */
5249 } while ((gen_opc_ptr < gen_opc_end) &&
5250 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5251 num_insns < max_insns);
5254 if (tb->cflags & CF_LAST_IO) {
5258 if (dc->pc != DYNAMIC_PC &&
5259 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5260 /* static PC and NPC: we can use direct chaining */
5261 gen_goto_tb(dc, 0, dc->pc, dc->npc);
/* Otherwise materialize whatever part of the PC is still static. */
5263 if (dc->pc != DYNAMIC_PC) {
5264 tcg_gen_movi_tl(cpu_pc, dc->pc);
/* Terminate the op stream; in spc mode clear the tail of the
 * instr_start table past the last generated op. */
5270 gen_icount_end(tb, num_insns);
5271 *gen_opc_ptr = INDEX_op_end;
5273 j = gen_opc_ptr - gen_opc_buf;
5276 gen_opc_instr_start[lj++] = 0;
/* Save the two possible branch targets for restore_state_to_opc(). */
5280 gen_opc_jump_pc[0] = dc->jump_pc[0];
5281 gen_opc_jump_pc[1] = dc->jump_pc[1];
5283 tb->size = last_pc + 4 - pc_start;
5284 tb->icount = num_insns;
/* Optional disassembly dump of the guest code just translated. */
5287 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5288 qemu_log("--------------\n");
5289 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5290 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
/* Public entry point for normal translation: spc == 0, so no PC-search
 * side tables are filled. */
5296 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5298 gen_intermediate_code_internal(tb, 0, env);
/* Entry point for re-translation with PC search: spc == 1 makes the
 * translator fill gen_opc_pc/gen_opc_npc/gen_opc_jump_pc so a faulting
 * host PC can be mapped back to guest state. */
5301 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5303 gen_intermediate_code_internal(tb, 1, env);
/* One-time initialization of the TCG front end for SPARC: binds every
 * file-scope cpu_* TCGv global to its field in CPUSPARCState (or, for
 * cpu_env, to the fixed host register TCG_AREG0) and registers the
 * helper functions.
 * NOTE(review): several lines (trailing name-string arguments, braces,
 * some array entries) are missing from this excerpt; comments describe
 * only the visible code. */
5306 void gen_intermediate_code_init(CPUSPARCState *env)
/* Register-name tables used for TCG debug output. */
5310 static const char * const gregnames[8] = {
5311 NULL, // g0 not used
/* Names for the 64-bit FP register pairs, hence the even numbering
 * f0..f62 (one entry per TARGET_DPREGS element). */
5320 static const char * const fregnames[32] = {
5321 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5322 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5323 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5324 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5327 /* init various static tables */
5331 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5332 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5333 offsetof(CPUSPARCState, regwptr),
/* SPARC64-only globals: extended condition codes, ASI, FPRS, GSR and
 * the various timer-compare / hypervisor registers. */
5335 #ifdef TARGET_SPARC64
5336 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5338 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5340 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5342 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5344 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5345 offsetof(CPUSPARCState, tick_cmpr),
5347 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5348 offsetof(CPUSPARCState, stick_cmpr),
5350 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5351 offsetof(CPUSPARCState, hstick_cmpr),
5353 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5355 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5357 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5359 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5360 offsetof(CPUSPARCState, ssr), "ssr");
5361 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5362 offsetof(CPUSPARCState, version), "ver");
5363 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5364 offsetof(CPUSPARCState, softint),
/* 32-bit SPARC only: window invalid mask. */
5367 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
/* Globals common to both 32- and 64-bit targets. */
5370 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5372 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5374 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5375 offsetof(CPUSPARCState, cc_src2),
5377 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5379 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5381 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5383 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5385 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5387 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5389 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5390 #ifndef CONFIG_USER_ONLY
5391 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
/* %g1..%g7 get TCG globals; %g0 is hardwired to zero and never
 * materialized (see the NULL entry in gregnames). */
5394 for (i = 1; i < 8; i++) {
5395 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5396 offsetof(CPUSPARCState, gregs[i]),
5399 for (i = 0; i < TARGET_DPREGS; i++) {
5400 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5401 offsetof(CPUSPARCState, fpr[i]),
5405 /* register helpers */
5407 #define GEN_HELPER 2
5412 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5415 env->pc = gen_opc_pc[pc_pos];
5416 npc = gen_opc_npc[pc_pos];
5418 /* dynamic NPC: already stored */
5419 } else if (npc == 2) {
5420 /* jump PC: use 'cond' and the jump targets of the translation */
5422 env->npc = gen_opc_jump_pc[0];
5424 env->npc = gen_opc_jump_pc[1];
5430 /* flush pending conditional evaluations before exposing cpu state */
5431 if (CC_OP != CC_OP_FLAGS) {
5432 helper_compute_psr(env);