/*
 * Copyright (C) 2003-2005 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
/* Sentinel values stored in DisasContext.pc / DisasContext.npc when the
   (next) program counter is not a compile-time constant.  */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 #ifndef CONFIG_USER_ONLY
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
61 /* local register indexes (only used inside old micro ops) */
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
71 #include "gen-icount.h"
73 typedef struct DisasContext {
74 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
80 int address_mask_32bit;
82 uint32_t cc_op; /* current CC operation */
83 struct TranslationBlock *tb;
// This macro uses non-native bit order: bit 0 is the most significant
// bit (2^31), matching the instruction diagrams in the SPARC manuals.
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the order in the manuals, i.e. bit 0 is 2^0.
#define GET_FIELD_SP(X, FROM, TO)                               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

// Sign-extending variants of the two extractors above.
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* Map a 5-bit double/quad FP register specifier onto the flat register
   file index.  On sparc64 the low bit of the specifier selects the
   upper bank of 32 registers.  */
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

/* Widths of the software trap number fields.  */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low LEN bits of X to a full int.
 *
 * The historical form shifted the value left by (32 - len) and back,
 * which left-shifts a possibly negative int — undefined behavior in C.
 * The mask/xor formulation below is fully portable and produces the
 * same results for 1 <= len <= 32.
 */
static int sign_extend(int x, int len)
{
    uint32_t mask, sign, v;

    if (len <= 0 || len >= 32) {
        return x;
    }
    mask = ((uint32_t)1 << len) - 1;
    sign = (uint32_t)1 << (len - 1);
    v = (uint32_t)x & mask;
    /* (v ^ sign) - sign replicates bit (len-1) into the high bits.  */
    return (int)((v ^ sign) - sign);
}
117 #define IS_IMM (insn & (1<<13))
/* Mark the FP register bank containing rd as dirty in FPRS (sparc64 only).  */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
126 /* floating point registers moves */
127 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
129 #if TCG_TARGET_REG_BITS == 32
131 return TCGV_LOW(cpu_fpr[src / 2]);
133 return TCGV_HIGH(cpu_fpr[src / 2]);
137 return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
139 TCGv_i32 ret = tcg_temp_local_new_i32();
140 TCGv_i64 t = tcg_temp_new_i64();
142 tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
143 tcg_gen_trunc_i64_i32(ret, t);
144 tcg_temp_free_i64(t);
146 dc->t32[dc->n_t32++] = ret;
147 assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));
154 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
156 #if TCG_TARGET_REG_BITS == 32
158 tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
160 tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
163 TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
164 tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
165 (dst & 1 ? 0 : 32), 32);
167 gen_update_fprs_dirty(dst);
170 static TCGv_i32 gen_dest_fpr_F(void)
175 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
178 return cpu_fpr[src / 2];
181 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
184 tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
185 gen_update_fprs_dirty(dst);
188 static TCGv_i64 gen_dest_fpr_D(void)
193 static void gen_op_load_fpr_QT0(unsigned int src)
195 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
196 offsetof(CPU_QuadU, ll.upper));
197 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
198 offsetof(CPU_QuadU, ll.lower));
201 static void gen_op_load_fpr_QT1(unsigned int src)
203 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
204 offsetof(CPU_QuadU, ll.upper));
205 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
206 offsetof(CPU_QuadU, ll.lower));
209 static void gen_op_store_QT0_fpr(unsigned int dst)
211 tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
212 offsetof(CPU_QuadU, ll.upper));
213 tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
214 offsetof(CPU_QuadU, ll.lower));
#ifdef TARGET_SPARC64
/* Register-to-register move of a quad-precision value (two 64-bit slots).  */
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif
/* Privilege-level predicates, keyed off the MMU index in use.  User-mode
   emulation is never privileged.  */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif

/* Should 64-bit addresses be truncated to 32 bits (PSTATE.AM)?  */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
251 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
253 #ifdef TARGET_SPARC64
255 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
259 static inline void gen_movl_reg_TN(int reg, TCGv tn)
262 tcg_gen_movi_tl(tn, 0);
264 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
266 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
270 static inline void gen_movl_TN_reg(int reg, TCGv tn)
275 tcg_gen_mov_tl(cpu_gregs[reg], tn);
277 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
281 static inline void gen_goto_tb(DisasContext *s, int tb_num,
282 target_ulong pc, target_ulong npc)
284 TranslationBlock *tb;
287 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
288 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
290 /* jump to same page: we can use a direct jump */
291 tcg_gen_goto_tb(tb_num);
292 tcg_gen_movi_tl(cpu_pc, pc);
293 tcg_gen_movi_tl(cpu_npc, npc);
294 tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
296 /* jump to another page: currently not optimized */
297 tcg_gen_movi_tl(cpu_pc, pc);
298 tcg_gen_movi_tl(cpu_npc, npc);
304 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
306 tcg_gen_extu_i32_tl(reg, src);
307 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
308 tcg_gen_andi_tl(reg, reg, 0x1);
311 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
313 tcg_gen_extu_i32_tl(reg, src);
314 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
315 tcg_gen_andi_tl(reg, reg, 0x1);
318 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
320 tcg_gen_extu_i32_tl(reg, src);
321 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
322 tcg_gen_andi_tl(reg, reg, 0x1);
325 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
327 tcg_gen_extu_i32_tl(reg, src);
328 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
329 tcg_gen_andi_tl(reg, reg, 0x1);
332 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
338 l1 = gen_new_label();
340 r_temp = tcg_temp_new();
341 tcg_gen_xor_tl(r_temp, src1, src2);
342 tcg_gen_not_tl(r_temp, r_temp);
343 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
344 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
345 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
346 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
347 r_const = tcg_const_i32(TT_TOVF);
348 gen_helper_raise_exception(cpu_env, r_const);
349 tcg_temp_free_i32(r_const);
351 tcg_temp_free(r_temp);
354 static inline void gen_tag_tv(TCGv src1, TCGv src2)
359 l1 = gen_new_label();
360 tcg_gen_or_tl(cpu_tmp0, src1, src2);
361 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
362 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
363 r_const = tcg_const_i32(TT_TOVF);
364 gen_helper_raise_exception(cpu_env, r_const);
365 tcg_temp_free_i32(r_const);
369 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
371 tcg_gen_mov_tl(cpu_cc_src, src1);
372 tcg_gen_movi_tl(cpu_cc_src2, src2);
373 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
374 tcg_gen_mov_tl(dst, cpu_cc_dst);
377 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
379 tcg_gen_mov_tl(cpu_cc_src, src1);
380 tcg_gen_mov_tl(cpu_cc_src2, src2);
381 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
382 tcg_gen_mov_tl(dst, cpu_cc_dst);
385 static TCGv_i32 gen_add32_carry32(void)
387 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
389 /* Carry is computed from a previous add: (dst < src) */
390 #if TARGET_LONG_BITS == 64
391 cc_src1_32 = tcg_temp_new_i32();
392 cc_src2_32 = tcg_temp_new_i32();
393 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
394 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
396 cc_src1_32 = cpu_cc_dst;
397 cc_src2_32 = cpu_cc_src;
400 carry_32 = tcg_temp_new_i32();
401 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
403 #if TARGET_LONG_BITS == 64
404 tcg_temp_free_i32(cc_src1_32);
405 tcg_temp_free_i32(cc_src2_32);
411 static TCGv_i32 gen_sub32_carry32(void)
413 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
415 /* Carry is computed from a previous borrow: (src1 < src2) */
416 #if TARGET_LONG_BITS == 64
417 cc_src1_32 = tcg_temp_new_i32();
418 cc_src2_32 = tcg_temp_new_i32();
419 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
420 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
422 cc_src1_32 = cpu_cc_src;
423 cc_src2_32 = cpu_cc_src2;
426 carry_32 = tcg_temp_new_i32();
427 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
429 #if TARGET_LONG_BITS == 64
430 tcg_temp_free_i32(cc_src1_32);
431 tcg_temp_free_i32(cc_src2_32);
437 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
438 TCGv src2, int update_cc)
446 /* Carry is known to be zero. Fall back to plain ADD. */
448 gen_op_add_cc(dst, src1, src2);
450 tcg_gen_add_tl(dst, src1, src2);
457 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
459 /* For 32-bit hosts, we can re-use the host's hardware carry
460 generation by using an ADD2 opcode. We discard the low
461 part of the output. Ideally we'd combine this operation
462 with the add that generated the carry in the first place. */
463 TCGv dst_low = tcg_temp_new();
464 tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
465 cpu_cc_src, src1, cpu_cc_src2, src2);
466 tcg_temp_free(dst_low);
470 carry_32 = gen_add32_carry32();
476 carry_32 = gen_sub32_carry32();
480 /* We need external help to produce the carry. */
481 carry_32 = tcg_temp_new_i32();
482 gen_helper_compute_C_icc(carry_32, cpu_env);
486 #if TARGET_LONG_BITS == 64
487 carry = tcg_temp_new();
488 tcg_gen_extu_i32_i64(carry, carry_32);
493 tcg_gen_add_tl(dst, src1, src2);
494 tcg_gen_add_tl(dst, dst, carry);
496 tcg_temp_free_i32(carry_32);
497 #if TARGET_LONG_BITS == 64
498 tcg_temp_free(carry);
501 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
505 tcg_gen_mov_tl(cpu_cc_src, src1);
506 tcg_gen_mov_tl(cpu_cc_src2, src2);
507 tcg_gen_mov_tl(cpu_cc_dst, dst);
508 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
509 dc->cc_op = CC_OP_ADDX;
513 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
515 tcg_gen_mov_tl(cpu_cc_src, src1);
516 tcg_gen_mov_tl(cpu_cc_src2, src2);
517 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
518 tcg_gen_mov_tl(dst, cpu_cc_dst);
521 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
523 tcg_gen_mov_tl(cpu_cc_src, src1);
524 tcg_gen_mov_tl(cpu_cc_src2, src2);
525 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
526 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
527 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
528 tcg_gen_mov_tl(dst, cpu_cc_dst);
531 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
537 l1 = gen_new_label();
539 r_temp = tcg_temp_new();
540 tcg_gen_xor_tl(r_temp, src1, src2);
541 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
542 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
543 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
544 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
545 r_const = tcg_const_i32(TT_TOVF);
546 gen_helper_raise_exception(cpu_env, r_const);
547 tcg_temp_free_i32(r_const);
549 tcg_temp_free(r_temp);
552 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
554 tcg_gen_mov_tl(cpu_cc_src, src1);
555 tcg_gen_movi_tl(cpu_cc_src2, src2);
557 tcg_gen_mov_tl(cpu_cc_dst, src1);
558 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
559 dc->cc_op = CC_OP_LOGIC;
561 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
562 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
563 dc->cc_op = CC_OP_SUB;
565 tcg_gen_mov_tl(dst, cpu_cc_dst);
568 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
570 tcg_gen_mov_tl(cpu_cc_src, src1);
571 tcg_gen_mov_tl(cpu_cc_src2, src2);
572 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
573 tcg_gen_mov_tl(dst, cpu_cc_dst);
576 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
577 TCGv src2, int update_cc)
585 /* Carry is known to be zero. Fall back to plain SUB. */
587 gen_op_sub_cc(dst, src1, src2);
589 tcg_gen_sub_tl(dst, src1, src2);
596 carry_32 = gen_add32_carry32();
602 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
604 /* For 32-bit hosts, we can re-use the host's hardware carry
605 generation by using a SUB2 opcode. We discard the low
606 part of the output. Ideally we'd combine this operation
607 with the add that generated the carry in the first place. */
608 TCGv dst_low = tcg_temp_new();
609 tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
610 cpu_cc_src, src1, cpu_cc_src2, src2);
611 tcg_temp_free(dst_low);
615 carry_32 = gen_sub32_carry32();
619 /* We need external help to produce the carry. */
620 carry_32 = tcg_temp_new_i32();
621 gen_helper_compute_C_icc(carry_32, cpu_env);
625 #if TARGET_LONG_BITS == 64
626 carry = tcg_temp_new();
627 tcg_gen_extu_i32_i64(carry, carry_32);
632 tcg_gen_sub_tl(dst, src1, src2);
633 tcg_gen_sub_tl(dst, dst, carry);
635 tcg_temp_free_i32(carry_32);
636 #if TARGET_LONG_BITS == 64
637 tcg_temp_free(carry);
640 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
644 tcg_gen_mov_tl(cpu_cc_src, src1);
645 tcg_gen_mov_tl(cpu_cc_src2, src2);
646 tcg_gen_mov_tl(cpu_cc_dst, dst);
647 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
648 dc->cc_op = CC_OP_SUBX;
652 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
654 tcg_gen_mov_tl(cpu_cc_src, src1);
655 tcg_gen_mov_tl(cpu_cc_src2, src2);
656 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
657 tcg_gen_mov_tl(dst, cpu_cc_dst);
660 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
662 tcg_gen_mov_tl(cpu_cc_src, src1);
663 tcg_gen_mov_tl(cpu_cc_src2, src2);
664 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
665 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
666 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
667 tcg_gen_mov_tl(dst, cpu_cc_dst);
670 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
675 l1 = gen_new_label();
676 r_temp = tcg_temp_new();
682 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
683 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
684 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
685 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
686 tcg_gen_movi_tl(cpu_cc_src2, 0);
690 // env->y = (b2 << 31) | (env->y >> 1);
691 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
692 tcg_gen_shli_tl(r_temp, r_temp, 31);
693 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
694 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
695 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
696 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
699 gen_mov_reg_N(cpu_tmp0, cpu_psr);
700 gen_mov_reg_V(r_temp, cpu_psr);
701 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
702 tcg_temp_free(r_temp);
704 // T0 = (b1 << 31) | (T0 >> 1);
706 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
707 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
708 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
710 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
712 tcg_gen_mov_tl(dst, cpu_cc_dst);
715 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
717 TCGv_i32 r_src1, r_src2;
718 TCGv_i64 r_temp, r_temp2;
720 r_src1 = tcg_temp_new_i32();
721 r_src2 = tcg_temp_new_i32();
723 tcg_gen_trunc_tl_i32(r_src1, src1);
724 tcg_gen_trunc_tl_i32(r_src2, src2);
726 r_temp = tcg_temp_new_i64();
727 r_temp2 = tcg_temp_new_i64();
730 tcg_gen_ext_i32_i64(r_temp, r_src2);
731 tcg_gen_ext_i32_i64(r_temp2, r_src1);
733 tcg_gen_extu_i32_i64(r_temp, r_src2);
734 tcg_gen_extu_i32_i64(r_temp2, r_src1);
737 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
739 tcg_gen_shri_i64(r_temp, r_temp2, 32);
740 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
741 tcg_temp_free_i64(r_temp);
742 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
744 tcg_gen_trunc_i64_tl(dst, r_temp2);
746 tcg_temp_free_i64(r_temp2);
748 tcg_temp_free_i32(r_src1);
749 tcg_temp_free_i32(r_src2);
752 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
754 /* zero-extend truncated operands before multiplication */
755 gen_op_multiply(dst, src1, src2, 0);
758 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
760 /* sign-extend truncated operands before multiplication */
761 gen_op_multiply(dst, src1, src2, 1);
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO if the divisor is zero.  */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

/* 64-bit signed divide: trap on zero divisor, and pin the one
   overflowing case INT64_MIN / -1 to INT64_MIN.  */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;
    TCGv r_temp1, r_temp2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp1 = tcg_temp_local_new();
    r_temp2 = tcg_temp_local_new();
    tcg_gen_mov_tl(r_temp1, src1);
    tcg_gen_mov_tl(r_temp2, src2);
    gen_trap_ifdivzero_tl(r_temp2);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
    gen_set_label(l2);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#endif
803 static inline void gen_op_eval_ba(TCGv dst)
805 tcg_gen_movi_tl(dst, 1);
809 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
811 gen_mov_reg_Z(dst, src);
815 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
817 gen_mov_reg_N(cpu_tmp0, src);
818 gen_mov_reg_V(dst, src);
819 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
820 gen_mov_reg_Z(cpu_tmp0, src);
821 tcg_gen_or_tl(dst, dst, cpu_tmp0);
825 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
827 gen_mov_reg_V(cpu_tmp0, src);
828 gen_mov_reg_N(dst, src);
829 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
833 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
835 gen_mov_reg_Z(cpu_tmp0, src);
836 gen_mov_reg_C(dst, src);
837 tcg_gen_or_tl(dst, dst, cpu_tmp0);
841 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
843 gen_mov_reg_C(dst, src);
847 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
849 gen_mov_reg_V(dst, src);
853 static inline void gen_op_eval_bn(TCGv dst)
855 tcg_gen_movi_tl(dst, 0);
859 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
861 gen_mov_reg_N(dst, src);
865 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
867 gen_mov_reg_Z(dst, src);
868 tcg_gen_xori_tl(dst, dst, 0x1);
872 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
874 gen_mov_reg_N(cpu_tmp0, src);
875 gen_mov_reg_V(dst, src);
876 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
877 gen_mov_reg_Z(cpu_tmp0, src);
878 tcg_gen_or_tl(dst, dst, cpu_tmp0);
879 tcg_gen_xori_tl(dst, dst, 0x1);
883 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
885 gen_mov_reg_V(cpu_tmp0, src);
886 gen_mov_reg_N(dst, src);
887 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
888 tcg_gen_xori_tl(dst, dst, 0x1);
892 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
894 gen_mov_reg_Z(cpu_tmp0, src);
895 gen_mov_reg_C(dst, src);
896 tcg_gen_or_tl(dst, dst, cpu_tmp0);
897 tcg_gen_xori_tl(dst, dst, 0x1);
901 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
903 gen_mov_reg_C(dst, src);
904 tcg_gen_xori_tl(dst, dst, 0x1);
908 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
910 gen_mov_reg_N(dst, src);
911 tcg_gen_xori_tl(dst, dst, 0x1);
915 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
917 gen_mov_reg_V(dst, src);
918 tcg_gen_xori_tl(dst, dst, 0x1);
922 FPSR bit field FCC1 | FCC0:
928 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
929 unsigned int fcc_offset)
931 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
932 tcg_gen_andi_tl(reg, reg, 0x1);
935 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
936 unsigned int fcc_offset)
938 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
939 tcg_gen_andi_tl(reg, reg, 0x1);
943 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
944 unsigned int fcc_offset)
946 gen_mov_reg_FCC0(dst, src, fcc_offset);
947 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
948 tcg_gen_or_tl(dst, dst, cpu_tmp0);
951 // 1 or 2: FCC0 ^ FCC1
952 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
953 unsigned int fcc_offset)
955 gen_mov_reg_FCC0(dst, src, fcc_offset);
956 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
957 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
961 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
962 unsigned int fcc_offset)
964 gen_mov_reg_FCC0(dst, src, fcc_offset);
968 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
969 unsigned int fcc_offset)
971 gen_mov_reg_FCC0(dst, src, fcc_offset);
972 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
973 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
974 tcg_gen_and_tl(dst, dst, cpu_tmp0);
978 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
979 unsigned int fcc_offset)
981 gen_mov_reg_FCC1(dst, src, fcc_offset);
985 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
986 unsigned int fcc_offset)
988 gen_mov_reg_FCC0(dst, src, fcc_offset);
989 tcg_gen_xori_tl(dst, dst, 0x1);
990 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
991 tcg_gen_and_tl(dst, dst, cpu_tmp0);
995 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
996 unsigned int fcc_offset)
998 gen_mov_reg_FCC0(dst, src, fcc_offset);
999 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1000 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1003 // 0: !(FCC0 | FCC1)
1004 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1005 unsigned int fcc_offset)
1007 gen_mov_reg_FCC0(dst, src, fcc_offset);
1008 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1009 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1010 tcg_gen_xori_tl(dst, dst, 0x1);
1013 // 0 or 3: !(FCC0 ^ FCC1)
1014 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1015 unsigned int fcc_offset)
1017 gen_mov_reg_FCC0(dst, src, fcc_offset);
1018 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1019 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1020 tcg_gen_xori_tl(dst, dst, 0x1);
1024 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1025 unsigned int fcc_offset)
1027 gen_mov_reg_FCC0(dst, src, fcc_offset);
1028 tcg_gen_xori_tl(dst, dst, 0x1);
1031 // !1: !(FCC0 & !FCC1)
1032 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1033 unsigned int fcc_offset)
1035 gen_mov_reg_FCC0(dst, src, fcc_offset);
1036 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1037 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1038 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1039 tcg_gen_xori_tl(dst, dst, 0x1);
1043 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1044 unsigned int fcc_offset)
1046 gen_mov_reg_FCC1(dst, src, fcc_offset);
1047 tcg_gen_xori_tl(dst, dst, 0x1);
1050 // !2: !(!FCC0 & FCC1)
1051 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1052 unsigned int fcc_offset)
1054 gen_mov_reg_FCC0(dst, src, fcc_offset);
1055 tcg_gen_xori_tl(dst, dst, 0x1);
1056 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1057 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1058 tcg_gen_xori_tl(dst, dst, 0x1);
1061 // !3: !(FCC0 & FCC1)
1062 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1063 unsigned int fcc_offset)
1065 gen_mov_reg_FCC0(dst, src, fcc_offset);
1066 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1067 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1068 tcg_gen_xori_tl(dst, dst, 0x1);
1071 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1072 target_ulong pc2, TCGv r_cond)
1076 l1 = gen_new_label();
1078 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1080 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1083 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1086 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1087 target_ulong pc2, TCGv r_cond)
1091 l1 = gen_new_label();
1093 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1095 gen_goto_tb(dc, 0, pc2, pc1);
1098 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1101 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1106 l1 = gen_new_label();
1107 l2 = gen_new_label();
1109 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1111 tcg_gen_movi_tl(cpu_npc, npc1);
1115 tcg_gen_movi_tl(cpu_npc, npc2);
1119 /* call this function before using the condition register as it may
1120 have been set for a jump */
1121 static inline void flush_cond(DisasContext *dc, TCGv cond)
1123 if (dc->npc == JUMP_PC) {
1124 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1125 dc->npc = DYNAMIC_PC;
1129 static inline void save_npc(DisasContext *dc, TCGv cond)
1131 if (dc->npc == JUMP_PC) {
1132 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1133 dc->npc = DYNAMIC_PC;
1134 } else if (dc->npc != DYNAMIC_PC) {
1135 tcg_gen_movi_tl(cpu_npc, dc->npc);
1139 static inline void save_state(DisasContext *dc, TCGv cond)
1141 tcg_gen_movi_tl(cpu_pc, dc->pc);
1142 /* flush pending conditional evaluations before exposing cpu state */
1143 if (dc->cc_op != CC_OP_FLAGS) {
1144 dc->cc_op = CC_OP_FLAGS;
1145 gen_helper_compute_psr(cpu_env);
1150 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1152 if (dc->npc == JUMP_PC) {
1153 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1154 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1155 dc->pc = DYNAMIC_PC;
1156 } else if (dc->npc == DYNAMIC_PC) {
1157 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1158 dc->pc = DYNAMIC_PC;
1164 static inline void gen_op_next_insn(void)
1166 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1167 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1170 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1175 #ifdef TARGET_SPARC64
1183 switch (dc->cc_op) {
1187 gen_helper_compute_psr(cpu_env);
1188 dc->cc_op = CC_OP_FLAGS;
1193 gen_op_eval_bn(r_dst);
1196 gen_op_eval_be(r_dst, r_src);
1199 gen_op_eval_ble(r_dst, r_src);
1202 gen_op_eval_bl(r_dst, r_src);
1205 gen_op_eval_bleu(r_dst, r_src);
1208 gen_op_eval_bcs(r_dst, r_src);
1211 gen_op_eval_bneg(r_dst, r_src);
1214 gen_op_eval_bvs(r_dst, r_src);
1217 gen_op_eval_ba(r_dst);
1220 gen_op_eval_bne(r_dst, r_src);
1223 gen_op_eval_bg(r_dst, r_src);
1226 gen_op_eval_bge(r_dst, r_src);
1229 gen_op_eval_bgu(r_dst, r_src);
1232 gen_op_eval_bcc(r_dst, r_src);
1235 gen_op_eval_bpos(r_dst, r_src);
1238 gen_op_eval_bvc(r_dst, r_src);
1243 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1245 unsigned int offset;
1265 gen_op_eval_bn(r_dst);
1268 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1271 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1274 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1277 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1280 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1283 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1286 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1289 gen_op_eval_ba(r_dst);
1292 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1295 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1298 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1301 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1304 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1307 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1310 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
#ifdef TARGET_SPARC64
/* Map the 3-bit rcond field of BPr/MOVr to a TCG comparison against
   zero; -1 marks the two reserved encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
};

/* Set r_dst to 0/1 according to register-contents condition COND.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1340 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1343 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1344 target_ulong target = dc->pc + offset;
1347 /* unconditional not taken */
1349 dc->pc = dc->npc + 4;
1350 dc->npc = dc->pc + 4;
1353 dc->npc = dc->pc + 4;
1355 } else if (cond == 0x8) {
1356 /* unconditional taken */
1359 dc->npc = dc->pc + 4;
1363 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1366 flush_cond(dc, r_cond);
1367 gen_cond(r_cond, cc, cond, dc);
1369 gen_branch_a(dc, target, dc->npc, r_cond);
1373 dc->jump_pc[0] = target;
1374 if (unlikely(dc->npc == DYNAMIC_PC)) {
1375 dc->jump_pc[1] = DYNAMIC_PC;
1376 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1378 dc->jump_pc[1] = dc->npc + 4;
1385 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1388 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1389 target_ulong target = dc->pc + offset;
1392 /* unconditional not taken */
1394 dc->pc = dc->npc + 4;
1395 dc->npc = dc->pc + 4;
1398 dc->npc = dc->pc + 4;
1400 } else if (cond == 0x8) {
1401 /* unconditional taken */
1404 dc->npc = dc->pc + 4;
1408 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1411 flush_cond(dc, r_cond);
1412 gen_fcond(r_cond, cc, cond);
1414 gen_branch_a(dc, target, dc->npc, r_cond);
1418 dc->jump_pc[0] = target;
1419 if (unlikely(dc->npc == DYNAMIC_PC)) {
1420 dc->jump_pc[1] = DYNAMIC_PC;
1421 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1423 dc->jump_pc[1] = dc->npc + 4;
1430 #ifdef TARGET_SPARC64
1431 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1432 TCGv r_cond, TCGv r_reg)
1434 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1435 target_ulong target = dc->pc + offset;
1437 flush_cond(dc, r_cond);
1438 gen_cond_reg(r_cond, cond, r_reg);
1440 gen_branch_a(dc, target, dc->npc, r_cond);
1444 dc->jump_pc[0] = target;
1445 if (unlikely(dc->npc == DYNAMIC_PC)) {
1446 dc->jump_pc[1] = DYNAMIC_PC;
1447 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1449 dc->jump_pc[1] = dc->npc + 4;
#ifdef TARGET_SPARC64
/* FP compare dispatchers: sparc64 has four FCC fields, so select the
   helper matching fccno.  */

static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}
#endif
/* Non-SPARC64 build: only one condition field exists, so fccno is
 * ignored and the base helper is always used. */
1565 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1567 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
/* Non-SPARC64 double compare: single condition field, fccno ignored. */
1570 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1572 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
/* Non-SPARC64 quad compare: fccno ignored; operands come via the QT
 * staging globals (helper takes only cpu_env). */
1575 static inline void gen_op_fcmpq(int fccno)
1577 gen_helper_fcmpq(cpu_env);
/* Non-SPARC64 signaling single compare: fccno ignored. */
1580 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1582 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
/* Non-SPARC64 signaling double compare: fccno ignored. */
1585 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1587 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
/* Non-SPARC64 signaling quad compare: fccno ignored. */
1590 static inline void gen_op_fcmpeq(int fccno)
1592 gen_helper_fcmpeq(cpu_env);
/* Raise an FP exception with an immediate FSR trap-type: clear the FTT
 * field of %fsr, OR in fsr_flags, then raise TT_FP_EXCP via the helper. */
1596 static inline void gen_op_fpexception_im(int fsr_flags)
1600 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1601 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
/* r_const declaration is elided in this listing. */
1602 r_const = tcg_const_i32(TT_FP_EXCP);
1603 gen_helper_raise_exception(cpu_env, r_const);
1604 tcg_temp_free_i32(r_const);
/* If the FPU is disabled (system emulation only), save the translation
 * state and raise TT_NFPU_INSN.  Returns an int flag consumed by callers
 * as "trap was emitted, skip this insn" — exact return statements are
 * elided in this listing (TODO confirm values). */
1607 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1609 #if !defined(CONFIG_USER_ONLY)
1610 if (!dc->fpu_enabled) {
1613 save_state(dc, r_cond);
1614 r_const = tcg_const_i32(TT_NFPU_INSN);
1615 gen_helper_raise_exception(cpu_env, r_const);
1616 tcg_temp_free_i32(r_const);
/* Clear the FTT and current-exception (cexc) fields of %fsr before an
 * FP operation. */
1624 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1626 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Single -> single FP op through an env-taking helper:
 * load %f[rs], call gen(dst, env, src), store %f[rd]. */
1629 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1630 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1634 src = gen_load_fpr_F(dc, rs);
1635 dst = gen_dest_fpr_F();
1637 gen(dst, cpu_env, src);
1639 gen_store_fpr_F(dc, rd, dst);
/* "ne" (no-exception) variant of gen_fop_FF: the helper takes no env
 * pointer, so it cannot raise FP traps (the gen(dst, src) call line is
 * elided in this listing). */
1642 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1643 void (*gen)(TCGv_i32, TCGv_i32))
1647 src = gen_load_fpr_F(dc, rs);
1648 dst = gen_dest_fpr_F();
1652 gen_store_fpr_F(dc, rd, dst);
/* (single, single) -> single FP op through an env-taking helper. */
1655 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1656 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1658 TCGv_i32 dst, src1, src2;
1660 src1 = gen_load_fpr_F(dc, rs1);
1661 src2 = gen_load_fpr_F(dc, rs2);
1662 dst = gen_dest_fpr_F();
1664 gen(dst, cpu_env, src1, src2);
1666 gen_store_fpr_F(dc, rd, dst);
1669 #ifdef TARGET_SPARC64
/* (single, single) -> single op, no-exception helper (no env arg);
 * used by VIS-style instructions. */
1670 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1671 void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1673 TCGv_i32 dst, src1, src2;
1675 src1 = gen_load_fpr_F(dc, rs1);
1676 src2 = gen_load_fpr_F(dc, rs2);
1677 dst = gen_dest_fpr_F();
1679 gen(dst, src1, src2);
1681 gen_store_fpr_F(dc, rd, dst);
/* Double -> double FP op through an env-taking helper. */
1685 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1686 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1690 src = gen_load_fpr_D(dc, rs);
1691 dst = gen_dest_fpr_D();
1693 gen(dst, cpu_env, src);
1695 gen_store_fpr_D(dc, rd, dst);
1698 #ifdef TARGET_SPARC64
/* Double -> double op, no-exception helper (the gen(dst, src) call line
 * is elided in this listing). */
1699 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1700 void (*gen)(TCGv_i64, TCGv_i64))
1704 src = gen_load_fpr_D(dc, rs);
1705 dst = gen_dest_fpr_D();
1709 gen_store_fpr_D(dc, rd, dst);
/* (double, double) -> double FP op through an env-taking helper. */
1713 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1714 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1716 TCGv_i64 dst, src1, src2;
1718 src1 = gen_load_fpr_D(dc, rs1);
1719 src2 = gen_load_fpr_D(dc, rs2);
1720 dst = gen_dest_fpr_D();
1722 gen(dst, cpu_env, src1, src2);
1724 gen_store_fpr_D(dc, rd, dst);
1727 #ifdef TARGET_SPARC64
/* (double, double) -> double op, no-exception helper (no env arg). */
1728 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1729 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1731 TCGv_i64 dst, src1, src2;
1733 src1 = gen_load_fpr_D(dc, rs1);
1734 src2 = gen_load_fpr_D(dc, rs2);
1735 dst = gen_dest_fpr_D();
1737 gen(dst, src1, src2);
1739 gen_store_fpr_D(dc, rd, dst);
/* Three-operand double op: src0 is the current value of the destination
 * register %d[rd] (read-modify-write, e.g. for VIS accumulate ops). */
1742 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1743 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1745 TCGv_i64 dst, src0, src1, src2;
1747 src1 = gen_load_fpr_D(dc, rs1);
1748 src2 = gen_load_fpr_D(dc, rs2);
1749 src0 = gen_load_fpr_D(dc, rd);
1750 dst = gen_dest_fpr_D();
1752 gen(dst, src0, src1, src2);
1754 gen_store_fpr_D(dc, rd, dst);
/* Quad -> quad FP op: operand staged in QT1, result read back from QT0;
 * the gen(cpu_env) call line is elided in this listing.  Marks the
 * destination quad register dirty in %fprs. */
1758 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1759 void (*gen)(TCGv_ptr))
1761 gen_op_load_fpr_QT1(QFPREG(rs));
1765 gen_op_store_QT0_fpr(QFPREG(rd));
1766 gen_update_fprs_dirty(QFPREG(rd));
1769 #ifdef TARGET_SPARC64
/* Quad -> quad op, no-exception variant; same QT0/QT1 staging as
 * gen_fop_QQ (helper call line elided in this listing). */
1770 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1771 void (*gen)(TCGv_ptr))
1773 gen_op_load_fpr_QT1(QFPREG(rs));
1777 gen_op_store_QT0_fpr(QFPREG(rd));
1778 gen_update_fprs_dirty(QFPREG(rd));
/* (quad, quad) -> quad FP op: operands staged in QT0/QT1, result read
 * back from QT0 (helper call line elided in this listing). */
1782 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1783 void (*gen)(TCGv_ptr))
1785 gen_op_load_fpr_QT0(QFPREG(rs1));
1786 gen_op_load_fpr_QT1(QFPREG(rs2));
1790 gen_op_store_QT0_fpr(QFPREG(rd));
1791 gen_update_fprs_dirty(QFPREG(rd));
/* (single, single) -> double FP op (e.g. fsmuld) through an env-taking
 * helper. */
1794 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1795 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1798 TCGv_i32 src1, src2;
1800 src1 = gen_load_fpr_F(dc, rs1);
1801 src2 = gen_load_fpr_F(dc, rs2);
1802 dst = gen_dest_fpr_D();
1804 gen(dst, cpu_env, src1, src2);
1806 gen_store_fpr_D(dc, rd, dst);
/* (double, double) -> quad FP op (e.g. fdmulq): inputs passed directly,
 * result delivered via QT0. */
1809 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1810 void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1812 TCGv_i64 src1, src2;
1814 src1 = gen_load_fpr_D(dc, rs1);
1815 src2 = gen_load_fpr_D(dc, rs2);
1817 gen(cpu_env, src1, src2);
1819 gen_op_store_QT0_fpr(QFPREG(rd));
1820 gen_update_fprs_dirty(QFPREG(rd));
1823 #ifdef TARGET_SPARC64
/* Single -> double FP conversion op through an env-taking helper. */
1824 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1825 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1830 src = gen_load_fpr_F(dc, rs);
1831 dst = gen_dest_fpr_D();
1833 gen(dst, cpu_env, src);
1835 gen_store_fpr_D(dc, rd, dst);
/* Single -> double op, "ne" variant.  NOTE(review): despite the "ne"
 * name the visible helper type and call still pass cpu_env — the
 * distinction from gen_fop_DF is not evident from this listing. */
1839 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1840 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1845 src = gen_load_fpr_F(dc, rs);
1846 dst = gen_dest_fpr_D();
1848 gen(dst, cpu_env, src);
1850 gen_store_fpr_D(dc, rd, dst);
/* Double -> single FP conversion op through an env-taking helper. */
1853 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1854 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1859 src = gen_load_fpr_D(dc, rs);
1860 dst = gen_dest_fpr_F();
1862 gen(dst, cpu_env, src);
1864 gen_store_fpr_F(dc, rd, dst);
/* Quad -> single conversion: operand staged in QT1 (helper call line
 * elided in this listing), result stored to %f[rd]. */
1867 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1868 void (*gen)(TCGv_i32, TCGv_ptr))
1872 gen_op_load_fpr_QT1(QFPREG(rs));
1873 dst = gen_dest_fpr_F();
1877 gen_store_fpr_F(dc, rd, dst);
/* Quad -> double conversion: operand staged in QT1 (helper call line
 * elided in this listing). */
1880 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1881 void (*gen)(TCGv_i64, TCGv_ptr))
1885 gen_op_load_fpr_QT1(QFPREG(rs));
1886 dst = gen_dest_fpr_D();
1890 gen_store_fpr_D(dc, rd, dst);
/* Single -> quad conversion, no-exception variant: result arrives in
 * QT0 (helper call line elided in this listing). */
1893 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1894 void (*gen)(TCGv_ptr, TCGv_i32))
1898 src = gen_load_fpr_F(dc, rs);
1902 gen_op_store_QT0_fpr(QFPREG(rd));
1903 gen_update_fprs_dirty(QFPREG(rd));
/* Double -> quad conversion, no-exception variant: result arrives in
 * QT0 (helper call line elided in this listing). */
1906 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1907 void (*gen)(TCGv_ptr, TCGv_i64))
1911 src = gen_load_fpr_D(dc, rs);
1915 gen_op_store_QT0_fpr(QFPREG(rd))
1916 gen_update_fprs_dirty(QFPREG(rd));
1920 #ifdef TARGET_SPARC64
/* Return a TCGv_i32 holding the ASI for this access: either the runtime
 * %asi register (implicit-ASI form) or the immediate field insn[19:26].
 * The selecting branch (presumably on the insn's i bit) is elided in
 * this listing — TODO confirm. */
1921 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1927 r_asi = tcg_temp_new_i32();
1928 tcg_gen_mov_i32(r_asi, cpu_asi);
1930 asi = GET_FIELD(insn, 19, 26);
1931 r_asi = tcg_const_i32(asi);
/* SPARC64 load-alternate: call the ld_asi helper with (asi, size, sign)
 * passed as i32 temps, all freed afterwards. */
1936 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1939 TCGv_i32 r_asi, r_size, r_sign;
1941 r_asi = gen_get_asi(insn, addr);
1942 r_size = tcg_const_i32(size);
1943 r_sign = tcg_const_i32(sign);
1944 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1945 tcg_temp_free_i32(r_sign);
1946 tcg_temp_free_i32(r_size);
1947 tcg_temp_free_i32(r_asi);
/* SPARC64 store-alternate: store src at addr in address space asi with
 * the given size. */
1950 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1952 TCGv_i32 r_asi, r_size;
1954 r_asi = gen_get_asi(insn, addr);
1955 r_size = tcg_const_i32(size);
1956 gen_helper_st_asi(addr, src, r_asi, r_size);
1957 tcg_temp_free_i32(r_size);
1958 tcg_temp_free_i32(r_asi);
/* FP load-alternate: the helper writes directly into FP register rd,
 * so rd is passed by number rather than as a TCG value. */
1961 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1963 TCGv_i32 r_asi, r_size, r_rd;
1965 r_asi = gen_get_asi(insn, addr);
1966 r_size = tcg_const_i32(size);
1967 r_rd = tcg_const_i32(rd);
1968 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1969 tcg_temp_free_i32(r_rd);
1970 tcg_temp_free_i32(r_size);
1971 tcg_temp_free_i32(r_asi);
/* FP store-alternate: mirror of gen_ldf_asi for stores. */
1974 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1976 TCGv_i32 r_asi, r_size, r_rd;
1978 r_asi = gen_get_asi(insn, addr);
1979 r_size = tcg_const_i32(size);
1980 r_rd = tcg_const_i32(rd);
1981 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1982 tcg_temp_free_i32(r_rd);
1983 tcg_temp_free_i32(r_size);
1984 tcg_temp_free_i32(r_asi);
/* SWAPA: load the old 32-bit value at addr into cpu_tmp64, store dst to
 * the same location, then return the old value in dst.  Note the load
 * and store are two separate helper calls, not an atomic exchange. */
1987 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1989 TCGv_i32 r_asi, r_size, r_sign;
1991 r_asi = gen_get_asi(insn, addr);
1992 r_size = tcg_const_i32(4);
1993 r_sign = tcg_const_i32(0);
1994 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1995 tcg_temp_free_i32(r_sign);
1996 gen_helper_st_asi(addr, dst, r_asi, r_size);
1997 tcg_temp_free_i32(r_size);
1998 tcg_temp_free_i32(r_asi);
1999 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* LDDA: 64-bit load to the register pair rd/rd+1; the helper writes the
 * registers itself, so rd is passed by number. */
2002 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2004 TCGv_i32 r_asi, r_rd;
2006 r_asi = gen_get_asi(insn, addr);
2007 r_rd = tcg_const_i32(rd);
2008 gen_helper_ldda_asi(addr, r_asi, r_rd);
2009 tcg_temp_free_i32(r_rd);
2010 tcg_temp_free_i32(r_asi);
/* STDA: concatenate the pair (hi = even reg rd, cpu_tmp0 = odd reg rd+1)
 * into a 64-bit value and store it with one 8-byte helper call. */
2013 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2015 TCGv_i32 r_asi, r_size;
2017 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2018 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2019 r_asi = gen_get_asi(insn, addr);
2020 r_size = tcg_const_i32(8);
2021 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
2022 tcg_temp_free_i32(r_size);
2023 tcg_temp_free_i32(r_asi);
/* CASA (32-bit compare-and-swap): compare value comes from register rd,
 * swap value is val2; the helper returns the loaded memory word in dst. */
2026 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2032 r_val1 = tcg_temp_new();
2033 gen_movl_reg_TN(rd, r_val1);
2034 r_asi = gen_get_asi(insn, addr);
2035 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
2036 tcg_temp_free_i32(r_asi);
2037 tcg_temp_free(r_val1);
/* CASXA (64-bit compare-and-swap): like gen_cas_asi but the compare
 * value is staged in the cpu_tmp64 global. */
2040 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2045 gen_movl_reg_TN(rd, cpu_tmp64);
2046 r_asi = gen_get_asi(insn, addr);
2047 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
2048 tcg_temp_free_i32(r_asi);
2051 #elif !defined(CONFIG_USER_ONLY)
/* sparc32 system-mode load-alternate: ASI is always the immediate field
 * insn[19:26]; the helper returns a 64-bit value that is truncated to
 * the target word size. */
2053 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2056 TCGv_i32 r_asi, r_size, r_sign;
2058 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2059 r_size = tcg_const_i32(size);
2060 r_sign = tcg_const_i32(sign);
2061 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
2062 tcg_temp_free(r_sign);
2063 tcg_temp_free(r_size);
2064 tcg_temp_free(r_asi);
2065 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* sparc32 system-mode store-alternate: widen src to 64 bits and store
 * through the common st_asi helper. */
2068 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2070 TCGv_i32 r_asi, r_size;
2072 tcg_gen_extu_tl_i64(cpu_tmp64, src);
2073 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2074 r_size = tcg_const_i32(size);
2075 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
2076 tcg_temp_free(r_size);
2077 tcg_temp_free(r_asi);
/* sparc32 SWAPA: load old 32-bit value into cpu_tmp64, store dst
 * (zero-extended via a local i64 temp), then return the old value in
 * dst.  Load and store are separate helper calls, not atomic. */
2080 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2082 TCGv_i32 r_asi, r_size, r_sign;
2085 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2086 r_size = tcg_const_i32(4);
2087 r_sign = tcg_const_i32(0);
2088 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
2089 tcg_temp_free(r_sign);
2090 r_val = tcg_temp_new_i64();
2091 tcg_gen_extu_tl_i64(r_val, dst);
2092 gen_helper_st_asi(addr, r_val, r_asi, r_size);
2093 tcg_temp_free_i64(r_val);
2094 tcg_temp_free(r_size);
2095 tcg_temp_free(r_asi);
2096 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* sparc32 LDDA: one 8-byte load, then split the i64 — low word goes to
 * odd register rd+1, high word to even register rd (via 'hi'). */
2099 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2101 TCGv_i32 r_asi, r_size, r_sign;
2103 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2104 r_size = tcg_const_i32(8);
2105 r_sign = tcg_const_i32(0);
2106 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
2107 tcg_temp_free(r_sign);
2108 tcg_temp_free(r_size);
2109 tcg_temp_free(r_asi);
2110 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
2111 gen_movl_TN_reg(rd + 1, cpu_tmp0);
2112 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2113 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2114 gen_movl_TN_reg(rd, hi);
/* sparc32 STDA: concatenate the pair (odd reg rd+1 low, 'hi' high) into
 * an i64 and store it with one 8-byte helper call. */
2117 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2119 TCGv_i32 r_asi, r_size;
2121 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2122 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2123 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2124 r_size = tcg_const_i32(8);
2125 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
2126 tcg_temp_free(r_size);
2127 tcg_temp_free(r_asi);
2131 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at addr into dst, then write 0xff back to the
 * same location (two helper calls; not an atomic RMW). */
2132 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2135 TCGv_i32 r_asi, r_size;
2137 gen_ld_asi(dst, addr, insn, 1, 0);
2139 r_val = tcg_const_i64(0xffULL);
2140 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2141 r_size = tcg_const_i32(1);
2142 gen_helper_st_asi(addr, r_val, r_asi, r_size);
2143 tcg_temp_free_i32(r_size);
2144 tcg_temp_free_i32(r_asi);
2145 tcg_temp_free_i64(r_val);
/* Fetch operand rs1: %g0 reads as constant 0, %g1-%g7 are the global
 * TCG registers, windowed registers (rs1 >= 8) are loaded from the
 * current window pointer.  Returns either the global or 'def'
 * (return statements elided in this listing). */
2149 static inline TCGv get_src1(unsigned int insn, TCGv def)
2154 rs1 = GET_FIELD(insn, 13, 17);
2156 tcg_gen_movi_tl(def, 0);
2157 } else if (rs1 < 8) {
2158 r_rs1 = cpu_gregs[rs1];
2160 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
/* Fetch operand 2: either the sign-extended 13-bit immediate (i bit set)
 * or register rs2, with the same %g0 / global / windowed cases as
 * get_src1 (return statements elided in this listing). */
2165 static inline TCGv get_src2(unsigned int insn, TCGv def)
2169 if (IS_IMM) { /* immediate */
2170 target_long simm = GET_FIELDs(insn, 19, 31);
2171 tcg_gen_movi_tl(def, simm);
2172 } else { /* register */
2173 unsigned int rs2 = GET_FIELD(insn, 27, 31);
2175 tcg_gen_movi_tl(def, 0);
2176 } else if (rs2 < 8) {
2177 r_rs2 = cpu_gregs[rs2];
2179 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
2185 #ifdef TARGET_SPARC64
/* Compute r_tsptr = &env->ts[env->tl & MAXTL_MASK], the trap-state
 * entry for the current trap level, entirely in generated code. */
2186 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2188 TCGv_i32 r_tl = tcg_temp_new_i32();
2190 /* load env->tl into r_tl */
2191 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2193 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2194 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2196 /* calculate offset to current trap state from env->ts, reuse r_tl */
2197 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state))
2198 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
2200 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2202 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2203 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2204 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2205 tcg_temp_free_ptr(r_tl_tmp);
2208 tcg_temp_free_i32(r_tl);
/* Guard macros: bail out of the current insn (body lines elided in this
 * listing — presumably a goto to the illegal/nofpu label) when the CPU
 * model lacks the given integer-unit or FPU feature bit. */
2212 #define CHECK_IU_FEATURE(dc, FEATURE) \
2213 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2215 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2216 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2219 /* before an instruction, dc->pc must be static */
2220 static void disas_sparc_insn(DisasContext * dc)
2222 unsigned int insn, opc, rs1, rs2, rd;
2223 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2224 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2225 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2228 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2229 tcg_gen_debug_insn_start(dc->pc);
2230 insn = ldl_code(dc->pc);
2231 opc = GET_FIELD(insn, 0, 1);
2233 rd = GET_FIELD(insn, 2, 6);
2235 cpu_tmp1 = cpu_src1 = tcg_temp_new();
2236 cpu_tmp2 = cpu_src2 = tcg_temp_new();
2239 case 0: /* branches/sethi */
2241 unsigned int xop = GET_FIELD(insn, 7, 9);
2244 #ifdef TARGET_SPARC64
2245 case 0x1: /* V9 BPcc */
2249 target = GET_FIELD_SP(insn, 0, 18);
2250 target = sign_extend(target, 19);
2252 cc = GET_FIELD_SP(insn, 20, 21);
2254 do_branch(dc, target, insn, 0, cpu_cond);
2256 do_branch(dc, target, insn, 1, cpu_cond);
2261 case 0x3: /* V9 BPr */
2263 target = GET_FIELD_SP(insn, 0, 13) |
2264 (GET_FIELD_SP(insn, 20, 21) << 14);
2265 target = sign_extend(target, 16);
2267 cpu_src1 = get_src1(insn, cpu_src1);
2268 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2271 case 0x5: /* V9 FBPcc */
2273 int cc = GET_FIELD_SP(insn, 20, 21);
2274 if (gen_trap_ifnofpu(dc, cpu_cond))
2276 target = GET_FIELD_SP(insn, 0, 18);
2277 target = sign_extend(target, 19);
2279 do_fbranch(dc, target, insn, cc, cpu_cond);
2283 case 0x7: /* CBN+x */
2288 case 0x2: /* BN+x */
2290 target = GET_FIELD(insn, 10, 31);
2291 target = sign_extend(target, 22);
2293 do_branch(dc, target, insn, 0, cpu_cond);
2296 case 0x6: /* FBN+x */
2298 if (gen_trap_ifnofpu(dc, cpu_cond))
2300 target = GET_FIELD(insn, 10, 31);
2301 target = sign_extend(target, 22);
2303 do_fbranch(dc, target, insn, 0, cpu_cond);
2306 case 0x4: /* SETHI */
2308 uint32_t value = GET_FIELD(insn, 10, 31);
2311 r_const = tcg_const_tl(value << 10);
2312 gen_movl_TN_reg(rd, r_const);
2313 tcg_temp_free(r_const);
2316 case 0x0: /* UNIMPL */
2325 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2328 r_const = tcg_const_tl(dc->pc);
2329 gen_movl_TN_reg(15, r_const);
2330 tcg_temp_free(r_const);
2332 gen_mov_pc_npc(dc, cpu_cond);
2336 case 2: /* FPU & Logical Operations */
2338 unsigned int xop = GET_FIELD(insn, 7, 12);
2339 if (xop == 0x3a) { /* generate trap */
2342 cpu_src1 = get_src1(insn, cpu_src1);
2344 rs2 = GET_FIELD(insn, 25, 31);
2345 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2347 rs2 = GET_FIELD(insn, 27, 31);
2349 gen_movl_reg_TN(rs2, cpu_src2);
2350 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2352 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2355 cond = GET_FIELD(insn, 3, 6);
2356 if (cond == 0x8) { /* Trap Always */
2357 save_state(dc, cpu_cond);
2358 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2360 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2362 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2363 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2364 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2367 dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2369 gen_helper_shutdown();
2372 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2374 } else if (cond != 0) {
2375 TCGv r_cond = tcg_temp_new();
2377 #ifdef TARGET_SPARC64
2379 int cc = GET_FIELD_SP(insn, 11, 12);
2381 save_state(dc, cpu_cond);
2383 gen_cond(r_cond, 0, cond, dc);
2385 gen_cond(r_cond, 1, cond, dc);
2389 save_state(dc, cpu_cond);
2390 gen_cond(r_cond, 0, cond, dc);
2392 l1 = gen_new_label();
2393 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2395 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2397 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2399 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2400 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2401 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2402 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2405 tcg_temp_free(r_cond);
2411 } else if (xop == 0x28) {
2412 rs1 = GET_FIELD(insn, 13, 17);
2415 #ifndef TARGET_SPARC64
2416 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2417 manual, rdy on the microSPARC
2419 case 0x0f: /* stbar in the SPARCv8 manual,
2420 rdy on the microSPARC II */
2421 case 0x10 ... 0x1f: /* implementation-dependent in the
2422 SPARCv8 manual, rdy on the
2425 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2428 /* Read Asr17 for a Leon3 monoprocessor */
2429 r_const = tcg_const_tl((1 << 8)
2430 | (dc->def->nwindows - 1));
2431 gen_movl_TN_reg(rd, r_const);
2432 tcg_temp_free(r_const);
2436 gen_movl_TN_reg(rd, cpu_y);
2438 #ifdef TARGET_SPARC64
2439 case 0x2: /* V9 rdccr */
2440 gen_helper_compute_psr(cpu_env);
2441 gen_helper_rdccr(cpu_dst, cpu_env);
2442 gen_movl_TN_reg(rd, cpu_dst);
2444 case 0x3: /* V9 rdasi */
2445 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2446 gen_movl_TN_reg(rd, cpu_dst);
2448 case 0x4: /* V9 rdtick */
2452 r_tickptr = tcg_temp_new_ptr();
2453 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2454 offsetof(CPUState, tick));
2455 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2456 tcg_temp_free_ptr(r_tickptr);
2457 gen_movl_TN_reg(rd, cpu_dst);
2460 case 0x5: /* V9 rdpc */
2464 r_const = tcg_const_tl(dc->pc);
2465 gen_movl_TN_reg(rd, r_const);
2466 tcg_temp_free(r_const);
2469 case 0x6: /* V9 rdfprs */
2470 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2471 gen_movl_TN_reg(rd, cpu_dst);
2473 case 0xf: /* V9 membar */
2474 break; /* no effect */
2475 case 0x13: /* Graphics Status */
2476 if (gen_trap_ifnofpu(dc, cpu_cond))
2478 gen_movl_TN_reg(rd, cpu_gsr);
2480 case 0x16: /* Softint */
2481 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2482 gen_movl_TN_reg(rd, cpu_dst);
2484 case 0x17: /* Tick compare */
2485 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2487 case 0x18: /* System tick */
2491 r_tickptr = tcg_temp_new_ptr();
2492 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2493 offsetof(CPUState, stick));
2494 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2495 tcg_temp_free_ptr(r_tickptr);
2496 gen_movl_TN_reg(rd, cpu_dst);
2499 case 0x19: /* System tick compare */
2500 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2502 case 0x10: /* Performance Control */
2503 case 0x11: /* Performance Instrumentation Counter */
2504 case 0x12: /* Dispatch Control */
2505 case 0x14: /* Softint set, WO */
2506 case 0x15: /* Softint clear, WO */
2511 #if !defined(CONFIG_USER_ONLY)
2512 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2513 #ifndef TARGET_SPARC64
2514 if (!supervisor(dc))
2516 gen_helper_compute_psr(cpu_env);
2517 dc->cc_op = CC_OP_FLAGS;
2518 gen_helper_rdpsr(cpu_dst, cpu_env);
2520 CHECK_IU_FEATURE(dc, HYPV);
2521 if (!hypervisor(dc))
2523 rs1 = GET_FIELD(insn, 13, 17);
2526 // gen_op_rdhpstate();
2529 // gen_op_rdhtstate();
2532 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2535 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2538 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2540 case 31: // hstick_cmpr
2541 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2547 gen_movl_TN_reg(rd, cpu_dst);
2549 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2550 if (!supervisor(dc))
2552 #ifdef TARGET_SPARC64
2553 rs1 = GET_FIELD(insn, 13, 17);
2559 r_tsptr = tcg_temp_new_ptr();
2560 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2561 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2562 offsetof(trap_state, tpc));
2563 tcg_temp_free_ptr(r_tsptr);
2570 r_tsptr = tcg_temp_new_ptr();
2571 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2572 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2573 offsetof(trap_state, tnpc));
2574 tcg_temp_free_ptr(r_tsptr);
2581 r_tsptr = tcg_temp_new_ptr();
2582 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2583 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2584 offsetof(trap_state, tstate));
2585 tcg_temp_free_ptr(r_tsptr);
2592 r_tsptr = tcg_temp_new_ptr();
2593 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2594 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2595 offsetof(trap_state, tt));
2596 tcg_temp_free_ptr(r_tsptr);
2597 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2604 r_tickptr = tcg_temp_new_ptr();
2605 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2606 offsetof(CPUState, tick));
2607 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2608 gen_movl_TN_reg(rd, cpu_tmp0);
2609 tcg_temp_free_ptr(r_tickptr);
2613 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2616 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2617 offsetof(CPUSPARCState, pstate));
2618 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2621 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2622 offsetof(CPUSPARCState, tl));
2623 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2626 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2627 offsetof(CPUSPARCState, psrpil));
2628 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2631 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2634 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2635 offsetof(CPUSPARCState, cansave));
2636 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2638 case 11: // canrestore
2639 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2640 offsetof(CPUSPARCState, canrestore));
2641 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2643 case 12: // cleanwin
2644 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2645 offsetof(CPUSPARCState, cleanwin));
2646 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2648 case 13: // otherwin
2649 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2650 offsetof(CPUSPARCState, otherwin));
2651 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2654 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2655 offsetof(CPUSPARCState, wstate));
2656 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2658 case 16: // UA2005 gl
2659 CHECK_IU_FEATURE(dc, GL);
2660 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2661 offsetof(CPUSPARCState, gl));
2662 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2664 case 26: // UA2005 strand status
2665 CHECK_IU_FEATURE(dc, HYPV);
2666 if (!hypervisor(dc))
2668 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2671 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2678 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2680 gen_movl_TN_reg(rd, cpu_tmp0);
2682 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2683 #ifdef TARGET_SPARC64
2684 save_state(dc, cpu_cond);
2685 gen_helper_flushw(cpu_env);
2687 if (!supervisor(dc))
2689 gen_movl_TN_reg(rd, cpu_tbr);
2693 } else if (xop == 0x34) { /* FPU Operations */
2694 if (gen_trap_ifnofpu(dc, cpu_cond))
2696 gen_op_clear_ieee_excp_and_FTT();
2697 rs1 = GET_FIELD(insn, 13, 17);
2698 rs2 = GET_FIELD(insn, 27, 31);
2699 xop = GET_FIELD(insn, 18, 26);
2700 save_state(dc, cpu_cond);
2702 case 0x1: /* fmovs */
2703 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2704 gen_store_fpr_F(dc, rd, cpu_src1_32);
2706 case 0x5: /* fnegs */
2707 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2709 case 0x9: /* fabss */
2710 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2712 case 0x29: /* fsqrts */
2713 CHECK_FPU_FEATURE(dc, FSQRT);
2714 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2716 case 0x2a: /* fsqrtd */
2717 CHECK_FPU_FEATURE(dc, FSQRT);
2718 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2720 case 0x2b: /* fsqrtq */
2721 CHECK_FPU_FEATURE(dc, FLOAT128);
2722 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2724 case 0x41: /* fadds */
2725 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2727 case 0x42: /* faddd */
2728 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2730 case 0x43: /* faddq */
2731 CHECK_FPU_FEATURE(dc, FLOAT128);
2732 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2734 case 0x45: /* fsubs */
2735 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2737 case 0x46: /* fsubd */
2738 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2740 case 0x47: /* fsubq */
2741 CHECK_FPU_FEATURE(dc, FLOAT128);
2742 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2744 case 0x49: /* fmuls */
2745 CHECK_FPU_FEATURE(dc, FMUL);
2746 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2748 case 0x4a: /* fmuld */
2749 CHECK_FPU_FEATURE(dc, FMUL);
2750 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2752 case 0x4b: /* fmulq */
2753 CHECK_FPU_FEATURE(dc, FLOAT128);
2754 CHECK_FPU_FEATURE(dc, FMUL);
2755 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2757 case 0x4d: /* fdivs */
2758 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2760 case 0x4e: /* fdivd */
2761 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2763 case 0x4f: /* fdivq */
2764 CHECK_FPU_FEATURE(dc, FLOAT128);
2765 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2767 case 0x69: /* fsmuld */
2768 CHECK_FPU_FEATURE(dc, FSMULD);
2769 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2771 case 0x6e: /* fdmulq */
2772 CHECK_FPU_FEATURE(dc, FLOAT128);
2773 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2775 case 0xc4: /* fitos */
2776 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2778 case 0xc6: /* fdtos */
2779 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2781 case 0xc7: /* fqtos */
2782 CHECK_FPU_FEATURE(dc, FLOAT128);
2783 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2785 case 0xc8: /* fitod */
2786 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2788 case 0xc9: /* fstod */
2789 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2791 case 0xcb: /* fqtod */
2792 CHECK_FPU_FEATURE(dc, FLOAT128);
2793 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2795 case 0xcc: /* fitoq */
2796 CHECK_FPU_FEATURE(dc, FLOAT128);
2797 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2799 case 0xcd: /* fstoq */
2800 CHECK_FPU_FEATURE(dc, FLOAT128);
2801 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2803 case 0xce: /* fdtoq */
2804 CHECK_FPU_FEATURE(dc, FLOAT128);
2805 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2807 case 0xd1: /* fstoi */
2808 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2810 case 0xd2: /* fdtoi */
2811 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2813 case 0xd3: /* fqtoi */
2814 CHECK_FPU_FEATURE(dc, FLOAT128);
2815 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2817 #ifdef TARGET_SPARC64
2818 case 0x2: /* V9 fmovd */
2819 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2820 gen_store_fpr_D(dc, rd, cpu_src1_64);
2822 case 0x3: /* V9 fmovq */
2823 CHECK_FPU_FEATURE(dc, FLOAT128);
2824 gen_move_Q(rd, rs2);
2826 case 0x6: /* V9 fnegd */
2827 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
2829 case 0x7: /* V9 fnegq */
2830 CHECK_FPU_FEATURE(dc, FLOAT128);
2831 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
2833 case 0xa: /* V9 fabsd */
2834 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
2836 case 0xb: /* V9 fabsq */
2837 CHECK_FPU_FEATURE(dc, FLOAT128);
2838 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
2840 case 0x81: /* V9 fstox */
2841 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
2843 case 0x82: /* V9 fdtox */
2844 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
2846 case 0x83: /* V9 fqtox */
2847 CHECK_FPU_FEATURE(dc, FLOAT128);
2848 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
2850 case 0x84: /* V9 fxtos */
2851 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
2853 case 0x88: /* V9 fxtod */
2854 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
2856 case 0x8c: /* V9 fxtoq */
2857 CHECK_FPU_FEATURE(dc, FLOAT128);
2858 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
2864 } else if (xop == 0x35) { /* FPU Operations */
2865 #ifdef TARGET_SPARC64
2868 if (gen_trap_ifnofpu(dc, cpu_cond))
2870 gen_op_clear_ieee_excp_and_FTT();
2871 rs1 = GET_FIELD(insn, 13, 17);
2872 rs2 = GET_FIELD(insn, 27, 31);
2873 xop = GET_FIELD(insn, 18, 26);
2874 save_state(dc, cpu_cond);
2875 #ifdef TARGET_SPARC64
2876 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2879 l1 = gen_new_label();
2880 cond = GET_FIELD_SP(insn, 14, 17);
2881 cpu_src1 = get_src1(insn, cpu_src1);
2882 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2884 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2885 gen_store_fpr_F(dc, rd, cpu_src1_32);
2888 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2891 l1 = gen_new_label();
2892 cond = GET_FIELD_SP(insn, 14, 17);
2893 cpu_src1 = get_src1(insn, cpu_src1);
2894 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2896 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2897 gen_store_fpr_D(dc, rd, cpu_src1_64);
2900 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2903 CHECK_FPU_FEATURE(dc, FLOAT128);
2904 l1 = gen_new_label();
2905 cond = GET_FIELD_SP(insn, 14, 17);
2906 cpu_src1 = get_src1(insn, cpu_src1);
2907 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2909 gen_move_Q(rd, rs2);
2915 #ifdef TARGET_SPARC64
2916 #define FMOVSCC(fcc) \
2921 l1 = gen_new_label(); \
2922 r_cond = tcg_temp_new(); \
2923 cond = GET_FIELD_SP(insn, 14, 17); \
2924 gen_fcond(r_cond, fcc, cond); \
2925 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2927 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
2928 gen_store_fpr_F(dc, rd, cpu_src1_32); \
2929 gen_set_label(l1); \
2930 tcg_temp_free(r_cond); \
2932 #define FMOVDCC(fcc) \
2937 l1 = gen_new_label(); \
2938 r_cond = tcg_temp_new(); \
2939 cond = GET_FIELD_SP(insn, 14, 17); \
2940 gen_fcond(r_cond, fcc, cond); \
2941 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2943 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
2944 gen_store_fpr_D(dc, rd, cpu_src1_64); \
2945 gen_set_label(l1); \
2946 tcg_temp_free(r_cond); \
2948 #define FMOVQCC(fcc) \
2953 l1 = gen_new_label(); \
2954 r_cond = tcg_temp_new(); \
2955 cond = GET_FIELD_SP(insn, 14, 17); \
2956 gen_fcond(r_cond, fcc, cond); \
2957 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2959 gen_move_Q(rd, rs2); \
2960 gen_set_label(l1); \
2961 tcg_temp_free(r_cond); \
2963 case 0x001: /* V9 fmovscc %fcc0 */
2966 case 0x002: /* V9 fmovdcc %fcc0 */
2969 case 0x003: /* V9 fmovqcc %fcc0 */
2970 CHECK_FPU_FEATURE(dc, FLOAT128);
2973 case 0x041: /* V9 fmovscc %fcc1 */
2976 case 0x042: /* V9 fmovdcc %fcc1 */
2979 case 0x043: /* V9 fmovqcc %fcc1 */
2980 CHECK_FPU_FEATURE(dc, FLOAT128);
2983 case 0x081: /* V9 fmovscc %fcc2 */
2986 case 0x082: /* V9 fmovdcc %fcc2 */
2989 case 0x083: /* V9 fmovqcc %fcc2 */
2990 CHECK_FPU_FEATURE(dc, FLOAT128);
2993 case 0x0c1: /* V9 fmovscc %fcc3 */
2996 case 0x0c2: /* V9 fmovdcc %fcc3 */
2999 case 0x0c3: /* V9 fmovqcc %fcc3 */
3000 CHECK_FPU_FEATURE(dc, FLOAT128);
3006 #define FMOVSCC(icc) \
3011 l1 = gen_new_label(); \
3012 r_cond = tcg_temp_new(); \
3013 cond = GET_FIELD_SP(insn, 14, 17); \
3014 gen_cond(r_cond, icc, cond, dc); \
3015 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3017 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3018 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3019 gen_set_label(l1); \
3020 tcg_temp_free(r_cond); \
3022 #define FMOVDCC(icc) \
3027 l1 = gen_new_label(); \
3028 r_cond = tcg_temp_new(); \
3029 cond = GET_FIELD_SP(insn, 14, 17); \
3030 gen_cond(r_cond, icc, cond, dc); \
3031 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3033 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3034 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3035 gen_update_fprs_dirty(DFPREG(rd)); \
3036 gen_set_label(l1); \
3037 tcg_temp_free(r_cond); \
3039 #define FMOVQCC(icc) \
3044 l1 = gen_new_label(); \
3045 r_cond = tcg_temp_new(); \
3046 cond = GET_FIELD_SP(insn, 14, 17); \
3047 gen_cond(r_cond, icc, cond, dc); \
3048 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3050 gen_move_Q(rd, rs2); \
3051 gen_set_label(l1); \
3052 tcg_temp_free(r_cond); \
3055 case 0x101: /* V9 fmovscc %icc */
3058 case 0x102: /* V9 fmovdcc %icc */
3061 case 0x103: /* V9 fmovqcc %icc */
3062 CHECK_FPU_FEATURE(dc, FLOAT128);
3065 case 0x181: /* V9 fmovscc %xcc */
3068 case 0x182: /* V9 fmovdcc %xcc */
3071 case 0x183: /* V9 fmovqcc %xcc */
3072 CHECK_FPU_FEATURE(dc, FLOAT128);
3079 case 0x51: /* fcmps, V9 %fcc */
3080 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3081 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3082 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3084 case 0x52: /* fcmpd, V9 %fcc */
3085 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3086 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3087 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3089 case 0x53: /* fcmpq, V9 %fcc */
3090 CHECK_FPU_FEATURE(dc, FLOAT128);
3091 gen_op_load_fpr_QT0(QFPREG(rs1));
3092 gen_op_load_fpr_QT1(QFPREG(rs2));
3093 gen_op_fcmpq(rd & 3);
3095 case 0x55: /* fcmpes, V9 %fcc */
3096 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3097 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3098 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3100 case 0x56: /* fcmped, V9 %fcc */
3101 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3102 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3103 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3105 case 0x57: /* fcmpeq, V9 %fcc */
3106 CHECK_FPU_FEATURE(dc, FLOAT128);
3107 gen_op_load_fpr_QT0(QFPREG(rs1));
3108 gen_op_load_fpr_QT1(QFPREG(rs2));
3109 gen_op_fcmpeq(rd & 3);
3114 } else if (xop == 0x2) {
3117 rs1 = GET_FIELD(insn, 13, 17);
3119 // or %g0, x, y -> mov T0, x; mov y, T0
3120 if (IS_IMM) { /* immediate */
3123 simm = GET_FIELDs(insn, 19, 31);
3124 r_const = tcg_const_tl(simm);
3125 gen_movl_TN_reg(rd, r_const);
3126 tcg_temp_free(r_const);
3127 } else { /* register */
3128 rs2 = GET_FIELD(insn, 27, 31);
3129 gen_movl_reg_TN(rs2, cpu_dst);
3130 gen_movl_TN_reg(rd, cpu_dst);
3133 cpu_src1 = get_src1(insn, cpu_src1);
3134 if (IS_IMM) { /* immediate */
3135 simm = GET_FIELDs(insn, 19, 31);
3136 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3137 gen_movl_TN_reg(rd, cpu_dst);
3138 } else { /* register */
3139 // or x, %g0, y -> mov T1, x; mov y, T1
3140 rs2 = GET_FIELD(insn, 27, 31);
3142 gen_movl_reg_TN(rs2, cpu_src2);
3143 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3144 gen_movl_TN_reg(rd, cpu_dst);
3146 gen_movl_TN_reg(rd, cpu_src1);
3149 #ifdef TARGET_SPARC64
3150 } else if (xop == 0x25) { /* sll, V9 sllx */
3151 cpu_src1 = get_src1(insn, cpu_src1);
3152 if (IS_IMM) { /* immediate */
3153 simm = GET_FIELDs(insn, 20, 31);
3154 if (insn & (1 << 12)) {
3155 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3157 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3159 } else { /* register */
3160 rs2 = GET_FIELD(insn, 27, 31);
3161 gen_movl_reg_TN(rs2, cpu_src2);
3162 if (insn & (1 << 12)) {
3163 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3165 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3167 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3169 gen_movl_TN_reg(rd, cpu_dst);
3170 } else if (xop == 0x26) { /* srl, V9 srlx */
3171 cpu_src1 = get_src1(insn, cpu_src1);
3172 if (IS_IMM) { /* immediate */
3173 simm = GET_FIELDs(insn, 20, 31);
3174 if (insn & (1 << 12)) {
3175 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3177 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3178 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3180 } else { /* register */
3181 rs2 = GET_FIELD(insn, 27, 31);
3182 gen_movl_reg_TN(rs2, cpu_src2);
3183 if (insn & (1 << 12)) {
3184 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3185 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3187 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3188 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3189 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3192 gen_movl_TN_reg(rd, cpu_dst);
3193 } else if (xop == 0x27) { /* sra, V9 srax */
3194 cpu_src1 = get_src1(insn, cpu_src1);
3195 if (IS_IMM) { /* immediate */
3196 simm = GET_FIELDs(insn, 20, 31);
3197 if (insn & (1 << 12)) {
3198 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3200 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3201 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3202 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3204 } else { /* register */
3205 rs2 = GET_FIELD(insn, 27, 31);
3206 gen_movl_reg_TN(rs2, cpu_src2);
3207 if (insn & (1 << 12)) {
3208 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3209 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3211 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3212 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3213 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3214 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3217 gen_movl_TN_reg(rd, cpu_dst);
3219 } else if (xop < 0x36) {
3221 cpu_src1 = get_src1(insn, cpu_src1);
3222 cpu_src2 = get_src2(insn, cpu_src2);
3223 switch (xop & ~0x10) {
3226 simm = GET_FIELDs(insn, 19, 31);
3228 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3229 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3230 dc->cc_op = CC_OP_ADD;
3232 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3236 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3237 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3238 dc->cc_op = CC_OP_ADD;
3240 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3246 simm = GET_FIELDs(insn, 19, 31);
3247 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3249 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3252 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3253 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3254 dc->cc_op = CC_OP_LOGIC;
3259 simm = GET_FIELDs(insn, 19, 31);
3260 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3262 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3265 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3266 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3267 dc->cc_op = CC_OP_LOGIC;
3272 simm = GET_FIELDs(insn, 19, 31);
3273 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3275 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3278 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3279 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3280 dc->cc_op = CC_OP_LOGIC;
3285 simm = GET_FIELDs(insn, 19, 31);
3287 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3289 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3293 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3294 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3295 dc->cc_op = CC_OP_SUB;
3297 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3301 case 0x5: /* andn */
3303 simm = GET_FIELDs(insn, 19, 31);
3304 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3306 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3309 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3310 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3311 dc->cc_op = CC_OP_LOGIC;
3316 simm = GET_FIELDs(insn, 19, 31);
3317 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3319 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3322 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3323 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3324 dc->cc_op = CC_OP_LOGIC;
3327 case 0x7: /* xorn */
3329 simm = GET_FIELDs(insn, 19, 31);
3330 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3332 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3333 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3336 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3337 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3338 dc->cc_op = CC_OP_LOGIC;
3341 case 0x8: /* addx, V9 addc */
3342 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3345 #ifdef TARGET_SPARC64
3346 case 0x9: /* V9 mulx */
3348 simm = GET_FIELDs(insn, 19, 31);
3349 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3351 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3355 case 0xa: /* umul */
3356 CHECK_IU_FEATURE(dc, MUL);
3357 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3359 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3360 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3361 dc->cc_op = CC_OP_LOGIC;
3364 case 0xb: /* smul */
3365 CHECK_IU_FEATURE(dc, MUL);
3366 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3368 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3369 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3370 dc->cc_op = CC_OP_LOGIC;
3373 case 0xc: /* subx, V9 subc */
3374 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3377 #ifdef TARGET_SPARC64
3378 case 0xd: /* V9 udivx */
3380 TCGv r_temp1, r_temp2;
3381 r_temp1 = tcg_temp_local_new();
3382 r_temp2 = tcg_temp_local_new();
3383 tcg_gen_mov_tl(r_temp1, cpu_src1);
3384 tcg_gen_mov_tl(r_temp2, cpu_src2);
3385 gen_trap_ifdivzero_tl(r_temp2);
3386 tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3387 tcg_temp_free(r_temp1);
3388 tcg_temp_free(r_temp2);
3392 case 0xe: /* udiv */
3393 CHECK_IU_FEATURE(dc, DIV);
3395 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3397 dc->cc_op = CC_OP_DIV;
3399 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3403 case 0xf: /* sdiv */
3404 CHECK_IU_FEATURE(dc, DIV);
3406 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3408 dc->cc_op = CC_OP_DIV;
3410 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3417 gen_movl_TN_reg(rd, cpu_dst);
3419 cpu_src1 = get_src1(insn, cpu_src1);
3420 cpu_src2 = get_src2(insn, cpu_src2);
3422 case 0x20: /* taddcc */
3423 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3424 gen_movl_TN_reg(rd, cpu_dst);
3425 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3426 dc->cc_op = CC_OP_TADD;
3428 case 0x21: /* tsubcc */
3429 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3430 gen_movl_TN_reg(rd, cpu_dst);
3431 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3432 dc->cc_op = CC_OP_TSUB;
3434 case 0x22: /* taddcctv */
3435 save_state(dc, cpu_cond);
3436 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3437 gen_movl_TN_reg(rd, cpu_dst);
3438 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3439 dc->cc_op = CC_OP_TADDTV;
3441 case 0x23: /* tsubcctv */
3442 save_state(dc, cpu_cond);
3443 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3444 gen_movl_TN_reg(rd, cpu_dst);
3445 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3446 dc->cc_op = CC_OP_TSUBTV;
3448 case 0x24: /* mulscc */
3449 gen_helper_compute_psr(cpu_env);
3450 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3451 gen_movl_TN_reg(rd, cpu_dst);
3452 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3453 dc->cc_op = CC_OP_ADD;
3455 #ifndef TARGET_SPARC64
3456 case 0x25: /* sll */
3457 if (IS_IMM) { /* immediate */
3458 simm = GET_FIELDs(insn, 20, 31);
3459 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3460 } else { /* register */
3461 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3462 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3464 gen_movl_TN_reg(rd, cpu_dst);
3466 case 0x26: /* srl */
3467 if (IS_IMM) { /* immediate */
3468 simm = GET_FIELDs(insn, 20, 31);
3469 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3470 } else { /* register */
3471 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3472 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3474 gen_movl_TN_reg(rd, cpu_dst);
3476 case 0x27: /* sra */
3477 if (IS_IMM) { /* immediate */
3478 simm = GET_FIELDs(insn, 20, 31);
3479 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3480 } else { /* register */
3481 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3482 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3484 gen_movl_TN_reg(rd, cpu_dst);
3491 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3492 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3494 #ifndef TARGET_SPARC64
3495 case 0x01 ... 0x0f: /* undefined in the
3499 case 0x10 ... 0x1f: /* implementation-dependent
3505 case 0x2: /* V9 wrccr */
3506 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3507 gen_helper_wrccr(cpu_env, cpu_dst);
3508 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3509 dc->cc_op = CC_OP_FLAGS;
3511 case 0x3: /* V9 wrasi */
3512 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3513 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3514 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3516 case 0x6: /* V9 wrfprs */
3517 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3518 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3519 save_state(dc, cpu_cond);
3524 case 0xf: /* V9 sir, nop if user */
3525 #if !defined(CONFIG_USER_ONLY)
3526 if (supervisor(dc)) {
3531 case 0x13: /* Graphics Status */
3532 if (gen_trap_ifnofpu(dc, cpu_cond))
3534 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3536 case 0x14: /* Softint set */
3537 if (!supervisor(dc))
3539 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3540 gen_helper_set_softint(cpu_env, cpu_tmp64);
3542 case 0x15: /* Softint clear */
3543 if (!supervisor(dc))
3545 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3546 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3548 case 0x16: /* Softint write */
3549 if (!supervisor(dc))
3551 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3552 gen_helper_write_softint(cpu_env, cpu_tmp64);
3554 case 0x17: /* Tick compare */
3555 #if !defined(CONFIG_USER_ONLY)
3556 if (!supervisor(dc))
3562 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3564 r_tickptr = tcg_temp_new_ptr();
3565 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3566 offsetof(CPUState, tick));
3567 gen_helper_tick_set_limit(r_tickptr,
3569 tcg_temp_free_ptr(r_tickptr);
3572 case 0x18: /* System tick */
3573 #if !defined(CONFIG_USER_ONLY)
3574 if (!supervisor(dc))
3580 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3582 r_tickptr = tcg_temp_new_ptr();
3583 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3584 offsetof(CPUState, stick));
3585 gen_helper_tick_set_count(r_tickptr,
3587 tcg_temp_free_ptr(r_tickptr);
3590 case 0x19: /* System tick compare */
3591 #if !defined(CONFIG_USER_ONLY)
3592 if (!supervisor(dc))
3598 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3600 r_tickptr = tcg_temp_new_ptr();
3601 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3602 offsetof(CPUState, stick));
3603 gen_helper_tick_set_limit(r_tickptr,
3605 tcg_temp_free_ptr(r_tickptr);
3609 case 0x10: /* Performance Control */
3610 case 0x11: /* Performance Instrumentation
3612 case 0x12: /* Dispatch Control */
3619 #if !defined(CONFIG_USER_ONLY)
3620 case 0x31: /* wrpsr, V9 saved, restored */
3622 if (!supervisor(dc))
3624 #ifdef TARGET_SPARC64
3627 gen_helper_saved(cpu_env);
3630 gen_helper_restored(cpu_env);
3632 case 2: /* UA2005 allclean */
3633 case 3: /* UA2005 otherw */
3634 case 4: /* UA2005 normalw */
3635 case 5: /* UA2005 invalw */
3641 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3642 gen_helper_wrpsr(cpu_env, cpu_dst);
3643 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3644 dc->cc_op = CC_OP_FLAGS;
3645 save_state(dc, cpu_cond);
3652 case 0x32: /* wrwim, V9 wrpr */
3654 if (!supervisor(dc))
3656 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3657 #ifdef TARGET_SPARC64
3663 r_tsptr = tcg_temp_new_ptr();
3664 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3665 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3666 offsetof(trap_state, tpc));
3667 tcg_temp_free_ptr(r_tsptr);
3674 r_tsptr = tcg_temp_new_ptr();
3675 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3676 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3677 offsetof(trap_state, tnpc));
3678 tcg_temp_free_ptr(r_tsptr);
3685 r_tsptr = tcg_temp_new_ptr();
3686 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3687 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3688 offsetof(trap_state,
3690 tcg_temp_free_ptr(r_tsptr);
3697 r_tsptr = tcg_temp_new_ptr();
3698 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3699 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3700 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3701 offsetof(trap_state, tt));
3702 tcg_temp_free_ptr(r_tsptr);
3709 r_tickptr = tcg_temp_new_ptr();
3710 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3711 offsetof(CPUState, tick));
3712 gen_helper_tick_set_count(r_tickptr,
3714 tcg_temp_free_ptr(r_tickptr);
3718 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3722 TCGv r_tmp = tcg_temp_local_new();
3724 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3725 save_state(dc, cpu_cond);
3726 gen_helper_wrpstate(cpu_env, r_tmp);
3727 tcg_temp_free(r_tmp);
3728 dc->npc = DYNAMIC_PC;
3733 TCGv r_tmp = tcg_temp_local_new();
3735 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3736 save_state(dc, cpu_cond);
3737 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3738 tcg_temp_free(r_tmp);
3739 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3740 offsetof(CPUSPARCState, tl));
3741 dc->npc = DYNAMIC_PC;
3745 gen_helper_wrpil(cpu_env, cpu_tmp0);
3748 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3751 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3752 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3753 offsetof(CPUSPARCState,
3756 case 11: // canrestore
3757 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3758 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3759 offsetof(CPUSPARCState,
3762 case 12: // cleanwin
3763 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3764 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3765 offsetof(CPUSPARCState,
3768 case 13: // otherwin
3769 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3770 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3771 offsetof(CPUSPARCState,
3775 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3776 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3777 offsetof(CPUSPARCState,
3780 case 16: // UA2005 gl
3781 CHECK_IU_FEATURE(dc, GL);
3782 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3783 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3784 offsetof(CPUSPARCState, gl));
3786 case 26: // UA2005 strand status
3787 CHECK_IU_FEATURE(dc, HYPV);
3788 if (!hypervisor(dc))
3790 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3796 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3797 if (dc->def->nwindows != 32)
3798 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3799 (1 << dc->def->nwindows) - 1);
3800 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3804 case 0x33: /* wrtbr, UA2005 wrhpr */
3806 #ifndef TARGET_SPARC64
3807 if (!supervisor(dc))
3809 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3811 CHECK_IU_FEATURE(dc, HYPV);
3812 if (!hypervisor(dc))
3814 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3817 // XXX gen_op_wrhpstate();
3818 save_state(dc, cpu_cond);
3824 // XXX gen_op_wrhtstate();
3827 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3830 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3832 case 31: // hstick_cmpr
3836 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3837 r_tickptr = tcg_temp_new_ptr();
3838 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3839 offsetof(CPUState, hstick));
3840 gen_helper_tick_set_limit(r_tickptr,
3842 tcg_temp_free_ptr(r_tickptr);
3845 case 6: // hver readonly
3853 #ifdef TARGET_SPARC64
3854 case 0x2c: /* V9 movcc */
3856 int cc = GET_FIELD_SP(insn, 11, 12);
3857 int cond = GET_FIELD_SP(insn, 14, 17);
3861 r_cond = tcg_temp_new();
3862 if (insn & (1 << 18)) {
3864 gen_cond(r_cond, 0, cond, dc);
3866 gen_cond(r_cond, 1, cond, dc);
3870 gen_fcond(r_cond, cc, cond);
3873 l1 = gen_new_label();
3875 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3876 if (IS_IMM) { /* immediate */
3879 simm = GET_FIELD_SPs(insn, 0, 10);
3880 r_const = tcg_const_tl(simm);
3881 gen_movl_TN_reg(rd, r_const);
3882 tcg_temp_free(r_const);
3884 rs2 = GET_FIELD_SP(insn, 0, 4);
3885 gen_movl_reg_TN(rs2, cpu_tmp0);
3886 gen_movl_TN_reg(rd, cpu_tmp0);
3889 tcg_temp_free(r_cond);
3892 case 0x2d: /* V9 sdivx */
3893 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3894 gen_movl_TN_reg(rd, cpu_dst);
3896 case 0x2e: /* V9 popc */
3898 cpu_src2 = get_src2(insn, cpu_src2);
3899 gen_helper_popc(cpu_dst, cpu_src2);
3900 gen_movl_TN_reg(rd, cpu_dst);
3902 case 0x2f: /* V9 movr */
3904 int cond = GET_FIELD_SP(insn, 10, 12);
3907 cpu_src1 = get_src1(insn, cpu_src1);
3909 l1 = gen_new_label();
3911 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3913 if (IS_IMM) { /* immediate */
3916 simm = GET_FIELD_SPs(insn, 0, 9);
3917 r_const = tcg_const_tl(simm);
3918 gen_movl_TN_reg(rd, r_const);
3919 tcg_temp_free(r_const);
3921 rs2 = GET_FIELD_SP(insn, 0, 4);
3922 gen_movl_reg_TN(rs2, cpu_tmp0);
3923 gen_movl_TN_reg(rd, cpu_tmp0);
3933 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3934 #ifdef TARGET_SPARC64
3935 int opf = GET_FIELD_SP(insn, 5, 13);
3936 rs1 = GET_FIELD(insn, 13, 17);
3937 rs2 = GET_FIELD(insn, 27, 31);
3938 if (gen_trap_ifnofpu(dc, cpu_cond))
3942 case 0x000: /* VIS I edge8cc */
3943 case 0x001: /* VIS II edge8n */
3944 case 0x002: /* VIS I edge8lcc */
3945 case 0x003: /* VIS II edge8ln */
3946 case 0x004: /* VIS I edge16cc */
3947 case 0x005: /* VIS II edge16n */
3948 case 0x006: /* VIS I edge16lcc */
3949 case 0x007: /* VIS II edge16ln */
3950 case 0x008: /* VIS I edge32cc */
3951 case 0x009: /* VIS II edge32n */
3952 case 0x00a: /* VIS I edge32lcc */
3953 case 0x00b: /* VIS II edge32ln */
3956 case 0x010: /* VIS I array8 */
3957 CHECK_FPU_FEATURE(dc, VIS1);
3958 cpu_src1 = get_src1(insn, cpu_src1);
3959 gen_movl_reg_TN(rs2, cpu_src2);
3960 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3961 gen_movl_TN_reg(rd, cpu_dst);
3963 case 0x012: /* VIS I array16 */
3964 CHECK_FPU_FEATURE(dc, VIS1);
3965 cpu_src1 = get_src1(insn, cpu_src1);
3966 gen_movl_reg_TN(rs2, cpu_src2);
3967 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3968 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3969 gen_movl_TN_reg(rd, cpu_dst);
3971 case 0x014: /* VIS I array32 */
3972 CHECK_FPU_FEATURE(dc, VIS1);
3973 cpu_src1 = get_src1(insn, cpu_src1);
3974 gen_movl_reg_TN(rs2, cpu_src2);
3975 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3976 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3977 gen_movl_TN_reg(rd, cpu_dst);
3979 case 0x018: /* VIS I alignaddr */
3980 CHECK_FPU_FEATURE(dc, VIS1);
3981 cpu_src1 = get_src1(insn, cpu_src1);
3982 gen_movl_reg_TN(rs2, cpu_src2);
3983 gen_helper_alignaddr(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3984 gen_movl_TN_reg(rd, cpu_dst);
3986 case 0x019: /* VIS II bmask */
3987 case 0x01a: /* VIS I alignaddrl */
3990 case 0x020: /* VIS I fcmple16 */
3991 CHECK_FPU_FEATURE(dc, VIS1);
3992 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3993 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3994 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
3995 gen_movl_TN_reg(rd, cpu_dst);
3997 case 0x022: /* VIS I fcmpne16 */
3998 CHECK_FPU_FEATURE(dc, VIS1);
3999 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4000 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4001 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4002 gen_movl_TN_reg(rd, cpu_dst);
4004 case 0x024: /* VIS I fcmple32 */
4005 CHECK_FPU_FEATURE(dc, VIS1);
4006 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4007 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4008 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4009 gen_movl_TN_reg(rd, cpu_dst);
4011 case 0x026: /* VIS I fcmpne32 */
4012 CHECK_FPU_FEATURE(dc, VIS1);
4013 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4014 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4015 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4016 gen_movl_TN_reg(rd, cpu_dst);
4018 case 0x028: /* VIS I fcmpgt16 */
4019 CHECK_FPU_FEATURE(dc, VIS1);
4020 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4021 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4022 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4023 gen_movl_TN_reg(rd, cpu_dst);
4025 case 0x02a: /* VIS I fcmpeq16 */
4026 CHECK_FPU_FEATURE(dc, VIS1);
4027 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4028 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4029 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4030 gen_movl_TN_reg(rd, cpu_dst);
4032 case 0x02c: /* VIS I fcmpgt32 */
4033 CHECK_FPU_FEATURE(dc, VIS1);
4034 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4035 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4036 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4037 gen_movl_TN_reg(rd, cpu_dst);
4039 case 0x02e: /* VIS I fcmpeq32 */
4040 CHECK_FPU_FEATURE(dc, VIS1);
4041 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4042 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4043 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4044 gen_movl_TN_reg(rd, cpu_dst);
4046 case 0x031: /* VIS I fmul8x16 */
4047 CHECK_FPU_FEATURE(dc, VIS1);
4048 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4050 case 0x033: /* VIS I fmul8x16au */
4051 CHECK_FPU_FEATURE(dc, VIS1);
4052 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4054 case 0x035: /* VIS I fmul8x16al */
4055 CHECK_FPU_FEATURE(dc, VIS1);
4056 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4058 case 0x036: /* VIS I fmul8sux16 */
4059 CHECK_FPU_FEATURE(dc, VIS1);
4060 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4062 case 0x037: /* VIS I fmul8ulx16 */
4063 CHECK_FPU_FEATURE(dc, VIS1);
4064 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4066 case 0x038: /* VIS I fmuld8sux16 */
4067 CHECK_FPU_FEATURE(dc, VIS1);
4068 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4070 case 0x039: /* VIS I fmuld8ulx16 */
4071 CHECK_FPU_FEATURE(dc, VIS1);
4072 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4074 case 0x03a: /* VIS I fpack32 */
4075 case 0x03b: /* VIS I fpack16 */
4076 case 0x03d: /* VIS I fpackfix */
4078 case 0x03e: /* VIS I pdist */
4079 CHECK_FPU_FEATURE(dc, VIS1);
4080 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4082 case 0x048: /* VIS I faligndata */
4083 CHECK_FPU_FEATURE(dc, VIS1);
4084 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4085 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4086 cpu_dst_64 = gen_dest_fpr_D();
4087 gen_helper_faligndata(cpu_dst_64, cpu_env,
4088 cpu_src1_64, cpu_src2_64);
4089 gen_store_fpr_D(dc, rd, cpu_dst_64);
4091 case 0x04b: /* VIS I fpmerge */
4092 CHECK_FPU_FEATURE(dc, VIS1);
4093 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4095 case 0x04c: /* VIS II bshuffle */
4098 case 0x04d: /* VIS I fexpand */
4099 CHECK_FPU_FEATURE(dc, VIS1);
4100 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4102 case 0x050: /* VIS I fpadd16 */
4103 CHECK_FPU_FEATURE(dc, VIS1);
4104 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4106 case 0x051: /* VIS I fpadd16s */
4107 CHECK_FPU_FEATURE(dc, VIS1);
4108 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4110 case 0x052: /* VIS I fpadd32 */
4111 CHECK_FPU_FEATURE(dc, VIS1);
4112 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4114 case 0x053: /* VIS I fpadd32s */
4115 CHECK_FPU_FEATURE(dc, VIS1);
4116 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4118 case 0x054: /* VIS I fpsub16 */
4119 CHECK_FPU_FEATURE(dc, VIS1);
4120 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4122 case 0x055: /* VIS I fpsub16s */
4123 CHECK_FPU_FEATURE(dc, VIS1);
4124 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4126 case 0x056: /* VIS I fpsub32 */
4127 CHECK_FPU_FEATURE(dc, VIS1);
4128 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4130 case 0x057: /* VIS I fpsub32s */
4131 CHECK_FPU_FEATURE(dc, VIS1);
4132 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4134 case 0x060: /* VIS I fzero */
4135 CHECK_FPU_FEATURE(dc, VIS1);
4136 cpu_dst_64 = gen_dest_fpr_D();
4137 tcg_gen_movi_i64(cpu_dst_64, 0);
4138 gen_store_fpr_D(dc, rd, cpu_dst_64);
4140 case 0x061: /* VIS I fzeros */
4141 CHECK_FPU_FEATURE(dc, VIS1);
4142 cpu_dst_32 = gen_dest_fpr_F();
4143 tcg_gen_movi_i32(cpu_dst_32, 0);
4144 gen_store_fpr_F(dc, rd, cpu_dst_32);
4146 case 0x062: /* VIS I fnor */
4147 CHECK_FPU_FEATURE(dc, VIS1);
4148 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4150 case 0x063: /* VIS I fnors */
4151 CHECK_FPU_FEATURE(dc, VIS1);
4152 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4154 case 0x064: /* VIS I fandnot2 */
4155 CHECK_FPU_FEATURE(dc, VIS1);
4156 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4158 case 0x065: /* VIS I fandnot2s */
4159 CHECK_FPU_FEATURE(dc, VIS1);
4160 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4162 case 0x066: /* VIS I fnot2 */
4163 CHECK_FPU_FEATURE(dc, VIS1);
4164 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4166 case 0x067: /* VIS I fnot2s */
4167 CHECK_FPU_FEATURE(dc, VIS1);
4168 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4170 case 0x068: /* VIS I fandnot1 */
4171 CHECK_FPU_FEATURE(dc, VIS1);
4172 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4174 case 0x069: /* VIS I fandnot1s */
4175 CHECK_FPU_FEATURE(dc, VIS1);
4176 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4178 case 0x06a: /* VIS I fnot1 */
4179 CHECK_FPU_FEATURE(dc, VIS1);
4180 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4182 case 0x06b: /* VIS I fnot1s */
4183 CHECK_FPU_FEATURE(dc, VIS1);
4184 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4186 case 0x06c: /* VIS I fxor */
4187 CHECK_FPU_FEATURE(dc, VIS1);
4188 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4190 case 0x06d: /* VIS I fxors */
4191 CHECK_FPU_FEATURE(dc, VIS1);
4192 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4194 case 0x06e: /* VIS I fnand */
4195 CHECK_FPU_FEATURE(dc, VIS1);
4196 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4198 case 0x06f: /* VIS I fnands */
4199 CHECK_FPU_FEATURE(dc, VIS1);
4200 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4202 case 0x070: /* VIS I fand */
4203 CHECK_FPU_FEATURE(dc, VIS1);
4204 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4206 case 0x071: /* VIS I fands */
4207 CHECK_FPU_FEATURE(dc, VIS1);
4208 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4210 case 0x072: /* VIS I fxnor */
4211 CHECK_FPU_FEATURE(dc, VIS1);
4212 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4214 case 0x073: /* VIS I fxnors */
4215 CHECK_FPU_FEATURE(dc, VIS1);
4216 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4218 case 0x074: /* VIS I fsrc1 */
4219 CHECK_FPU_FEATURE(dc, VIS1);
4220 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4221 gen_store_fpr_D(dc, rd, cpu_src1_64);
4223 case 0x075: /* VIS I fsrc1s */
4224 CHECK_FPU_FEATURE(dc, VIS1);
4225 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4226 gen_store_fpr_F(dc, rd, cpu_src1_32);
4228 case 0x076: /* VIS I fornot2 */
4229 CHECK_FPU_FEATURE(dc, VIS1);
4230 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4232 case 0x077: /* VIS I fornot2s */
4233 CHECK_FPU_FEATURE(dc, VIS1);
4234 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4236 case 0x078: /* VIS I fsrc2 */
4237 CHECK_FPU_FEATURE(dc, VIS1);
4238 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4239 gen_store_fpr_D(dc, rd, cpu_src1_64);
4241 case 0x079: /* VIS I fsrc2s */
4242 CHECK_FPU_FEATURE(dc, VIS1);
4243 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4244 gen_store_fpr_F(dc, rd, cpu_src1_32);
4246 case 0x07a: /* VIS I fornot1 */
4247 CHECK_FPU_FEATURE(dc, VIS1);
4248 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4250 case 0x07b: /* VIS I fornot1s */
4251 CHECK_FPU_FEATURE(dc, VIS1);
4252 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4254 case 0x07c: /* VIS I for */
4255 CHECK_FPU_FEATURE(dc, VIS1);
4256 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4258 case 0x07d: /* VIS I fors */
4259 CHECK_FPU_FEATURE(dc, VIS1);
4260 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4262 case 0x07e: /* VIS I fone */
4263 CHECK_FPU_FEATURE(dc, VIS1);
4264 cpu_dst_64 = gen_dest_fpr_D();
4265 tcg_gen_movi_i64(cpu_dst_64, -1);
4266 gen_store_fpr_D(dc, rd, cpu_dst_64);
4268 case 0x07f: /* VIS I fones */
4269 CHECK_FPU_FEATURE(dc, VIS1);
4270 cpu_dst_32 = gen_dest_fpr_F();
4271 tcg_gen_movi_i32(cpu_dst_32, -1);
4272 gen_store_fpr_F(dc, rd, cpu_dst_32);
4274 case 0x080: /* VIS I shutdown */
4275 case 0x081: /* VIS II siam */
4284 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4285 #ifdef TARGET_SPARC64
4290 #ifdef TARGET_SPARC64
4291 } else if (xop == 0x39) { /* V9 return */
4294 save_state(dc, cpu_cond);
4295 cpu_src1 = get_src1(insn, cpu_src1);
4296 if (IS_IMM) { /* immediate */
4297 simm = GET_FIELDs(insn, 19, 31);
4298 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4299 } else { /* register */
4300 rs2 = GET_FIELD(insn, 27, 31);
4302 gen_movl_reg_TN(rs2, cpu_src2);
4303 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4305 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4307 gen_helper_restore(cpu_env);
4308 gen_mov_pc_npc(dc, cpu_cond);
4309 r_const = tcg_const_i32(3);
4310 gen_helper_check_align(cpu_dst, r_const);
4311 tcg_temp_free_i32(r_const);
4312 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4313 dc->npc = DYNAMIC_PC;
4317 cpu_src1 = get_src1(insn, cpu_src1);
4318 if (IS_IMM) { /* immediate */
4319 simm = GET_FIELDs(insn, 19, 31);
4320 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4321 } else { /* register */
4322 rs2 = GET_FIELD(insn, 27, 31);
4324 gen_movl_reg_TN(rs2, cpu_src2);
4325 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4327 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4330 case 0x38: /* jmpl */
4335 r_pc = tcg_const_tl(dc->pc);
4336 gen_movl_TN_reg(rd, r_pc);
4337 tcg_temp_free(r_pc);
4338 gen_mov_pc_npc(dc, cpu_cond);
4339 r_const = tcg_const_i32(3);
4340 gen_helper_check_align(cpu_dst, r_const);
4341 tcg_temp_free_i32(r_const);
4342 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4343 dc->npc = DYNAMIC_PC;
4346 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4347 case 0x39: /* rett, V9 return */
4351 if (!supervisor(dc))
4353 gen_mov_pc_npc(dc, cpu_cond);
4354 r_const = tcg_const_i32(3);
4355 gen_helper_check_align(cpu_dst, r_const);
4356 tcg_temp_free_i32(r_const);
4357 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4358 dc->npc = DYNAMIC_PC;
4359 gen_helper_rett(cpu_env);
4363 case 0x3b: /* flush */
4364 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4368 case 0x3c: /* save */
4369 save_state(dc, cpu_cond);
4370 gen_helper_save(cpu_env);
4371 gen_movl_TN_reg(rd, cpu_dst);
4373 case 0x3d: /* restore */
4374 save_state(dc, cpu_cond);
4375 gen_helper_restore(cpu_env);
4376 gen_movl_TN_reg(rd, cpu_dst);
4378 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4379 case 0x3e: /* V9 done/retry */
4383 if (!supervisor(dc))
4385 dc->npc = DYNAMIC_PC;
4386 dc->pc = DYNAMIC_PC;
4387 gen_helper_done(cpu_env);
4390 if (!supervisor(dc))
4392 dc->npc = DYNAMIC_PC;
4393 dc->pc = DYNAMIC_PC;
4394 gen_helper_retry(cpu_env);
4409 case 3: /* load/store instructions */
4411 unsigned int xop = GET_FIELD(insn, 7, 12);
4413 /* flush pending conditional evaluations before exposing
4415 if (dc->cc_op != CC_OP_FLAGS) {
4416 dc->cc_op = CC_OP_FLAGS;
4417 gen_helper_compute_psr(cpu_env);
4419 cpu_src1 = get_src1(insn, cpu_src1);
4420 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4421 rs2 = GET_FIELD(insn, 27, 31);
4422 gen_movl_reg_TN(rs2, cpu_src2);
4423 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4424 } else if (IS_IMM) { /* immediate */
4425 simm = GET_FIELDs(insn, 19, 31);
4426 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4427 } else { /* register */
4428 rs2 = GET_FIELD(insn, 27, 31);
4430 gen_movl_reg_TN(rs2, cpu_src2);
4431 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4433 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4435 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4436 (xop > 0x17 && xop <= 0x1d ) ||
4437 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4439 case 0x0: /* ld, V9 lduw, load unsigned word */
4440 gen_address_mask(dc, cpu_addr);
4441 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4443 case 0x1: /* ldub, load unsigned byte */
4444 gen_address_mask(dc, cpu_addr);
4445 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4447 case 0x2: /* lduh, load unsigned halfword */
4448 gen_address_mask(dc, cpu_addr);
4449 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4451 case 0x3: /* ldd, load double word */
4457 save_state(dc, cpu_cond);
4458 r_const = tcg_const_i32(7);
4459 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4460 tcg_temp_free_i32(r_const);
4461 gen_address_mask(dc, cpu_addr);
4462 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4463 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4464 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4465 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4466 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4467 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4468 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4471 case 0x9: /* ldsb, load signed byte */
4472 gen_address_mask(dc, cpu_addr);
4473 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4475 case 0xa: /* ldsh, load signed halfword */
4476 gen_address_mask(dc, cpu_addr);
4477 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4479 case 0xd: /* ldstub -- XXX: should be atomically */
4483 gen_address_mask(dc, cpu_addr);
4484 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4485 r_const = tcg_const_tl(0xff);
4486 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4487 tcg_temp_free(r_const);
4490 case 0x0f: /* swap, swap register with memory. Also
4492 CHECK_IU_FEATURE(dc, SWAP);
4493 gen_movl_reg_TN(rd, cpu_val);
4494 gen_address_mask(dc, cpu_addr);
4495 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4496 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4497 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4499 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4500 case 0x10: /* lda, V9 lduwa, load word alternate */
4501 #ifndef TARGET_SPARC64
4504 if (!supervisor(dc))
4507 save_state(dc, cpu_cond);
4508 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4510 case 0x11: /* lduba, load unsigned byte alternate */
4511 #ifndef TARGET_SPARC64
4514 if (!supervisor(dc))
4517 save_state(dc, cpu_cond);
4518 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4520 case 0x12: /* lduha, load unsigned halfword alternate */
4521 #ifndef TARGET_SPARC64
4524 if (!supervisor(dc))
4527 save_state(dc, cpu_cond);
4528 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4530 case 0x13: /* ldda, load double word alternate */
4531 #ifndef TARGET_SPARC64
4534 if (!supervisor(dc))
4539 save_state(dc, cpu_cond);
4540 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4542 case 0x19: /* ldsba, load signed byte alternate */
4543 #ifndef TARGET_SPARC64
4546 if (!supervisor(dc))
4549 save_state(dc, cpu_cond);
4550 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4552 case 0x1a: /* ldsha, load signed halfword alternate */
4553 #ifndef TARGET_SPARC64
4556 if (!supervisor(dc))
4559 save_state(dc, cpu_cond);
4560 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4562 case 0x1d: /* ldstuba -- XXX: should be atomically */
4563 #ifndef TARGET_SPARC64
4566 if (!supervisor(dc))
4569 save_state(dc, cpu_cond);
4570 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4572 case 0x1f: /* swapa, swap reg with alt. memory. Also
4574 CHECK_IU_FEATURE(dc, SWAP);
4575 #ifndef TARGET_SPARC64
4578 if (!supervisor(dc))
4581 save_state(dc, cpu_cond);
4582 gen_movl_reg_TN(rd, cpu_val);
4583 gen_swap_asi(cpu_val, cpu_addr, insn);
4586 #ifndef TARGET_SPARC64
4587 case 0x30: /* ldc */
4588 case 0x31: /* ldcsr */
4589 case 0x33: /* lddc */
4593 #ifdef TARGET_SPARC64
4594 case 0x08: /* V9 ldsw */
4595 gen_address_mask(dc, cpu_addr);
4596 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4598 case 0x0b: /* V9 ldx */
4599 gen_address_mask(dc, cpu_addr);
4600 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4602 case 0x18: /* V9 ldswa */
4603 save_state(dc, cpu_cond);
4604 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4606 case 0x1b: /* V9 ldxa */
4607 save_state(dc, cpu_cond);
4608 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4610 case 0x2d: /* V9 prefetch, no effect */
4612 case 0x30: /* V9 ldfa */
4613 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4616 save_state(dc, cpu_cond);
4617 gen_ldf_asi(cpu_addr, insn, 4, rd);
4618 gen_update_fprs_dirty(rd);
4620 case 0x33: /* V9 lddfa */
4621 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4624 save_state(dc, cpu_cond);
4625 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4626 gen_update_fprs_dirty(DFPREG(rd));
4628 case 0x3d: /* V9 prefetcha, no effect */
4630 case 0x32: /* V9 ldqfa */
4631 CHECK_FPU_FEATURE(dc, FLOAT128);
4632 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4635 save_state(dc, cpu_cond);
4636 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4637 gen_update_fprs_dirty(QFPREG(rd));
4643 gen_movl_TN_reg(rd, cpu_val);
4644 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4647 } else if (xop >= 0x20 && xop < 0x24) {
4648 if (gen_trap_ifnofpu(dc, cpu_cond))
4650 save_state(dc, cpu_cond);
4652 case 0x20: /* ldf, load fpreg */
4653 gen_address_mask(dc, cpu_addr);
4654 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4655 cpu_dst_32 = gen_dest_fpr_F();
4656 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4657 gen_store_fpr_F(dc, rd, cpu_dst_32);
4659 case 0x21: /* ldfsr, V9 ldxfsr */
4660 #ifdef TARGET_SPARC64
4661 gen_address_mask(dc, cpu_addr);
4663 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4664 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4666 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4667 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4668 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4672 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4673 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4677 case 0x22: /* ldqf, load quad fpreg */
4681 CHECK_FPU_FEATURE(dc, FLOAT128);
4682 r_const = tcg_const_i32(dc->mem_idx);
4683 gen_address_mask(dc, cpu_addr);
4684 gen_helper_ldqf(cpu_addr, r_const);
4685 tcg_temp_free_i32(r_const);
4686 gen_op_store_QT0_fpr(QFPREG(rd));
4687 gen_update_fprs_dirty(QFPREG(rd));
4690 case 0x23: /* lddf, load double fpreg */
4691 gen_address_mask(dc, cpu_addr);
4692 cpu_dst_64 = gen_dest_fpr_D();
4693 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4694 gen_store_fpr_D(dc, rd, cpu_dst_64);
4699 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4700 xop == 0xe || xop == 0x1e) {
4701 gen_movl_reg_TN(rd, cpu_val);
4703 case 0x4: /* st, store word */
4704 gen_address_mask(dc, cpu_addr);
4705 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4707 case 0x5: /* stb, store byte */
4708 gen_address_mask(dc, cpu_addr);
4709 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4711 case 0x6: /* sth, store halfword */
4712 gen_address_mask(dc, cpu_addr);
4713 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4715 case 0x7: /* std, store double word */
4721 save_state(dc, cpu_cond);
4722 gen_address_mask(dc, cpu_addr);
4723 r_const = tcg_const_i32(7);
4724 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4725 tcg_temp_free_i32(r_const);
4726 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4727 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4728 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4731 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4732 case 0x14: /* sta, V9 stwa, store word alternate */
4733 #ifndef TARGET_SPARC64
4736 if (!supervisor(dc))
4739 save_state(dc, cpu_cond);
4740 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4741 dc->npc = DYNAMIC_PC;
4743 case 0x15: /* stba, store byte alternate */
4744 #ifndef TARGET_SPARC64
4747 if (!supervisor(dc))
4750 save_state(dc, cpu_cond);
4751 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4752 dc->npc = DYNAMIC_PC;
4754 case 0x16: /* stha, store halfword alternate */
4755 #ifndef TARGET_SPARC64
4758 if (!supervisor(dc))
4761 save_state(dc, cpu_cond);
4762 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4763 dc->npc = DYNAMIC_PC;
4765 case 0x17: /* stda, store double word alternate */
4766 #ifndef TARGET_SPARC64
4769 if (!supervisor(dc))
4775 save_state(dc, cpu_cond);
4776 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4780 #ifdef TARGET_SPARC64
4781 case 0x0e: /* V9 stx */
4782 gen_address_mask(dc, cpu_addr);
4783 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4785 case 0x1e: /* V9 stxa */
4786 save_state(dc, cpu_cond);
4787 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4788 dc->npc = DYNAMIC_PC;
4794 } else if (xop > 0x23 && xop < 0x28) {
4795 if (gen_trap_ifnofpu(dc, cpu_cond))
4797 save_state(dc, cpu_cond);
4799 case 0x24: /* stf, store fpreg */
4800 gen_address_mask(dc, cpu_addr);
4801 cpu_src1_32 = gen_load_fpr_F(dc, rd);
4802 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
4803 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4805 case 0x25: /* stfsr, V9 stxfsr */
4806 #ifdef TARGET_SPARC64
4807 gen_address_mask(dc, cpu_addr);
4808 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4810 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4812 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4814 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4815 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4819 #ifdef TARGET_SPARC64
4820 /* V9 stqf, store quad fpreg */
4824 CHECK_FPU_FEATURE(dc, FLOAT128);
4825 gen_op_load_fpr_QT0(QFPREG(rd));
4826 r_const = tcg_const_i32(dc->mem_idx);
4827 gen_address_mask(dc, cpu_addr);
4828 gen_helper_stqf(cpu_addr, r_const);
4829 tcg_temp_free_i32(r_const);
4832 #else /* !TARGET_SPARC64 */
4833 /* stdfq, store floating point queue */
4834 #if defined(CONFIG_USER_ONLY)
4837 if (!supervisor(dc))
4839 if (gen_trap_ifnofpu(dc, cpu_cond))
4844 case 0x27: /* stdf, store double fpreg */
4845 gen_address_mask(dc, cpu_addr);
4846 cpu_src1_64 = gen_load_fpr_D(dc, rd);
4847 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
4852 } else if (xop > 0x33 && xop < 0x3f) {
4853 save_state(dc, cpu_cond);
4855 #ifdef TARGET_SPARC64
4856 case 0x34: /* V9 stfa */
4857 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4860 gen_stf_asi(cpu_addr, insn, 4, rd);
4862 case 0x36: /* V9 stqfa */
4866 CHECK_FPU_FEATURE(dc, FLOAT128);
4867 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4870 r_const = tcg_const_i32(7);
4871 gen_helper_check_align(cpu_addr, r_const);
4872 tcg_temp_free_i32(r_const);
4873 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4876 case 0x37: /* V9 stdfa */
4877 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4880 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4882 case 0x3c: /* V9 casa */
4883 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4884 gen_movl_TN_reg(rd, cpu_val);
4886 case 0x3e: /* V9 casxa */
4887 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4888 gen_movl_TN_reg(rd, cpu_val);
4891 case 0x34: /* stc */
4892 case 0x35: /* stcsr */
4893 case 0x36: /* stdcq */
4894 case 0x37: /* stdc */
4905 /* default case for non jump instructions */
4906 if (dc->npc == DYNAMIC_PC) {
4907 dc->pc = DYNAMIC_PC;
4909 } else if (dc->npc == JUMP_PC) {
4910 /* we can do a static jump */
4911 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4915 dc->npc = dc->npc + 4;
4923 save_state(dc, cpu_cond);
4924 r_const = tcg_const_i32(TT_ILL_INSN);
4925 gen_helper_raise_exception(cpu_env, r_const);
4926 tcg_temp_free_i32(r_const);
4934 save_state(dc, cpu_cond);
4935 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4936 gen_helper_raise_exception(cpu_env, r_const);
4937 tcg_temp_free_i32(r_const);
4941 #if !defined(CONFIG_USER_ONLY)
4946 save_state(dc, cpu_cond);
4947 r_const = tcg_const_i32(TT_PRIV_INSN);
4948 gen_helper_raise_exception(cpu_env, r_const);
4949 tcg_temp_free_i32(r_const);
4955 save_state(dc, cpu_cond);
4956 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4959 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4961 save_state(dc, cpu_cond);
4962 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4966 #ifndef TARGET_SPARC64
4971 save_state(dc, cpu_cond);
4972 r_const = tcg_const_i32(TT_NCP_INSN);
4973 gen_helper_raise_exception(cpu_env, r_const);
4974 tcg_temp_free(r_const);
4980 tcg_temp_free(cpu_tmp1);
4981 tcg_temp_free(cpu_tmp2);
4982 if (dc->n_t32 != 0) {
4984 for (i = dc->n_t32 - 1; i >= 0; --i) {
4985 tcg_temp_free_i32(dc->t32[i]);
/*
 * Core translation loop: disassemble guest SPARC instructions starting at
 * tb->pc and emit the corresponding TCG opcode stream for one
 * TranslationBlock.
 *
 * 'spc' distinguishes the two public entry points (0 from
 * gen_intermediate_code, 1 from gen_intermediate_code_pc); presumably 1 is
 * the "search PC" mode used when restoring CPU state from a host PC -- see
 * the "Search PC..." log below.  TODO(review): confirm against callers.
 *
 * NOTE(review): this view of the file is elided; statements between the
 * numbered lines below are not shown.
 */
4991 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4992                                                   int spc, CPUSPARCState *env)
4994     target_ulong pc_start, last_pc;
4995     uint16_t *gen_opc_end;
4996     DisasContext dc1, *dc = &dc1;
/* Start from a zeroed DisasContext, then seed it from the TB and env. */
5002     memset(dc, 0, sizeof(DisasContext));
/* npc rides in cs_base; cc_op starts DYNAMIC until an insn pins it. */
5007     dc->npc = (target_ulong) tb->cs_base;
5008     dc->cc_op = CC_OP_DYNAMIC;
5009     dc->mem_idx = cpu_mmu_index(env);
/* FPU-enabled and 32-bit-address-mask states are encoded in tb->flags. */
5011     dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5012     dc->address_mask_32bit = tb_am_enabled(tb->flags);
5013     dc->singlestep = (env->singlestep_enabled || singlestep);
5014     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
/* Allocate the per-translation temporaries used by the file-scope
   cpu_tmp*/cpu_dst/cpu_val/cpu_addr generator variables. */
5016     cpu_tmp0 = tcg_temp_new();
5017     cpu_tmp32 = tcg_temp_new_i32();
5018     cpu_tmp64 = tcg_temp_new_i64();
/* Locals (not plain temps): these must survive brcond boundaries. */
5020     cpu_dst = tcg_temp_local_new();
5023     cpu_val = tcg_temp_local_new();
5024     cpu_addr = tcg_temp_local_new();
/* Instruction budget for icount; 0 means "no limit" -> use the mask max. */
5027     max_insns = tb->cflags & CF_COUNT_MASK;
5029         max_insns = CF_COUNT_MASK;
/* Stop translation and trap into the debugger at guest breakpoints. */
5032         if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5033             QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5034                 if (bp->pc == dc->pc) {
5035                     if (dc->pc != pc_start)
5036                         save_state(dc, cpu_cond);
5037                     gen_helper_debug(cpu_env);
/* spc mode: record per-opcode pc/npc/icount so state can be recovered
   later (consumed by restore_state_to_opc). */
5045                 qemu_log("Search PC...\n");
5046             j = gen_opc_ptr - gen_opc_buf;
5050                     gen_opc_instr_start[lj++] = 0;
5051                 gen_opc_pc[lj] = dc->pc;
5052                 gen_opc_npc[lj] = dc->npc;
5053                 gen_opc_instr_start[lj] = 1;
5054                 gen_opc_icount[lj] = num_insns;
5057         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
/* Translate one guest instruction; may update dc->pc/npc. */
5060         disas_sparc_insn(dc);
5065         /* if the next PC is different, we abort now */
5066         if (dc->pc != (last_pc + 4))
5068         /* if we reach a page boundary, we stop generation so that the
5069            PC of a TT_TFAULT exception is always in the right page */
5070         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5072         /* if single step mode, we generate only one instruction and
5073            generate an exception */
5074         if (dc->singlestep) {
/* Stop when the opcode buffer fills, the TB nears a page boundary,
   or the icount budget is exhausted. */
5077     } while ((gen_opc_ptr < gen_opc_end) &&
5078              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5079              num_insns < max_insns);
/* Release the temporaries allocated above (reverse order). */
5082     tcg_temp_free(cpu_addr);
5083     tcg_temp_free(cpu_val);
5084     tcg_temp_free(cpu_dst);
5085     tcg_temp_free_i64(cpu_tmp64);
5086     tcg_temp_free_i32(cpu_tmp32);
5087     tcg_temp_free(cpu_tmp0);
5089     if (tb->cflags & CF_LAST_IO)
/* Epilogue: if both PC and NPC are compile-time known we can chain
   directly to the next TB; otherwise flush them to cpu_pc/cpu_npc. */
5092     if (dc->pc != DYNAMIC_PC &&
5093         (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5094         /* static PC and NPC: we can use direct chaining */
5095         gen_goto_tb(dc, 0, dc->pc, dc->npc);
5097         if (dc->pc != DYNAMIC_PC)
5098             tcg_gen_movi_tl(cpu_pc, dc->pc);
5099         save_npc(dc, cpu_cond);
5103     gen_icount_end(tb, num_insns);
5104     *gen_opc_ptr = INDEX_op_end;
/* spc mode: pad the instr_start table and stash the conditional-branch
   targets for restore_state_to_opc. */
5106         j = gen_opc_ptr - gen_opc_buf;
5109             gen_opc_instr_start[lj++] = 0;
5113         gen_opc_jump_pc[0] = dc->jump_pc[0];
5114         gen_opc_jump_pc[1] = dc->jump_pc[1];
/* Normal mode: publish the TB's guest size and instruction count. */
5116         tb->size = last_pc + 4 - pc_start;
5117         tb->icount = num_insns;
/* Optional disassembly dump of the guest code just translated. */
5120     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5121         qemu_log("--------------\n");
5122         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5123         log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
/* Public entry point: translate one TB (spc = 0, normal translation). */
5129 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5131     gen_intermediate_code_internal(tb, 0, env);
/* Public entry point: re-translate one TB with spc = 1, which makes the
   internal loop record per-opcode pc/npc state (presumably for restoring
   guest state from a host PC -- confirm against callers). */
5134 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5136     gen_intermediate_code_internal(tb, 1, env);
/*
 * One-time translator initialization: create the TCG global variables
 * (declared at file scope above) that mirror CPUState fields, so generated
 * code can read/write guest registers and control state directly.
 *
 * NOTE(review): this view of the file is elided; register-name string
 * entries and some calls between the numbered lines are not shown.
 */
5139 void gen_intermediate_code_init(CPUSPARCState *env)
/* Debug names for the global registers; g0 is hardwired to zero and
   never allocated as a TCG global. */
5143     static const char * const gregnames[8] = {
5144         NULL, // g0 not used
/* Double-precision FP register names: each i64 global covers an
   even/odd pair of 32-bit guest registers (f0/f2/.../f62). */
5153     static const char * const fregnames[32] = {
5154         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5155         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5156         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5157         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5160     /* init various static tables */
/* env pointer lives in the reserved host register TCG_AREG0; everything
   else is a memory-backed global at its CPUState offset. */
5164     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5165     cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5166                                          offsetof(CPUState, regwptr),
/* SPARC64-only state: extended condition codes, ASI, FPRS, GSR, the
   tick/stick/hstick compare registers and hypervisor registers. */
5168 #ifdef TARGET_SPARC64
5169     cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5171     cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5173     cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5175     cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5177     cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5178                                        offsetof(CPUState, tick_cmpr),
5180     cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5181                                         offsetof(CPUState, stick_cmpr),
5183     cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5184                                          offsetof(CPUState, hstick_cmpr),
5186     cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5188     cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5190     cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5192     cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5193                                  offsetof(CPUState, ssr), "ssr");
5194     cpu_ver = tcg_global_mem_new(TCG_AREG0,
5195                                  offsetof(CPUState, version), "ver");
5196     cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5197                                          offsetof(CPUState, softint),
/* SPARC32-only: window invalid mask. */
5200     cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
/* Common state: condition evaluation inputs, cc_op, PSR, FSR, PC/NPC, Y. */
5203     cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5205     cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5207     cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5208                                      offsetof(CPUState, cc_src2),
5210     cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5212     cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5214     cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5216     cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5218     cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5220     cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5222     cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
/* Trap base register only exists for system emulation. */
5223 #ifndef CONFIG_USER_ONLY
5224     cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
/* Global registers g1..g7 (g0 skipped: hardwired zero, see gregnames). */
5227     for (i = 1; i < 8; i++) {
5228         cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5229                                           offsetof(CPUState, gregs[i]),
/* Floating point registers as TARGET_DPREGS i64 pairs. */
5232     for (i = 0; i < TARGET_DPREGS; i++) {
5233         cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5234                                             offsetof(CPUState, fpr[i]),
5238     /* register helpers */
5240 #define GEN_HELPER 2
5245 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5248 env->pc = gen_opc_pc[pc_pos];
5249 npc = gen_opc_npc[pc_pos];
5251 /* dynamic NPC: already stored */
5252 } else if (npc == 2) {
5253 /* jump PC: use 'cond' and the jump targets of the translation */
5255 env->npc = gen_opc_jump_pc[0];
5257 env->npc = gen_opc_jump_pc[1];
5263 /* flush pending conditional evaluations before exposing cpu state */
5264 if (CC_OP != CC_OP_FLAGS) {
5265 helper_compute_psr(env);