1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <[email protected]>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31
32 #define GEN_HELPER 1
33 #include "helper.h"
34
35 #define DEBUG_DISAS
36
37 #define DYNAMIC_PC  1 /* dynamic pc value */
38 #define JUMP_PC     2 /* dynamic pc value which takes only one of the two
39                          values recorded in jump_pc[0] / jump_pc[1] */
40
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* local register indexes (only used inside old micro ops) */
62 static TCGv cpu_tmp0;
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
70
71 #include "gen-icount.h"
72
73 typedef struct DisasContext {
74     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
75     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77     int is_br;
78     int mem_idx;
79     int fpu_enabled;
80     int address_mask_32bit;
81     int singlestep;
82     uint32_t cc_op;  /* current CC operation */
83     struct TranslationBlock *tb;
84     sparc_def_t *def;
85     TCGv_i32 t32[3];
86     int n_t32;
87 } DisasContext;
88
89 // This macro uses non-native bit order: bit 0 is the most significant bit
90 #define GET_FIELD(X, FROM, TO)                                  \
91     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
92
93 // This macro uses the bit order from the manuals, i.e. bit 0 is 2^0
94 #define GET_FIELD_SP(X, FROM, TO)               \
95     GET_FIELD(X, 31 - (TO), 31 - (FROM))
96
97 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
98 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
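/* Example of the two orderings: GET_FIELD(insn, 0, 1) extracts the two most
   significant bits of the word (the instruction's "op" field), while
   GET_FIELD_SP(insn, 0, 1) extracts the two least significant bits.  */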
99
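/* SPARC64 encodes double- and quad-precision registers %f32..%f62 by setting
   bit 0 of the 5-bit register field; these macros fold that bit back into
   bit 5 of a flat register number, e.g. DFPREG(1) == 32 (%d32) while
   DFPREG(2) == 2 (%d2).  */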
100 #ifdef TARGET_SPARC64
101 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
102 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
103 #else
104 #define DFPREG(r) (r & 0x1e)
105 #define QFPREG(r) (r & 0x1c)
106 #endif
107
108 #define UA2005_HTRAP_MASK 0xff
109 #define V8_TRAP_MASK 0x7f
110
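/* Sign-extend the low "len" bits of x, e.g. sign_extend(0x1fff, 13) == -1.
   This relies on arithmetic right shifts of signed integers, as QEMU does
   elsewhere.  */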
111 static int sign_extend(int x, int len)
112 {
113     len = 32 - len;
114     return (x << len) >> len;
115 }
116
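/* Bit 13 of a format-3 instruction is the "i" bit: when set, the second
   source operand is the sign-extended 13-bit immediate rather than a
   register.  */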
117 #define IS_IMM (insn & (1<<13))
118
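/* On SPARC64, record which half of the FP register file has been written:
   FPRS.DL (bit 0) for %f0-%f31, FPRS.DU (bit 1) for %f32-%f62.  */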
119 static inline void gen_update_fprs_dirty(int rd)
120 {
121 #if defined(TARGET_SPARC64)
122     tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
123 #endif
124 }
125
126 /* floating point registers moves */
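/* Each element of cpu_fpr[] holds one double register, i.e. a pair of
   single-precision registers: the even-numbered single occupies bits 63..32
   and the odd-numbered one bits 31..0.  */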
127 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
128 {
129 #if TCG_TARGET_REG_BITS == 32
130     if (src & 1) {
131         return TCGV_LOW(cpu_fpr[src / 2]);
132     } else {
133         return TCGV_HIGH(cpu_fpr[src / 2]);
134     }
135 #else
136     if (src & 1) {
137         return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
138     } else {
139         TCGv_i32 ret = tcg_temp_local_new_i32();
140         TCGv_i64 t = tcg_temp_new_i64();
141
142         tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
143         tcg_gen_trunc_i64_i32(ret, t);
144         tcg_temp_free_i64(t);
145
146         dc->t32[dc->n_t32++] = ret;
147         assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));
148
149         return ret;
150     }
151 #endif
152 }
153
154 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
155 {
156 #if TCG_TARGET_REG_BITS == 32
157     if (dst & 1) {
158         tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
159     } else {
160         tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
161     }
162 #else
163     TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
164     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
165                         (dst & 1 ? 0 : 32), 32);
166 #endif
167     gen_update_fprs_dirty(dst);
168 }
169
170 static TCGv_i32 gen_dest_fpr_F(void)
171 {
172     return cpu_tmp32;
173 }
174
175 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
176 {
177     src = DFPREG(src);
178     return cpu_fpr[src / 2];
179 }
180
181 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
182 {
183     dst = DFPREG(dst);
184     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
185     gen_update_fprs_dirty(dst);
186 }
187
188 static TCGv_i64 gen_dest_fpr_D(void)
189 {
190     return cpu_tmp64;
191 }
192
193 static void gen_op_load_fpr_QT0(unsigned int src)
194 {
195     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
196                    offsetof(CPU_QuadU, ll.upper));
197     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
198                    offsetof(CPU_QuadU, ll.lower));
199 }
200
201 static void gen_op_load_fpr_QT1(unsigned int src)
202 {
203     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
204                    offsetof(CPU_QuadU, ll.upper));
205     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
206                    offsetof(CPU_QuadU, ll.lower));
207 }
208
209 static void gen_op_store_QT0_fpr(unsigned int dst)
210 {
211     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
212                    offsetof(CPU_QuadU, ll.upper));
213     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
214                    offsetof(CPU_QuadU, ll.lower));
215 }
216
217 #ifdef TARGET_SPARC64
218 static void gen_move_Q(unsigned int rd, unsigned int rs)
219 {
220     rd = QFPREG(rd);
221     rs = QFPREG(rs);
222
223     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
224     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
225     gen_update_fprs_dirty(rd);
226 }
227 #endif
228
229 /* moves */
230 #ifdef CONFIG_USER_ONLY
231 #define supervisor(dc) 0
232 #ifdef TARGET_SPARC64
233 #define hypervisor(dc) 0
234 #endif
235 #else
236 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
237 #ifdef TARGET_SPARC64
238 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
239 #else
240 #endif
241 #endif
242
243 #ifdef TARGET_SPARC64
244 #ifndef TARGET_ABI32
245 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
246 #else
247 #define AM_CHECK(dc) (1)
248 #endif
249 #endif
250
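/* Truncate an address to 32 bits when the V9 address mask (PSTATE.AM) is in
   effect, or unconditionally for the 32-bit user-mode ABI; a no-op on
   SPARC32.  */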
251 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
252 {
253 #ifdef TARGET_SPARC64
254     if (AM_CHECK(dc))
255         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
256 #endif
257 }
258
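/* Moves between a TCG temporary and an architectural register: %g0 reads as
   zero and ignores writes, %g1-%g7 live in TCG globals, and the windowed
   registers (%o, %l, %i) are accessed through cpu_regwptr.  */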
259 static inline void gen_movl_reg_TN(int reg, TCGv tn)
260 {
261     if (reg == 0)
262         tcg_gen_movi_tl(tn, 0);
263     else if (reg < 8)
264         tcg_gen_mov_tl(tn, cpu_gregs[reg]);
265     else {
266         tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
267     }
268 }
269
270 static inline void gen_movl_TN_reg(int reg, TCGv tn)
271 {
272     if (reg == 0)
273         return;
274     else if (reg < 8)
275         tcg_gen_mov_tl(cpu_gregs[reg], tn);
276     else {
277         tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
278     }
279 }
280
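/* End the TB with a jump to (pc, npc).  If both addresses stay on the same
   guest page as this TB and we are not single-stepping, chain directly to
   the next TB via goto_tb; otherwise return to the main loop.  */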
281 static inline void gen_goto_tb(DisasContext *s, int tb_num,
282                                target_ulong pc, target_ulong npc)
283 {
284     TranslationBlock *tb;
285
286     tb = s->tb;
287     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
288         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
289         !s->singlestep)  {
290         /* jump to same page: we can use a direct jump */
291         tcg_gen_goto_tb(tb_num);
292         tcg_gen_movi_tl(cpu_pc, pc);
293         tcg_gen_movi_tl(cpu_npc, npc);
294         tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
295     } else {
296         /* jump to another page: currently not optimized */
297         tcg_gen_movi_tl(cpu_pc, pc);
298         tcg_gen_movi_tl(cpu_npc, npc);
299         tcg_gen_exit_tb(0);
300     }
301 }
302
303 // XXX suboptimal
304 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
305 {
306     tcg_gen_extu_i32_tl(reg, src);
307     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
308     tcg_gen_andi_tl(reg, reg, 0x1);
309 }
310
311 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
312 {
313     tcg_gen_extu_i32_tl(reg, src);
314     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
315     tcg_gen_andi_tl(reg, reg, 0x1);
316 }
317
318 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
319 {
320     tcg_gen_extu_i32_tl(reg, src);
321     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
322     tcg_gen_andi_tl(reg, reg, 0x1);
323 }
324
325 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
326 {
327     tcg_gen_extu_i32_tl(reg, src);
328     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
329     tcg_gen_andi_tl(reg, reg, 0x1);
330 }
331
332 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
333 {
334     TCGv r_temp;
335     TCGv_i32 r_const;
336     int l1;
337
338     l1 = gen_new_label();
339
340     r_temp = tcg_temp_new();
341     tcg_gen_xor_tl(r_temp, src1, src2);
342     tcg_gen_not_tl(r_temp, r_temp);
343     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
344     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
345     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
346     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
347     r_const = tcg_const_i32(TT_TOVF);
348     gen_helper_raise_exception(cpu_env, r_const);
349     tcg_temp_free_i32(r_const);
350     gen_set_label(l1);
351     tcg_temp_free(r_temp);
352 }
353
354 static inline void gen_tag_tv(TCGv src1, TCGv src2)
355 {
356     int l1;
357     TCGv_i32 r_const;
358
359     l1 = gen_new_label();
360     tcg_gen_or_tl(cpu_tmp0, src1, src2);
361     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
362     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
363     r_const = tcg_const_i32(TT_TOVF);
364     gen_helper_raise_exception(cpu_env, r_const);
365     tcg_temp_free_i32(r_const);
366     gen_set_label(l1);
367 }
368
369 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
370 {
371     tcg_gen_mov_tl(cpu_cc_src, src1);
372     tcg_gen_movi_tl(cpu_cc_src2, src2);
373     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
374     tcg_gen_mov_tl(dst, cpu_cc_dst);
375 }
376
377 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
378 {
379     tcg_gen_mov_tl(cpu_cc_src, src1);
380     tcg_gen_mov_tl(cpu_cc_src2, src2);
381     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
382     tcg_gen_mov_tl(dst, cpu_cc_dst);
383 }
384
385 static TCGv_i32 gen_add32_carry32(void)
386 {
387     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
388
389     /* Carry is computed from a previous add: (dst < src)  */
390 #if TARGET_LONG_BITS == 64
391     cc_src1_32 = tcg_temp_new_i32();
392     cc_src2_32 = tcg_temp_new_i32();
393     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
394     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
395 #else
396     cc_src1_32 = cpu_cc_dst;
397     cc_src2_32 = cpu_cc_src;
398 #endif
399
400     carry_32 = tcg_temp_new_i32();
401     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
402
403 #if TARGET_LONG_BITS == 64
404     tcg_temp_free_i32(cc_src1_32);
405     tcg_temp_free_i32(cc_src2_32);
406 #endif
407
408     return carry_32;
409 }
410
411 static TCGv_i32 gen_sub32_carry32(void)
412 {
413     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
414
415     /* Carry is computed from a previous borrow: (src1 < src2)  */
416 #if TARGET_LONG_BITS == 64
417     cc_src1_32 = tcg_temp_new_i32();
418     cc_src2_32 = tcg_temp_new_i32();
419     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
420     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
421 #else
422     cc_src1_32 = cpu_cc_src;
423     cc_src2_32 = cpu_cc_src2;
424 #endif
425
426     carry_32 = tcg_temp_new_i32();
427     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
428
429 #if TARGET_LONG_BITS == 64
430     tcg_temp_free_i32(cc_src1_32);
431     tcg_temp_free_i32(cc_src2_32);
432 #endif
433
434     return carry_32;
435 }
436
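/* ADDC/ADDX: dst = src1 + src2 + carry.  How the carry is materialized
   depends on which operation last set the flags (dc->cc_op): it is known to
   be zero after logic ops, recomputed from the saved operands after an
   add or sub, and otherwise obtained from the compute_C_icc helper.  */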
437 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
438                             TCGv src2, int update_cc)
439 {
440     TCGv_i32 carry_32;
441     TCGv carry;
442
443     switch (dc->cc_op) {
444     case CC_OP_DIV:
445     case CC_OP_LOGIC:
446         /* Carry is known to be zero.  Fall back to plain ADD.  */
447         if (update_cc) {
448             gen_op_add_cc(dst, src1, src2);
449         } else {
450             tcg_gen_add_tl(dst, src1, src2);
451         }
452         return;
453
454     case CC_OP_ADD:
455     case CC_OP_TADD:
456     case CC_OP_TADDTV:
457 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
458         {
459             /* For 32-bit hosts, we can re-use the host's hardware carry
460                generation by using an ADD2 opcode.  We discard the low
461                part of the output.  Ideally we'd combine this operation
462                with the add that generated the carry in the first place.  */
463             TCGv dst_low = tcg_temp_new();
464             tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
465                             cpu_cc_src, src1, cpu_cc_src2, src2);
466             tcg_temp_free(dst_low);
467             goto add_done;
468         }
469 #endif
470         carry_32 = gen_add32_carry32();
471         break;
472
473     case CC_OP_SUB:
474     case CC_OP_TSUB:
475     case CC_OP_TSUBTV:
476         carry_32 = gen_sub32_carry32();
477         break;
478
479     default:
480         /* We need external help to produce the carry.  */
481         carry_32 = tcg_temp_new_i32();
482         gen_helper_compute_C_icc(carry_32, cpu_env);
483         break;
484     }
485
486 #if TARGET_LONG_BITS == 64
487     carry = tcg_temp_new();
488     tcg_gen_extu_i32_i64(carry, carry_32);
489 #else
490     carry = carry_32;
491 #endif
492
493     tcg_gen_add_tl(dst, src1, src2);
494     tcg_gen_add_tl(dst, dst, carry);
495
496     tcg_temp_free_i32(carry_32);
497 #if TARGET_LONG_BITS == 64
498     tcg_temp_free(carry);
499 #endif
500
501 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
502  add_done:
503 #endif
504     if (update_cc) {
505         tcg_gen_mov_tl(cpu_cc_src, src1);
506         tcg_gen_mov_tl(cpu_cc_src2, src2);
507         tcg_gen_mov_tl(cpu_cc_dst, dst);
508         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
509         dc->cc_op = CC_OP_ADDX;
510     }
511 }
512
513 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
514 {
515     tcg_gen_mov_tl(cpu_cc_src, src1);
516     tcg_gen_mov_tl(cpu_cc_src2, src2);
517     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
518     tcg_gen_mov_tl(dst, cpu_cc_dst);
519 }
520
521 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
522 {
523     tcg_gen_mov_tl(cpu_cc_src, src1);
524     tcg_gen_mov_tl(cpu_cc_src2, src2);
525     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
526     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
527     gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
528     tcg_gen_mov_tl(dst, cpu_cc_dst);
529 }
530
531 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
532 {
533     TCGv r_temp;
534     TCGv_i32 r_const;
535     int l1;
536
537     l1 = gen_new_label();
538
539     r_temp = tcg_temp_new();
540     tcg_gen_xor_tl(r_temp, src1, src2);
541     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
542     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
543     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
544     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
545     r_const = tcg_const_i32(TT_TOVF);
546     gen_helper_raise_exception(cpu_env, r_const);
547     tcg_temp_free_i32(r_const);
548     gen_set_label(l1);
549     tcg_temp_free(r_temp);
550 }
551
552 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
553 {
554     tcg_gen_mov_tl(cpu_cc_src, src1);
555     tcg_gen_movi_tl(cpu_cc_src2, src2);
556     if (src2 == 0) {
557         tcg_gen_mov_tl(cpu_cc_dst, src1);
558         tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
559         dc->cc_op = CC_OP_LOGIC;
560     } else {
561         tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
562         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
563         dc->cc_op = CC_OP_SUB;
564     }
565     tcg_gen_mov_tl(dst, cpu_cc_dst);
566 }
567
568 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
569 {
570     tcg_gen_mov_tl(cpu_cc_src, src1);
571     tcg_gen_mov_tl(cpu_cc_src2, src2);
572     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
573     tcg_gen_mov_tl(dst, cpu_cc_dst);
574 }
575
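/* SUBC/SUBX: dst = src1 - src2 - carry, using the same carry-recovery
   strategy as gen_op_addx_int above.  */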
576 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
577                             TCGv src2, int update_cc)
578 {
579     TCGv_i32 carry_32;
580     TCGv carry;
581
582     switch (dc->cc_op) {
583     case CC_OP_DIV:
584     case CC_OP_LOGIC:
585         /* Carry is known to be zero.  Fall back to plain SUB.  */
586         if (update_cc) {
587             gen_op_sub_cc(dst, src1, src2);
588         } else {
589             tcg_gen_sub_tl(dst, src1, src2);
590         }
591         return;
592
593     case CC_OP_ADD:
594     case CC_OP_TADD:
595     case CC_OP_TADDTV:
596         carry_32 = gen_add32_carry32();
597         break;
598
599     case CC_OP_SUB:
600     case CC_OP_TSUB:
601     case CC_OP_TSUBTV:
602 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
603         {
604             /* For 32-bit hosts, we can re-use the host's hardware carry
605                generation by using a SUB2 opcode.  We discard the low
606                part of the output.  Ideally we'd combine this operation
607                with the subtraction that generated the carry in the first place.  */
608             TCGv dst_low = tcg_temp_new();
609             tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
610                             cpu_cc_src, src1, cpu_cc_src2, src2);
611             tcg_temp_free(dst_low);
612             goto sub_done;
613         }
614 #endif
615         carry_32 = gen_sub32_carry32();
616         break;
617
618     default:
619         /* We need external help to produce the carry.  */
620         carry_32 = tcg_temp_new_i32();
621         gen_helper_compute_C_icc(carry_32, cpu_env);
622         break;
623     }
624
625 #if TARGET_LONG_BITS == 64
626     carry = tcg_temp_new();
627     tcg_gen_extu_i32_i64(carry, carry_32);
628 #else
629     carry = carry_32;
630 #endif
631
632     tcg_gen_sub_tl(dst, src1, src2);
633     tcg_gen_sub_tl(dst, dst, carry);
634
635     tcg_temp_free_i32(carry_32);
636 #if TARGET_LONG_BITS == 64
637     tcg_temp_free(carry);
638 #endif
639
640 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
641  sub_done:
642 #endif
643     if (update_cc) {
644         tcg_gen_mov_tl(cpu_cc_src, src1);
645         tcg_gen_mov_tl(cpu_cc_src2, src2);
646         tcg_gen_mov_tl(cpu_cc_dst, dst);
647         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
648         dc->cc_op = CC_OP_SUBX;
649     }
650 }
651
652 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
653 {
654     tcg_gen_mov_tl(cpu_cc_src, src1);
655     tcg_gen_mov_tl(cpu_cc_src2, src2);
656     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
657     tcg_gen_mov_tl(dst, cpu_cc_dst);
658 }
659
660 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
661 {
662     tcg_gen_mov_tl(cpu_cc_src, src1);
663     tcg_gen_mov_tl(cpu_cc_src2, src2);
664     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
665     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
666     gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
667     tcg_gen_mov_tl(dst, cpu_cc_dst);
668 }
669
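/* MULScc: one step of the V8 multiply-step algorithm.  The low bit of %y
   decides whether src2 is added at all, %y is shifted right with the low
   bit of src1 shifted in, and src1 is shifted right with (N xor V) shifted
   in before the add that sets the condition codes.  */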
670 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
671 {
672     TCGv r_temp;
673     int l1;
674
675     l1 = gen_new_label();
676     r_temp = tcg_temp_new();
677
678     /* old op:
679     if (!(env->y & 1))
680         T1 = 0;
681     */
682     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
683     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
684     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
685     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
686     tcg_gen_movi_tl(cpu_cc_src2, 0);
687     gen_set_label(l1);
688
689     // b2 = T0 & 1;
690     // env->y = (b2 << 31) | (env->y >> 1);
691     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
692     tcg_gen_shli_tl(r_temp, r_temp, 31);
693     tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
694     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
695     tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
696     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
697
698     // b1 = N ^ V;
699     gen_mov_reg_N(cpu_tmp0, cpu_psr);
700     gen_mov_reg_V(r_temp, cpu_psr);
701     tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
702     tcg_temp_free(r_temp);
703
704     // T0 = (b1 << 31) | (T0 >> 1);
705     // src1 = T0;
706     tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
707     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
708     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
709
710     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
711
712     tcg_gen_mov_tl(dst, cpu_cc_dst);
713 }
714
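/* UMUL/SMUL: perform the full 32x32->64 multiply, write the high 32 bits of
   the product to %y and the (target-width) product to the destination.  */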
715 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
716 {
717     TCGv_i32 r_src1, r_src2;
718     TCGv_i64 r_temp, r_temp2;
719
720     r_src1 = tcg_temp_new_i32();
721     r_src2 = tcg_temp_new_i32();
722
723     tcg_gen_trunc_tl_i32(r_src1, src1);
724     tcg_gen_trunc_tl_i32(r_src2, src2);
725
726     r_temp = tcg_temp_new_i64();
727     r_temp2 = tcg_temp_new_i64();
728
729     if (sign_ext) {
730         tcg_gen_ext_i32_i64(r_temp, r_src2);
731         tcg_gen_ext_i32_i64(r_temp2, r_src1);
732     } else {
733         tcg_gen_extu_i32_i64(r_temp, r_src2);
734         tcg_gen_extu_i32_i64(r_temp2, r_src1);
735     }
736
737     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
738
739     tcg_gen_shri_i64(r_temp, r_temp2, 32);
740     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
741     tcg_temp_free_i64(r_temp);
742     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
743
744     tcg_gen_trunc_i64_tl(dst, r_temp2);
745
746     tcg_temp_free_i64(r_temp2);
747
748     tcg_temp_free_i32(r_src1);
749     tcg_temp_free_i32(r_src2);
750 }
751
752 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
753 {
754     /* zero-extend truncated operands before multiplication */
755     gen_op_multiply(dst, src1, src2, 0);
756 }
757
758 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
759 {
760     /* sign-extend truncated operands before multiplication */
761     gen_op_multiply(dst, src1, src2, 1);
762 }
763
764 #ifdef TARGET_SPARC64
765 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
766 {
767     TCGv_i32 r_const;
768     int l1;
769
770     l1 = gen_new_label();
771     tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
772     r_const = tcg_const_i32(TT_DIV_ZERO);
773     gen_helper_raise_exception(cpu_env, r_const);
774     tcg_temp_free_i32(r_const);
775     gen_set_label(l1);
776 }
777
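/* SDIVX: 64-bit signed division.  Traps on division by zero and pins the
   one overflowing case, INT64_MIN / -1, to INT64_MIN instead of letting the
   host division fault.  */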
778 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
779 {
780     int l1, l2;
781     TCGv r_temp1, r_temp2;
782
783     l1 = gen_new_label();
784     l2 = gen_new_label();
785     r_temp1 = tcg_temp_local_new();
786     r_temp2 = tcg_temp_local_new();
787     tcg_gen_mov_tl(r_temp1, src1);
788     tcg_gen_mov_tl(r_temp2, src2);
789     gen_trap_ifdivzero_tl(r_temp2);
790     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
791     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
792     tcg_gen_movi_i64(dst, INT64_MIN);
793     tcg_gen_br(l2);
794     gen_set_label(l1);
795     tcg_gen_div_i64(dst, r_temp1, r_temp2);
796     gen_set_label(l2);
797     tcg_temp_free(r_temp1);
798     tcg_temp_free(r_temp2);
799 }
800 #endif
801
802 // 1
803 static inline void gen_op_eval_ba(TCGv dst)
804 {
805     tcg_gen_movi_tl(dst, 1);
806 }
807
808 // Z
809 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
810 {
811     gen_mov_reg_Z(dst, src);
812 }
813
814 // Z | (N ^ V)
815 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
816 {
817     gen_mov_reg_N(cpu_tmp0, src);
818     gen_mov_reg_V(dst, src);
819     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
820     gen_mov_reg_Z(cpu_tmp0, src);
821     tcg_gen_or_tl(dst, dst, cpu_tmp0);
822 }
823
824 // N ^ V
825 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
826 {
827     gen_mov_reg_V(cpu_tmp0, src);
828     gen_mov_reg_N(dst, src);
829     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
830 }
831
832 // C | Z
833 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
834 {
835     gen_mov_reg_Z(cpu_tmp0, src);
836     gen_mov_reg_C(dst, src);
837     tcg_gen_or_tl(dst, dst, cpu_tmp0);
838 }
839
840 // C
841 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
842 {
843     gen_mov_reg_C(dst, src);
844 }
845
846 // V
847 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
848 {
849     gen_mov_reg_V(dst, src);
850 }
851
852 // 0
853 static inline void gen_op_eval_bn(TCGv dst)
854 {
855     tcg_gen_movi_tl(dst, 0);
856 }
857
858 // N
859 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
860 {
861     gen_mov_reg_N(dst, src);
862 }
863
864 // !Z
865 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
866 {
867     gen_mov_reg_Z(dst, src);
868     tcg_gen_xori_tl(dst, dst, 0x1);
869 }
870
871 // !(Z | (N ^ V))
872 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
873 {
874     gen_mov_reg_N(cpu_tmp0, src);
875     gen_mov_reg_V(dst, src);
876     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
877     gen_mov_reg_Z(cpu_tmp0, src);
878     tcg_gen_or_tl(dst, dst, cpu_tmp0);
879     tcg_gen_xori_tl(dst, dst, 0x1);
880 }
881
882 // !(N ^ V)
883 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
884 {
885     gen_mov_reg_V(cpu_tmp0, src);
886     gen_mov_reg_N(dst, src);
887     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
888     tcg_gen_xori_tl(dst, dst, 0x1);
889 }
890
891 // !(C | Z)
892 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
893 {
894     gen_mov_reg_Z(cpu_tmp0, src);
895     gen_mov_reg_C(dst, src);
896     tcg_gen_or_tl(dst, dst, cpu_tmp0);
897     tcg_gen_xori_tl(dst, dst, 0x1);
898 }
899
900 // !C
901 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
902 {
903     gen_mov_reg_C(dst, src);
904     tcg_gen_xori_tl(dst, dst, 0x1);
905 }
906
907 // !N
908 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
909 {
910     gen_mov_reg_N(dst, src);
911     tcg_gen_xori_tl(dst, dst, 0x1);
912 }
913
914 // !V
915 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
916 {
917     gen_mov_reg_V(dst, src);
918     tcg_gen_xori_tl(dst, dst, 0x1);
919 }
920
921 /*
922   FPSR condition code field (FCC1:FCC0) encoding:
923    0  equal
924    1  less than
925    2  greater than
926    3  unordered
927 */
928 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
929                                     unsigned int fcc_offset)
930 {
931     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
932     tcg_gen_andi_tl(reg, reg, 0x1);
933 }
934
935 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
936                                     unsigned int fcc_offset)
937 {
938     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
939     tcg_gen_andi_tl(reg, reg, 0x1);
940 }
941
942 // !0: FCC0 | FCC1
943 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
944                                     unsigned int fcc_offset)
945 {
946     gen_mov_reg_FCC0(dst, src, fcc_offset);
947     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
948     tcg_gen_or_tl(dst, dst, cpu_tmp0);
949 }
950
951 // 1 or 2: FCC0 ^ FCC1
952 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
953                                     unsigned int fcc_offset)
954 {
955     gen_mov_reg_FCC0(dst, src, fcc_offset);
956     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
957     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
958 }
959
960 // 1 or 3: FCC0
961 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
962                                     unsigned int fcc_offset)
963 {
964     gen_mov_reg_FCC0(dst, src, fcc_offset);
965 }
966
967 // 1: FCC0 & !FCC1
968 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
969                                     unsigned int fcc_offset)
970 {
971     gen_mov_reg_FCC0(dst, src, fcc_offset);
972     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
973     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
974     tcg_gen_and_tl(dst, dst, cpu_tmp0);
975 }
976
977 // 2 or 3: FCC1
978 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
979                                     unsigned int fcc_offset)
980 {
981     gen_mov_reg_FCC1(dst, src, fcc_offset);
982 }
983
984 // 2: !FCC0 & FCC1
985 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
986                                     unsigned int fcc_offset)
987 {
988     gen_mov_reg_FCC0(dst, src, fcc_offset);
989     tcg_gen_xori_tl(dst, dst, 0x1);
990     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
991     tcg_gen_and_tl(dst, dst, cpu_tmp0);
992 }
993
994 // 3: FCC0 & FCC1
995 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
996                                     unsigned int fcc_offset)
997 {
998     gen_mov_reg_FCC0(dst, src, fcc_offset);
999     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1000     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1001 }
1002
1003 // 0: !(FCC0 | FCC1)
1004 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1005                                     unsigned int fcc_offset)
1006 {
1007     gen_mov_reg_FCC0(dst, src, fcc_offset);
1008     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1009     tcg_gen_or_tl(dst, dst, cpu_tmp0);
1010     tcg_gen_xori_tl(dst, dst, 0x1);
1011 }
1012
1013 // 0 or 3: !(FCC0 ^ FCC1)
1014 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1015                                     unsigned int fcc_offset)
1016 {
1017     gen_mov_reg_FCC0(dst, src, fcc_offset);
1018     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1019     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1020     tcg_gen_xori_tl(dst, dst, 0x1);
1021 }
1022
1023 // 0 or 2: !FCC0
1024 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1025                                     unsigned int fcc_offset)
1026 {
1027     gen_mov_reg_FCC0(dst, src, fcc_offset);
1028     tcg_gen_xori_tl(dst, dst, 0x1);
1029 }
1030
1031 // !1: !(FCC0 & !FCC1)
1032 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1033                                     unsigned int fcc_offset)
1034 {
1035     gen_mov_reg_FCC0(dst, src, fcc_offset);
1036     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1037     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1038     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1039     tcg_gen_xori_tl(dst, dst, 0x1);
1040 }
1041
1042 // 0 or 1: !FCC1
1043 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1044                                     unsigned int fcc_offset)
1045 {
1046     gen_mov_reg_FCC1(dst, src, fcc_offset);
1047     tcg_gen_xori_tl(dst, dst, 0x1);
1048 }
1049
1050 // !2: !(!FCC0 & FCC1)
1051 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1052                                     unsigned int fcc_offset)
1053 {
1054     gen_mov_reg_FCC0(dst, src, fcc_offset);
1055     tcg_gen_xori_tl(dst, dst, 0x1);
1056     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1057     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1058     tcg_gen_xori_tl(dst, dst, 0x1);
1059 }
1060
1061 // !3: !(FCC0 & FCC1)
1062 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1063                                     unsigned int fcc_offset)
1064 {
1065     gen_mov_reg_FCC0(dst, src, fcc_offset);
1066     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1067     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1068     tcg_gen_xori_tl(dst, dst, 0x1);
1069 }
1070
1071 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1072                                target_ulong pc2, TCGv r_cond)
1073 {
1074     int l1;
1075
1076     l1 = gen_new_label();
1077
1078     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1079
1080     gen_goto_tb(dc, 0, pc1, pc1 + 4);
1081
1082     gen_set_label(l1);
1083     gen_goto_tb(dc, 1, pc2, pc2 + 4);
1084 }
1085
1086 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1087                                 target_ulong pc2, TCGv r_cond)
1088 {
1089     int l1;
1090
1091     l1 = gen_new_label();
1092
1093     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1094
1095     gen_goto_tb(dc, 0, pc2, pc1);
1096
1097     gen_set_label(l1);
1098     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1099 }
1100
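/* Materialize a still-pending conditional branch: set cpu_npc to npc1 if
   r_cond is non-zero, else to npc2.  Used to resolve the JUMP_PC state.  */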
1101 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1102                                       TCGv r_cond)
1103 {
1104     int l1, l2;
1105
1106     l1 = gen_new_label();
1107     l2 = gen_new_label();
1108
1109     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1110
1111     tcg_gen_movi_tl(cpu_npc, npc1);
1112     tcg_gen_br(l2);
1113
1114     gen_set_label(l1);
1115     tcg_gen_movi_tl(cpu_npc, npc2);
1116     gen_set_label(l2);
1117 }
1118
1119 /* call this function before using the condition register as it may
1120    have been set for a jump */
1121 static inline void flush_cond(DisasContext *dc, TCGv cond)
1122 {
1123     if (dc->npc == JUMP_PC) {
1124         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1125         dc->npc = DYNAMIC_PC;
1126     }
1127 }
1128
1129 static inline void save_npc(DisasContext *dc, TCGv cond)
1130 {
1131     if (dc->npc == JUMP_PC) {
1132         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1133         dc->npc = DYNAMIC_PC;
1134     } else if (dc->npc != DYNAMIC_PC) {
1135         tcg_gen_movi_tl(cpu_npc, dc->npc);
1136     }
1137 }
1138
1139 static inline void save_state(DisasContext *dc, TCGv cond)
1140 {
1141     tcg_gen_movi_tl(cpu_pc, dc->pc);
1142     /* flush pending conditional evaluations before exposing cpu state */
1143     if (dc->cc_op != CC_OP_FLAGS) {
1144         dc->cc_op = CC_OP_FLAGS;
1145         gen_helper_compute_psr(cpu_env);
1146     }
1147     save_npc(dc, cond);
1148 }
1149
1150 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1151 {
1152     if (dc->npc == JUMP_PC) {
1153         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1154         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1155         dc->pc = DYNAMIC_PC;
1156     } else if (dc->npc == DYNAMIC_PC) {
1157         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1158         dc->pc = DYNAMIC_PC;
1159     } else {
1160         dc->pc = dc->npc;
1161     }
1162 }
1163
1164 static inline void gen_op_next_insn(void)
1165 {
1166     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1167     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1168 }
1169
1170 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1171                             DisasContext *dc)
1172 {
1173     TCGv_i32 r_src;
1174
1175 #ifdef TARGET_SPARC64
1176     if (cc)
1177         r_src = cpu_xcc;
1178     else
1179         r_src = cpu_psr;
1180 #else
1181     r_src = cpu_psr;
1182 #endif
1183     switch (dc->cc_op) {
1184     case CC_OP_FLAGS:
1185         break;
1186     default:
1187         gen_helper_compute_psr(cpu_env);
1188         dc->cc_op = CC_OP_FLAGS;
1189         break;
1190     }
1191     switch (cond) {
1192     case 0x0:
1193         gen_op_eval_bn(r_dst);
1194         break;
1195     case 0x1:
1196         gen_op_eval_be(r_dst, r_src);
1197         break;
1198     case 0x2:
1199         gen_op_eval_ble(r_dst, r_src);
1200         break;
1201     case 0x3:
1202         gen_op_eval_bl(r_dst, r_src);
1203         break;
1204     case 0x4:
1205         gen_op_eval_bleu(r_dst, r_src);
1206         break;
1207     case 0x5:
1208         gen_op_eval_bcs(r_dst, r_src);
1209         break;
1210     case 0x6:
1211         gen_op_eval_bneg(r_dst, r_src);
1212         break;
1213     case 0x7:
1214         gen_op_eval_bvs(r_dst, r_src);
1215         break;
1216     case 0x8:
1217         gen_op_eval_ba(r_dst);
1218         break;
1219     case 0x9:
1220         gen_op_eval_bne(r_dst, r_src);
1221         break;
1222     case 0xa:
1223         gen_op_eval_bg(r_dst, r_src);
1224         break;
1225     case 0xb:
1226         gen_op_eval_bge(r_dst, r_src);
1227         break;
1228     case 0xc:
1229         gen_op_eval_bgu(r_dst, r_src);
1230         break;
1231     case 0xd:
1232         gen_op_eval_bcc(r_dst, r_src);
1233         break;
1234     case 0xe:
1235         gen_op_eval_bpos(r_dst, r_src);
1236         break;
1237     case 0xf:
1238         gen_op_eval_bvc(r_dst, r_src);
1239         break;
1240     }
1241 }
1242
1243 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1244 {
1245     unsigned int offset;
1246
1247     switch (cc) {
1248     default:
1249     case 0x0:
1250         offset = 0;
1251         break;
1252     case 0x1:
1253         offset = 32 - 10;
1254         break;
1255     case 0x2:
1256         offset = 34 - 10;
1257         break;
1258     case 0x3:
1259         offset = 36 - 10;
1260         break;
1261     }
1262
1263     switch (cond) {
1264     case 0x0:
1265         gen_op_eval_bn(r_dst);
1266         break;
1267     case 0x1:
1268         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1269         break;
1270     case 0x2:
1271         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1272         break;
1273     case 0x3:
1274         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1275         break;
1276     case 0x4:
1277         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1278         break;
1279     case 0x5:
1280         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1281         break;
1282     case 0x6:
1283         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1284         break;
1285     case 0x7:
1286         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1287         break;
1288     case 0x8:
1289         gen_op_eval_ba(r_dst);
1290         break;
1291     case 0x9:
1292         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1293         break;
1294     case 0xa:
1295         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1296         break;
1297     case 0xb:
1298         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1299         break;
1300     case 0xc:
1301         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1302         break;
1303     case 0xd:
1304         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1305         break;
1306     case 0xe:
1307         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1308         break;
1309     case 0xf:
1310         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1311         break;
1312     }
1313 }
1314
1315 #ifdef TARGET_SPARC64
1316 // Inverted logic: each entry holds the negation of the architectural condition
1317 static const int gen_tcg_cond_reg[8] = {
1318     -1,
1319     TCG_COND_NE,
1320     TCG_COND_GT,
1321     TCG_COND_GE,
1322     -1,
1323     TCG_COND_EQ,
1324     TCG_COND_LE,
1325     TCG_COND_LT,
1326 };
1327
1328 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1329 {
1330     int l1;
1331
1332     l1 = gen_new_label();
1333     tcg_gen_movi_tl(r_dst, 0);
1334     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1335     tcg_gen_movi_tl(r_dst, 1);
1336     gen_set_label(l1);
1337 }
1338 #endif
1339
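/* Bcc/BPcc: cond 0 is "branch never" and cond 8 is "branch always"; the
   annul bit (a) skips the delay-slot instruction when the branch is not
   taken, or always for the two unconditional forms.  For a real condition
   the outcome is only known at run time, so both possible next PCs are
   recorded in jump_pc[] and dc->npc is set to the JUMP_PC marker.  */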
1340 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1341                       TCGv r_cond)
1342 {
1343     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1344     target_ulong target = dc->pc + offset;
1345
1346     if (cond == 0x0) {
1347         /* unconditional not taken */
1348         if (a) {
1349             dc->pc = dc->npc + 4;
1350             dc->npc = dc->pc + 4;
1351         } else {
1352             dc->pc = dc->npc;
1353             dc->npc = dc->pc + 4;
1354         }
1355     } else if (cond == 0x8) {
1356         /* unconditional taken */
1357         if (a) {
1358             dc->pc = target;
1359             dc->npc = dc->pc + 4;
1360         } else {
1361             dc->pc = dc->npc;
1362             dc->npc = target;
1363             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1364         }
1365     } else {
1366         flush_cond(dc, r_cond);
1367         gen_cond(r_cond, cc, cond, dc);
1368         if (a) {
1369             gen_branch_a(dc, target, dc->npc, r_cond);
1370             dc->is_br = 1;
1371         } else {
1372             dc->pc = dc->npc;
1373             dc->jump_pc[0] = target;
1374             if (unlikely(dc->npc == DYNAMIC_PC)) {
1375                 dc->jump_pc[1] = DYNAMIC_PC;
1376                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1377             } else {
1378                 dc->jump_pc[1] = dc->npc + 4;
1379                 dc->npc = JUMP_PC;
1380             }
1381         }
1382     }
1383 }
1384
1385 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1386                       TCGv r_cond)
1387 {
1388     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1389     target_ulong target = dc->pc + offset;
1390
1391     if (cond == 0x0) {
1392         /* unconditional not taken */
1393         if (a) {
1394             dc->pc = dc->npc + 4;
1395             dc->npc = dc->pc + 4;
1396         } else {
1397             dc->pc = dc->npc;
1398             dc->npc = dc->pc + 4;
1399         }
1400     } else if (cond == 0x8) {
1401         /* unconditional taken */
1402         if (a) {
1403             dc->pc = target;
1404             dc->npc = dc->pc + 4;
1405         } else {
1406             dc->pc = dc->npc;
1407             dc->npc = target;
1408             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1409         }
1410     } else {
1411         flush_cond(dc, r_cond);
1412         gen_fcond(r_cond, cc, cond);
1413         if (a) {
1414             gen_branch_a(dc, target, dc->npc, r_cond);
1415             dc->is_br = 1;
1416         } else {
1417             dc->pc = dc->npc;
1418             dc->jump_pc[0] = target;
1419             if (unlikely(dc->npc == DYNAMIC_PC)) {
1420                 dc->jump_pc[1] = DYNAMIC_PC;
1421                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1422             } else {
1423                 dc->jump_pc[1] = dc->npc + 4;
1424                 dc->npc = JUMP_PC;
1425             }
1426         }
1427     }
1428 }
1429
1430 #ifdef TARGET_SPARC64
1431 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1432                           TCGv r_cond, TCGv r_reg)
1433 {
1434     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1435     target_ulong target = dc->pc + offset;
1436
1437     flush_cond(dc, r_cond);
1438     gen_cond_reg(r_cond, cond, r_reg);
1439     if (a) {
1440         gen_branch_a(dc, target, dc->npc, r_cond);
1441         dc->is_br = 1;
1442     } else {
1443         dc->pc = dc->npc;
1444         dc->jump_pc[0] = target;
1445         if (unlikely(dc->npc == DYNAMIC_PC)) {
1446             dc->jump_pc[1] = DYNAMIC_PC;
1447             tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1448         } else {
1449             dc->jump_pc[1] = dc->npc + 4;
1450             dc->npc = JUMP_PC;
1451         }
1452     }
1453 }
1454
1455 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1456 {
1457     switch (fccno) {
1458     case 0:
1459         gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1460         break;
1461     case 1:
1462         gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1463         break;
1464     case 2:
1465         gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1466         break;
1467     case 3:
1468         gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1469         break;
1470     }
1471 }
1472
1473 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1474 {
1475     switch (fccno) {
1476     case 0:
1477         gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1478         break;
1479     case 1:
1480         gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1481         break;
1482     case 2:
1483         gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1484         break;
1485     case 3:
1486         gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1487         break;
1488     }
1489 }
1490
1491 static inline void gen_op_fcmpq(int fccno)
1492 {
1493     switch (fccno) {
1494     case 0:
1495         gen_helper_fcmpq(cpu_env);
1496         break;
1497     case 1:
1498         gen_helper_fcmpq_fcc1(cpu_env);
1499         break;
1500     case 2:
1501         gen_helper_fcmpq_fcc2(cpu_env);
1502         break;
1503     case 3:
1504         gen_helper_fcmpq_fcc3(cpu_env);
1505         break;
1506     }
1507 }
1508
1509 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1510 {
1511     switch (fccno) {
1512     case 0:
1513         gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1514         break;
1515     case 1:
1516         gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1517         break;
1518     case 2:
1519         gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1520         break;
1521     case 3:
1522         gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1523         break;
1524     }
1525 }
1526
1527 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1528 {
1529     switch (fccno) {
1530     case 0:
1531         gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1532         break;
1533     case 1:
1534         gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1535         break;
1536     case 2:
1537         gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1538         break;
1539     case 3:
1540         gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1541         break;
1542     }
1543 }
1544
1545 static inline void gen_op_fcmpeq(int fccno)
1546 {
1547     switch (fccno) {
1548     case 0:
1549         gen_helper_fcmpeq(cpu_env);
1550         break;
1551     case 1:
1552         gen_helper_fcmpeq_fcc1(cpu_env);
1553         break;
1554     case 2:
1555         gen_helper_fcmpeq_fcc2(cpu_env);
1556         break;
1557     case 3:
1558         gen_helper_fcmpeq_fcc3(cpu_env);
1559         break;
1560     }
1561 }
1562
1563 #else
1564
1565 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1566 {
1567     gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1568 }
1569
1570 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1571 {
1572     gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1573 }
1574
1575 static inline void gen_op_fcmpq(int fccno)
1576 {
1577     gen_helper_fcmpq(cpu_env);
1578 }
1579
1580 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1581 {
1582     gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1583 }
1584
1585 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1586 {
1587     gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1588 }
1589
1590 static inline void gen_op_fcmpeq(int fccno)
1591 {
1592     gen_helper_fcmpeq(cpu_env);
1593 }
1594 #endif
1595
1596 static inline void gen_op_fpexception_im(int fsr_flags)
1597 {
1598     TCGv_i32 r_const;
1599
1600     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1601     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1602     r_const = tcg_const_i32(TT_FP_EXCP);
1603     gen_helper_raise_exception(cpu_env, r_const);
1604     tcg_temp_free_i32(r_const);
1605 }
1606
1607 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1608 {
1609 #if !defined(CONFIG_USER_ONLY)
1610     if (!dc->fpu_enabled) {
1611         TCGv_i32 r_const;
1612
1613         save_state(dc, r_cond);
1614         r_const = tcg_const_i32(TT_NFPU_INSN);
1615         gen_helper_raise_exception(cpu_env, r_const);
1616         tcg_temp_free_i32(r_const);
1617         dc->is_br = 1;
1618         return 1;
1619     }
1620 #endif
1621     return 0;
1622 }
1623
1624 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1625 {
1626     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1627 }
1628
1629 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1630                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1631 {
1632     TCGv_i32 dst, src;
1633
1634     src = gen_load_fpr_F(dc, rs);
1635     dst = gen_dest_fpr_F();
1636
1637     gen(dst, cpu_env, src);
1638
1639     gen_store_fpr_F(dc, rd, dst);
1640 }
1641
1642 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1643                                  void (*gen)(TCGv_i32, TCGv_i32))
1644 {
1645     TCGv_i32 dst, src;
1646
1647     src = gen_load_fpr_F(dc, rs);
1648     dst = gen_dest_fpr_F();
1649
1650     gen(dst, src);
1651
1652     gen_store_fpr_F(dc, rd, dst);
1653 }
1654
1655 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1656                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1657 {
1658     TCGv_i32 dst, src1, src2;
1659
1660     src1 = gen_load_fpr_F(dc, rs1);
1661     src2 = gen_load_fpr_F(dc, rs2);
1662     dst = gen_dest_fpr_F();
1663
1664     gen(dst, cpu_env, src1, src2);
1665
1666     gen_store_fpr_F(dc, rd, dst);
1667 }
1668
1669 #ifdef TARGET_SPARC64
1670 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1671                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1672 {
1673     TCGv_i32 dst, src1, src2;
1674
1675     src1 = gen_load_fpr_F(dc, rs1);
1676     src2 = gen_load_fpr_F(dc, rs2);
1677     dst = gen_dest_fpr_F();
1678
1679     gen(dst, src1, src2);
1680
1681     gen_store_fpr_F(dc, rd, dst);
1682 }
1683 #endif
1684
1685 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1686                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1687 {
1688     TCGv_i64 dst, src;
1689
1690     src = gen_load_fpr_D(dc, rs);
1691     dst = gen_dest_fpr_D();
1692
1693     gen(dst, cpu_env, src);
1694
1695     gen_store_fpr_D(dc, rd, dst);
1696 }
1697
1698 #ifdef TARGET_SPARC64
1699 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1700                                  void (*gen)(TCGv_i64, TCGv_i64))
1701 {
1702     TCGv_i64 dst, src;
1703
1704     src = gen_load_fpr_D(dc, rs);
1705     dst = gen_dest_fpr_D();
1706
1707     gen(dst, src);
1708
1709     gen_store_fpr_D(dc, rd, dst);
1710 }
1711 #endif
1712
1713 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1714                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1715 {
1716     TCGv_i64 dst, src1, src2;
1717
1718     src1 = gen_load_fpr_D(dc, rs1);
1719     src2 = gen_load_fpr_D(dc, rs2);
1720     dst = gen_dest_fpr_D();
1721
1722     gen(dst, cpu_env, src1, src2);
1723
1724     gen_store_fpr_D(dc, rd, dst);
1725 }
1726
1727 #ifdef TARGET_SPARC64
1728 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1729                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1730 {
1731     TCGv_i64 dst, src1, src2;
1732
1733     src1 = gen_load_fpr_D(dc, rs1);
1734     src2 = gen_load_fpr_D(dc, rs2);
1735     dst = gen_dest_fpr_D();
1736
1737     gen(dst, src1, src2);
1738
1739     gen_store_fpr_D(dc, rd, dst);
1740 }
1741
1742 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1743                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1744 {
1745     TCGv_i64 dst, src0, src1, src2;
1746
1747     src1 = gen_load_fpr_D(dc, rs1);
1748     src2 = gen_load_fpr_D(dc, rs2);
1749     src0 = gen_load_fpr_D(dc, rd);
1750     dst = gen_dest_fpr_D();
1751
1752     gen(dst, src0, src1, src2);
1753
1754     gen_store_fpr_D(dc, rd, dst);
1755 }
1756 #endif
1757
1758 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1759                               void (*gen)(TCGv_ptr))
1760 {
1761     gen_op_load_fpr_QT1(QFPREG(rs));
1762
1763     gen(cpu_env);
1764
1765     gen_op_store_QT0_fpr(QFPREG(rd));
1766     gen_update_fprs_dirty(QFPREG(rd));
1767 }
1768
1769 #ifdef TARGET_SPARC64
1770 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1771                                  void (*gen)(TCGv_ptr))
1772 {
1773     gen_op_load_fpr_QT1(QFPREG(rs));
1774
1775     gen(cpu_env);
1776
1777     gen_op_store_QT0_fpr(QFPREG(rd));
1778     gen_update_fprs_dirty(QFPREG(rd));
1779 }
1780 #endif
1781
1782 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1783                                void (*gen)(TCGv_ptr))
1784 {
1785     gen_op_load_fpr_QT0(QFPREG(rs1));
1786     gen_op_load_fpr_QT1(QFPREG(rs2));
1787
1788     gen(cpu_env);
1789
1790     gen_op_store_QT0_fpr(QFPREG(rd));
1791     gen_update_fprs_dirty(QFPREG(rd));
1792 }
1793
1794 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1795                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1796 {
1797     TCGv_i64 dst;
1798     TCGv_i32 src1, src2;
1799
1800     src1 = gen_load_fpr_F(dc, rs1);
1801     src2 = gen_load_fpr_F(dc, rs2);
1802     dst = gen_dest_fpr_D();
1803
1804     gen(dst, cpu_env, src1, src2);
1805
1806     gen_store_fpr_D(dc, rd, dst);
1807 }
1808
1809 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1810                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1811 {
1812     TCGv_i64 src1, src2;
1813
1814     src1 = gen_load_fpr_D(dc, rs1);
1815     src2 = gen_load_fpr_D(dc, rs2);
1816
1817     gen(cpu_env, src1, src2);
1818
1819     gen_op_store_QT0_fpr(QFPREG(rd));
1820     gen_update_fprs_dirty(QFPREG(rd));
1821 }
1822
1823 #ifdef TARGET_SPARC64
1824 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1825                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1826 {
1827     TCGv_i64 dst;
1828     TCGv_i32 src;
1829
1830     src = gen_load_fpr_F(dc, rs);
1831     dst = gen_dest_fpr_D();
1832
1833     gen(dst, cpu_env, src);
1834
1835     gen_store_fpr_D(dc, rd, dst);
1836 }
1837 #endif
1838
1839 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1840                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1841 {
1842     TCGv_i64 dst;
1843     TCGv_i32 src;
1844
1845     src = gen_load_fpr_F(dc, rs);
1846     dst = gen_dest_fpr_D();
1847
1848     gen(dst, cpu_env, src);
1849
1850     gen_store_fpr_D(dc, rd, dst);
1851 }
1852
1853 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1854                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1855 {
1856     TCGv_i32 dst;
1857     TCGv_i64 src;
1858
1859     src = gen_load_fpr_D(dc, rs);
1860     dst = gen_dest_fpr_F();
1861
1862     gen(dst, cpu_env, src);
1863
1864     gen_store_fpr_F(dc, rd, dst);
1865 }
1866
1867 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1868                               void (*gen)(TCGv_i32, TCGv_ptr))
1869 {
1870     TCGv_i32 dst;
1871
1872     gen_op_load_fpr_QT1(QFPREG(rs));
1873     dst = gen_dest_fpr_F();
1874
1875     gen(dst, cpu_env);
1876
1877     gen_store_fpr_F(dc, rd, dst);
1878 }
1879
1880 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1881                               void (*gen)(TCGv_i64, TCGv_ptr))
1882 {
1883     TCGv_i64 dst;
1884
1885     gen_op_load_fpr_QT1(QFPREG(rs));
1886     dst = gen_dest_fpr_D();
1887
1888     gen(dst, cpu_env);
1889
1890     gen_store_fpr_D(dc, rd, dst);
1891 }
1892
1893 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1894                                  void (*gen)(TCGv_ptr, TCGv_i32))
1895 {
1896     TCGv_i32 src;
1897
1898     src = gen_load_fpr_F(dc, rs);
1899
1900     gen(cpu_env, src);
1901
1902     gen_op_store_QT0_fpr(QFPREG(rd));
1903     gen_update_fprs_dirty(QFPREG(rd));
1904 }
1905
1906 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1907                                  void (*gen)(TCGv_ptr, TCGv_i64))
1908 {
1909     TCGv_i64 src;
1910
1911     src = gen_load_fpr_D(dc, rs);
1912
1913     gen(cpu_env, src);
1914
1915     gen_op_store_QT0_fpr(QFPREG(rd));
1916     gen_update_fprs_dirty(QFPREG(rd));
1917 }
1918
1919 /* asi moves */
1920 #ifdef TARGET_SPARC64
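/* Return the ASI for an alternate-space access: with the i bit set it comes
   from the %asi register, otherwise from the 8-bit asi field of the
   instruction.  */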
1921 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1922 {
1923     int asi;
1924     TCGv_i32 r_asi;
1925
1926     if (IS_IMM) {
1927         r_asi = tcg_temp_new_i32();
1928         tcg_gen_mov_i32(r_asi, cpu_asi);
1929     } else {
1930         asi = GET_FIELD(insn, 19, 26);
1931         r_asi = tcg_const_i32(asi);
1932     }
1933     return r_asi;
1934 }
1935
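/* Alternate-space load/store: the helper receives the address, the ASI, the
   access size in bytes and (for loads) whether to sign-extend the result.  */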
1936 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1937                               int sign)
1938 {
1939     TCGv_i32 r_asi, r_size, r_sign;
1940
1941     r_asi = gen_get_asi(insn, addr);
1942     r_size = tcg_const_i32(size);
1943     r_sign = tcg_const_i32(sign);
1944     gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1945     tcg_temp_free_i32(r_sign);
1946     tcg_temp_free_i32(r_size);
1947     tcg_temp_free_i32(r_asi);
1948 }
1949
1950 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1951 {
1952     TCGv_i32 r_asi, r_size;
1953
1954     r_asi = gen_get_asi(insn, addr);
1955     r_size = tcg_const_i32(size);
1956     gen_helper_st_asi(addr, src, r_asi, r_size);
1957     tcg_temp_free_i32(r_size);
1958     tcg_temp_free_i32(r_asi);
1959 }
1960
1961 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1962 {
1963     TCGv_i32 r_asi, r_size, r_rd;
1964
1965     r_asi = gen_get_asi(insn, addr);
1966     r_size = tcg_const_i32(size);
1967     r_rd = tcg_const_i32(rd);
1968     gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1969     tcg_temp_free_i32(r_rd);
1970     tcg_temp_free_i32(r_size);
1971     tcg_temp_free_i32(r_asi);
1972 }
1973
1974 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1975 {
1976     TCGv_i32 r_asi, r_size, r_rd;
1977
1978     r_asi = gen_get_asi(insn, addr);
1979     r_size = tcg_const_i32(size);
1980     r_rd = tcg_const_i32(rd);
1981     gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1982     tcg_temp_free_i32(r_rd);
1983     tcg_temp_free_i32(r_size);
1984     tcg_temp_free_i32(r_asi);
1985 }
1986
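/* SWAPA: exchange a register with the 32-bit word at [addr] %asi,
   implemented non-atomically as a load followed by a store.  */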
1987 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1988 {
1989     TCGv_i32 r_asi, r_size, r_sign;
1990
1991     r_asi = gen_get_asi(insn, addr);
1992     r_size = tcg_const_i32(4);
1993     r_sign = tcg_const_i32(0);
1994     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1995     tcg_temp_free_i32(r_sign);
1996     gen_helper_st_asi(addr, dst, r_asi, r_size);
1997     tcg_temp_free_i32(r_size);
1998     tcg_temp_free_i32(r_asi);
1999     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2000 }
2001
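/* LDDA/STDA: 64-bit alternate-space access to an even/odd register pair.  */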
2002 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2003 {
2004     TCGv_i32 r_asi, r_rd;
2005
2006     r_asi = gen_get_asi(insn, addr);
2007     r_rd = tcg_const_i32(rd);
2008     gen_helper_ldda_asi(addr, r_asi, r_rd);
2009     tcg_temp_free_i32(r_rd);
2010     tcg_temp_free_i32(r_asi);
2011 }
2012
2013 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2014 {
2015     TCGv_i32 r_asi, r_size;
2016
2017     gen_movl_reg_TN(rd + 1, cpu_tmp0);
2018     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2019     r_asi = gen_get_asi(insn, addr);
2020     r_size = tcg_const_i32(8);
2021     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
2022     tcg_temp_free_i32(r_size);
2023     tcg_temp_free_i32(r_asi);
2024 }
2025
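/* CASA/CASXA: compare-and-swap through an alternate space; the previous
   memory value is returned in dst.  */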
2026 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2027                                int rd)
2028 {
2029     TCGv r_val1;
2030     TCGv_i32 r_asi;
2031
2032     r_val1 = tcg_temp_new();
2033     gen_movl_reg_TN(rd, r_val1);
2034     r_asi = gen_get_asi(insn, addr);
2035     gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
2036     tcg_temp_free_i32(r_asi);
2037     tcg_temp_free(r_val1);
2038 }
2039
2040 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2041                                 int rd)
2042 {
2043     TCGv_i32 r_asi;
2044
2045     gen_movl_reg_TN(rd, cpu_tmp64);
2046     r_asi = gen_get_asi(insn, addr);
2047     gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
2048     tcg_temp_free_i32(r_asi);
2049 }
2050
2051 #elif !defined(CONFIG_USER_ONLY)
2052
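/* SPARCv8 system-mode versions: the ASI always comes from the immediate
   asi field of the instruction, and 64-bit values go through cpu_tmp64.  */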
2053 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2054                               int sign)
2055 {
2056     TCGv_i32 r_asi, r_size, r_sign;
2057
2058     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2059     r_size = tcg_const_i32(size);
2060     r_sign = tcg_const_i32(sign);
2061     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
2062     tcg_temp_free(r_sign);
2063     tcg_temp_free(r_size);
2064     tcg_temp_free(r_asi);
2065     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2066 }
2067
2068 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2069 {
2070     TCGv_i32 r_asi, r_size;
2071
2072     tcg_gen_extu_tl_i64(cpu_tmp64, src);
2073     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2074     r_size = tcg_const_i32(size);
2075     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
2076     tcg_temp_free(r_size);
2077     tcg_temp_free(r_asi);
2078 }
2079
2080 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2081 {
2082     TCGv_i32 r_asi, r_size, r_sign;
2083     TCGv_i64 r_val;
2084
2085     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2086     r_size = tcg_const_i32(4);
2087     r_sign = tcg_const_i32(0);
2088     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
2089     tcg_temp_free(r_sign);
2090     r_val = tcg_temp_new_i64();
2091     tcg_gen_extu_tl_i64(r_val, dst);
2092     gen_helper_st_asi(addr, r_val, r_asi, r_size);
2093     tcg_temp_free_i64(r_val);
2094     tcg_temp_free(r_size);
2095     tcg_temp_free(r_asi);
2096     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2097 }
2098
2099 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2100 {
2101     TCGv_i32 r_asi, r_size, r_sign;
2102
2103     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2104     r_size = tcg_const_i32(8);
2105     r_sign = tcg_const_i32(0);
2106     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
2107     tcg_temp_free(r_sign);
2108     tcg_temp_free(r_size);
2109     tcg_temp_free(r_asi);
2110     tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
2111     gen_movl_TN_reg(rd + 1, cpu_tmp0);
2112     tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2113     tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2114     gen_movl_TN_reg(rd, hi);
2115 }
2116
2117 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2118 {
2119     TCGv_i32 r_asi, r_size;
2120
2121     gen_movl_reg_TN(rd + 1, cpu_tmp0);
2122     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2123     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2124     r_size = tcg_const_i32(8);
2125     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
2126     tcg_temp_free(r_size);
2127     tcg_temp_free(r_asi);
2128 }
2129 #endif
2130
2131 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
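/* LDSTUBA: load the byte at [addr] %asi, then store 0xff to it.  */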
2132 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2133 {
2134     TCGv_i64 r_val;
2135     TCGv_i32 r_asi, r_size;
2136
2137     gen_ld_asi(dst, addr, insn, 1, 0);
2138
2139     r_val = tcg_const_i64(0xffULL);
2140     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2141     r_size = tcg_const_i32(1);
2142     gen_helper_st_asi(addr, r_val, r_asi, r_size);
2143     tcg_temp_free_i32(r_size);
2144     tcg_temp_free_i32(r_asi);
2145     tcg_temp_free_i64(r_val);
2146 }
2147 #endif
2148
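/* Fetch the rs1/rs2 operand of an instruction: %g0 reads as zero, the other
   globals come straight from cpu_gregs, and window registers are loaded from
   cpu_regwptr into the caller-supplied temporary.  get_src2 also handles the
   simm13 immediate form.  */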
2149 static inline TCGv get_src1(unsigned int insn, TCGv def)
2150 {
2151     TCGv r_rs1 = def;
2152     unsigned int rs1;
2153
2154     rs1 = GET_FIELD(insn, 13, 17);
2155     if (rs1 == 0) {
2156         tcg_gen_movi_tl(def, 0);
2157     } else if (rs1 < 8) {
2158         r_rs1 = cpu_gregs[rs1];
2159     } else {
2160         tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
2161     }
2162     return r_rs1;
2163 }
2164
2165 static inline TCGv get_src2(unsigned int insn, TCGv def)
2166 {
2167     TCGv r_rs2 = def;
2168
2169     if (IS_IMM) { /* immediate */
2170         target_long simm = GET_FIELDs(insn, 19, 31);
2171         tcg_gen_movi_tl(def, simm);
2172     } else { /* register */
2173         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2174         if (rs2 == 0) {
2175             tcg_gen_movi_tl(def, 0);
2176         } else if (rs2 < 8) {
2177             r_rs2 = cpu_gregs[rs2];
2178         } else {
2179             tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
2180         }
2181     }
2182     return r_rs2;
2183 }
2184
2185 #ifdef TARGET_SPARC64
2186 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2187 {
2188     TCGv_i32 r_tl = tcg_temp_new_i32();
2189
2190     /* load env->tl into r_tl */
2191     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2192
2193     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be one less than a power of 2 */
2194     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2195
2196     /* calculate offset to current trap state from env->ts, reuse r_tl */
2197     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2198     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
2199
2200     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2201     {
2202         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2203         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2204         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2205         tcg_temp_free_ptr(r_tl_tmp);
2206     }
2207
2208     tcg_temp_free_i32(r_tl);
2209 }
2210 #endif
2211
2212 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
2213     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2214         goto illegal_insn;
2215 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2216     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2217         goto nfpu_insn;
2218
2219 /* before an instruction, dc->pc must be static (known at translation time, not DYNAMIC_PC) */
2220 static void disas_sparc_insn(DisasContext * dc)
2221 {
2222     unsigned int insn, opc, rs1, rs2, rd;
2223     TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2224     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2225     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2226     target_long simm;
2227
2228     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2229         tcg_gen_debug_insn_start(dc->pc);
2230     insn = ldl_code(dc->pc);
2231     opc = GET_FIELD(insn, 0, 1);
2232
2233     rd = GET_FIELD(insn, 2, 6);
2234
2235     cpu_tmp1 = cpu_src1 = tcg_temp_new();
2236     cpu_tmp2 = cpu_src2 = tcg_temp_new();
2237
2238     switch (opc) {
2239     case 0:                     /* branches/sethi */
2240         {
2241             unsigned int xop = GET_FIELD(insn, 7, 9);
2242             int32_t target;
2243             switch (xop) {
2244 #ifdef TARGET_SPARC64
2245             case 0x1:           /* V9 BPcc */
2246                 {
2247                     int cc;
2248
2249                     target = GET_FIELD_SP(insn, 0, 18);
2250                     target = sign_extend(target, 19);
2251                     target <<= 2;
2252                     cc = GET_FIELD_SP(insn, 20, 21);
2253                     if (cc == 0)
2254                         do_branch(dc, target, insn, 0, cpu_cond);
2255                     else if (cc == 2)
2256                         do_branch(dc, target, insn, 1, cpu_cond);
2257                     else
2258                         goto illegal_insn;
2259                     goto jmp_insn;
2260                 }
2261             case 0x3:           /* V9 BPr */
2262                 {
2263                     target = GET_FIELD_SP(insn, 0, 13) |
2264                         (GET_FIELD_SP(insn, 20, 21) << 14);
2265                     target = sign_extend(target, 16);
2266                     target <<= 2;
2267                     cpu_src1 = get_src1(insn, cpu_src1);
2268                     do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2269                     goto jmp_insn;
2270                 }
2271             case 0x5:           /* V9 FBPcc */
2272                 {
2273                     int cc = GET_FIELD_SP(insn, 20, 21);
2274                     if (gen_trap_ifnofpu(dc, cpu_cond))
2275                         goto jmp_insn;
2276                     target = GET_FIELD_SP(insn, 0, 18);
2277                     target = sign_extend(target, 19);
2278                     target <<= 2;
2279                     do_fbranch(dc, target, insn, cc, cpu_cond);
2280                     goto jmp_insn;
2281                 }
2282 #else
2283             case 0x7:           /* CBN+x */
2284                 {
2285                     goto ncp_insn;
2286                 }
2287 #endif
2288             case 0x2:           /* BN+x */
2289                 {
2290                     target = GET_FIELD(insn, 10, 31);
2291                     target = sign_extend(target, 22);
2292                     target <<= 2;
2293                     do_branch(dc, target, insn, 0, cpu_cond);
2294                     goto jmp_insn;
2295                 }
2296             case 0x6:           /* FBN+x */
2297                 {
2298                     if (gen_trap_ifnofpu(dc, cpu_cond))
2299                         goto jmp_insn;
2300                     target = GET_FIELD(insn, 10, 31);
2301                     target = sign_extend(target, 22);
2302                     target <<= 2;
2303                     do_fbranch(dc, target, insn, 0, cpu_cond);
2304                     goto jmp_insn;
2305                 }
2306             case 0x4:           /* SETHI */
2307                 if (rd) { /* SETHI with rd == %g0 is a nop */
2308                     uint32_t value = GET_FIELD(insn, 10, 31);
2309                     TCGv r_const;
2310
2311                     r_const = tcg_const_tl(value << 10);
2312                     gen_movl_TN_reg(rd, r_const);
2313                     tcg_temp_free(r_const);
2314                 }
2315                 break;
2316             case 0x0:           /* UNIMPL */
2317             default:
2318                 goto illegal_insn;
2319             }
2320             break;
2321         }
2322         break;
2323     case 1:                     /* CALL */
2324         {
2325             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2326             TCGv r_const;
2327
2328             r_const = tcg_const_tl(dc->pc);
2329             gen_movl_TN_reg(15, r_const);
2330             tcg_temp_free(r_const);
2331             target += dc->pc;
2332             gen_mov_pc_npc(dc, cpu_cond);
2333             dc->npc = target;
2334         }
2335         goto jmp_insn;
2336     case 2:                     /* FPU & Logical Operations */
2337         {
2338             unsigned int xop = GET_FIELD(insn, 7, 12);
2339             if (xop == 0x3a) {  /* generate trap */
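                /* Tcc: the trap number is (rs1 + rs2/imm), masked to the V8
                   (or UA2005 hypervisor) trap range and offset by TT_TRAP.  */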
2340                 int cond;
2341
2342                 cpu_src1 = get_src1(insn, cpu_src1);
2343                 if (IS_IMM) {
2344                     rs2 = GET_FIELD(insn, 25, 31);
2345                     tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2346                 } else {
2347                     rs2 = GET_FIELD(insn, 27, 31);
2348                     if (rs2 != 0) {
2349                         gen_movl_reg_TN(rs2, cpu_src2);
2350                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2351                     } else
2352                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
2353                 }
2354
2355                 cond = GET_FIELD(insn, 3, 6);
2356                 if (cond == 0x8) { /* Trap Always */
2357                     save_state(dc, cpu_cond);
2358                     if ((dc->def->features & CPU_FEATURE_HYPV) &&
2359                         supervisor(dc))
2360                         tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2361                     else
2362                         tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2363                     tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2364                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2365
2366                     if (rs2 == 0 &&
2367                         dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2368
2369                         gen_helper_shutdown();
2370
2371                     } else {
2372                         gen_helper_raise_exception(cpu_env, cpu_tmp32);
2373                     }
2374                 } else if (cond != 0) {
2375                     TCGv r_cond = tcg_temp_new();
2376                     int l1;
2377 #ifdef TARGET_SPARC64
2378                     /* V9 icc/xcc */
2379                     int cc = GET_FIELD_SP(insn, 11, 12);
2380
2381                     save_state(dc, cpu_cond);
2382                     if (cc == 0)
2383                         gen_cond(r_cond, 0, cond, dc);
2384                     else if (cc == 2)
2385                         gen_cond(r_cond, 1, cond, dc);
2386                     else
2387                         goto illegal_insn;
2388 #else
2389                     save_state(dc, cpu_cond);
2390                     gen_cond(r_cond, 0, cond, dc);
2391 #endif
2392                     l1 = gen_new_label();
2393                     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2394
2395                     if ((dc->def->features & CPU_FEATURE_HYPV) &&
2396                         supervisor(dc))
2397                         tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2398                     else
2399                         tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2400                     tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2401                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2402                     gen_helper_raise_exception(cpu_env, cpu_tmp32);
2403
2404                     gen_set_label(l1);
2405                     tcg_temp_free(r_cond);
2406                 }
2407                 gen_op_next_insn();
2408                 tcg_gen_exit_tb(0);
2409                 dc->is_br = 1;
2410                 goto jmp_insn;
2411             } else if (xop == 0x28) { /* rdy / rdasr */
2412                 rs1 = GET_FIELD(insn, 13, 17);
2413                 switch(rs1) {
2414                 case 0: /* rdy */
2415 #ifndef TARGET_SPARC64
2416                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2417                                        manual, rdy on the microSPARC
2418                                        II */
2419                 case 0x0f:          /* stbar in the SPARCv8 manual,
2420                                        rdy on the microSPARC II */
2421                 case 0x10 ... 0x1f: /* implementation-dependent in the
2422                                        SPARCv8 manual, rdy on the
2423                                        microSPARC II */
2424                     /* Read Asr17 */
2425                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2426                         TCGv r_const;
2427
2428                         /* Read Asr17 for a Leon3 monoprocessor */
2429                         r_const = tcg_const_tl((1 << 8)
2430                                                | (dc->def->nwindows - 1));
2431                         gen_movl_TN_reg(rd, r_const);
2432                         tcg_temp_free(r_const);
2433                         break;
2434                     }
2435 #endif
2436                     gen_movl_TN_reg(rd, cpu_y);
2437                     break;
2438 #ifdef TARGET_SPARC64
2439                 case 0x2: /* V9 rdccr */
2440                     gen_helper_compute_psr(cpu_env);
2441                     gen_helper_rdccr(cpu_dst, cpu_env);
2442                     gen_movl_TN_reg(rd, cpu_dst);
2443                     break;
2444                 case 0x3: /* V9 rdasi */
2445                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2446                     gen_movl_TN_reg(rd, cpu_dst);
2447                     break;
2448                 case 0x4: /* V9 rdtick */
2449                     {
2450                         TCGv_ptr r_tickptr;
2451
2452                         r_tickptr = tcg_temp_new_ptr();
2453                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2454                                        offsetof(CPUState, tick));
2455                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2456                         tcg_temp_free_ptr(r_tickptr);
2457                         gen_movl_TN_reg(rd, cpu_dst);
2458                     }
2459                     break;
2460                 case 0x5: /* V9 rdpc */
2461                     {
2462                         TCGv r_const;
2463
2464                         r_const = tcg_const_tl(dc->pc);
2465                         gen_movl_TN_reg(rd, r_const);
2466                         tcg_temp_free(r_const);
2467                     }
2468                     break;
2469                 case 0x6: /* V9 rdfprs */
2470                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2471                     gen_movl_TN_reg(rd, cpu_dst);
2472                     break;
2473                 case 0xf: /* V9 membar */
2474                     break; /* no effect */
2475                 case 0x13: /* Graphics Status */
2476                     if (gen_trap_ifnofpu(dc, cpu_cond))
2477                         goto jmp_insn;
2478                     gen_movl_TN_reg(rd, cpu_gsr);
2479                     break;
2480                 case 0x16: /* Softint */
2481                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2482                     gen_movl_TN_reg(rd, cpu_dst);
2483                     break;
2484                 case 0x17: /* Tick compare */
2485                     gen_movl_TN_reg(rd, cpu_tick_cmpr);
2486                     break;
2487                 case 0x18: /* System tick */
2488                     {
2489                         TCGv_ptr r_tickptr;
2490
2491                         r_tickptr = tcg_temp_new_ptr();
2492                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2493                                        offsetof(CPUState, stick));
2494                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2495                         tcg_temp_free_ptr(r_tickptr);
2496                         gen_movl_TN_reg(rd, cpu_dst);
2497                     }
2498                     break;
2499                 case 0x19: /* System tick compare */
2500                     gen_movl_TN_reg(rd, cpu_stick_cmpr);
2501                     break;
2502                 case 0x10: /* Performance Control */
2503                 case 0x11: /* Performance Instrumentation Counter */
2504                 case 0x12: /* Dispatch Control */
2505                 case 0x14: /* Softint set, WO */
2506                 case 0x15: /* Softint clear, WO */
2507 #endif
2508                 default:
2509                     goto illegal_insn;
2510                 }
2511 #if !defined(CONFIG_USER_ONLY)
2512             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2513 #ifndef TARGET_SPARC64
2514                 if (!supervisor(dc))
2515                     goto priv_insn;
2516                 gen_helper_compute_psr(cpu_env);
2517                 dc->cc_op = CC_OP_FLAGS;
2518                 gen_helper_rdpsr(cpu_dst, cpu_env);
2519 #else
2520                 CHECK_IU_FEATURE(dc, HYPV);
2521                 if (!hypervisor(dc))
2522                     goto priv_insn;
2523                 rs1 = GET_FIELD(insn, 13, 17);
2524                 switch (rs1) {
2525                 case 0: // hpstate
2526                     // gen_op_rdhpstate();
2527                     break;
2528                 case 1: // htstate
2529                     // gen_op_rdhtstate();
2530                     break;
2531                 case 3: // hintp
2532                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2533                     break;
2534                 case 5: // htba
2535                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2536                     break;
2537                 case 6: // hver
2538                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2539                     break;
2540                 case 31: // hstick_cmpr
2541                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2542                     break;
2543                 default:
2544                     goto illegal_insn;
2545                 }
2546 #endif
2547                 gen_movl_TN_reg(rd, cpu_dst);
2548                 break;
2549             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2550                 if (!supervisor(dc))
2551                     goto priv_insn;
2552 #ifdef TARGET_SPARC64
2553                 rs1 = GET_FIELD(insn, 13, 17);
2554                 switch (rs1) {
2555                 case 0: // tpc
2556                     {
2557                         TCGv_ptr r_tsptr;
2558
2559                         r_tsptr = tcg_temp_new_ptr();
2560                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2561                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2562                                       offsetof(trap_state, tpc));
2563                         tcg_temp_free_ptr(r_tsptr);
2564                     }
2565                     break;
2566                 case 1: // tnpc
2567                     {
2568                         TCGv_ptr r_tsptr;
2569
2570                         r_tsptr = tcg_temp_new_ptr();
2571                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2572                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2573                                       offsetof(trap_state, tnpc));
2574                         tcg_temp_free_ptr(r_tsptr);
2575                     }
2576                     break;
2577                 case 2: // tstate
2578                     {
2579                         TCGv_ptr r_tsptr;
2580
2581                         r_tsptr = tcg_temp_new_ptr();
2582                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2583                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2584                                       offsetof(trap_state, tstate));
2585                         tcg_temp_free_ptr(r_tsptr);
2586                     }
2587                     break;
2588                 case 3: // tt
2589                     {
2590                         TCGv_ptr r_tsptr;
2591
2592                         r_tsptr = tcg_temp_new_ptr();
2593                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2594                         tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2595                                        offsetof(trap_state, tt));
2596                         tcg_temp_free_ptr(r_tsptr);
2597                         tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2598                     }
2599                     break;
2600                 case 4: // tick
2601                     {
2602                         TCGv_ptr r_tickptr;
2603
2604                         r_tickptr = tcg_temp_new_ptr();
2605                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2606                                        offsetof(CPUState, tick));
2607                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2608                         gen_movl_TN_reg(rd, cpu_tmp0);
2609                         tcg_temp_free_ptr(r_tickptr);
2610                     }
2611                     break;
2612                 case 5: // tba
2613                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2614                     break;
2615                 case 6: // pstate
2616                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2617                                    offsetof(CPUSPARCState, pstate));
2618                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2619                     break;
2620                 case 7: // tl
2621                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2622                                    offsetof(CPUSPARCState, tl));
2623                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2624                     break;
2625                 case 8: // pil
2626                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2627                                    offsetof(CPUSPARCState, psrpil));
2628                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2629                     break;
2630                 case 9: // cwp
2631                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
2632                     break;
2633                 case 10: // cansave
2634                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2635                                    offsetof(CPUSPARCState, cansave));
2636                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2637                     break;
2638                 case 11: // canrestore
2639                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2640                                    offsetof(CPUSPARCState, canrestore));
2641                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2642                     break;
2643                 case 12: // cleanwin
2644                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2645                                    offsetof(CPUSPARCState, cleanwin));
2646                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2647                     break;
2648                 case 13: // otherwin
2649                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2650                                    offsetof(CPUSPARCState, otherwin));
2651                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2652                     break;
2653                 case 14: // wstate
2654                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2655                                    offsetof(CPUSPARCState, wstate));
2656                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2657                     break;
2658                 case 16: // UA2005 gl
2659                     CHECK_IU_FEATURE(dc, GL);
2660                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2661                                    offsetof(CPUSPARCState, gl));
2662                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2663                     break;
2664                 case 26: // UA2005 strand status
2665                     CHECK_IU_FEATURE(dc, HYPV);
2666                     if (!hypervisor(dc))
2667                         goto priv_insn;
2668                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2669                     break;
2670                 case 31: // ver
2671                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2672                     break;
2673                 case 15: // fq
2674                 default:
2675                     goto illegal_insn;
2676                 }
2677 #else
2678                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2679 #endif
2680                 gen_movl_TN_reg(rd, cpu_tmp0);
2681                 break;
2682             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2683 #ifdef TARGET_SPARC64
2684                 save_state(dc, cpu_cond);
2685                 gen_helper_flushw(cpu_env);
2686 #else
2687                 if (!supervisor(dc))
2688                     goto priv_insn;
2689                 gen_movl_TN_reg(rd, cpu_tbr);
2690 #endif
2691                 break;
2692 #endif
2693             } else if (xop == 0x34) {   /* FPop1: FPU arithmetic and conversions */
2694                 if (gen_trap_ifnofpu(dc, cpu_cond))
2695                     goto jmp_insn;
2696                 gen_op_clear_ieee_excp_and_FTT();
2697                 rs1 = GET_FIELD(insn, 13, 17);
2698                 rs2 = GET_FIELD(insn, 27, 31);
2699                 xop = GET_FIELD(insn, 18, 26);
2700                 save_state(dc, cpu_cond);
2701                 switch (xop) {
2702                 case 0x1: /* fmovs */
2703                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2704                     gen_store_fpr_F(dc, rd, cpu_src1_32);
2705                     break;
2706                 case 0x5: /* fnegs */
2707                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2708                     break;
2709                 case 0x9: /* fabss */
2710                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2711                     break;
2712                 case 0x29: /* fsqrts */
2713                     CHECK_FPU_FEATURE(dc, FSQRT);
2714                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2715                     break;
2716                 case 0x2a: /* fsqrtd */
2717                     CHECK_FPU_FEATURE(dc, FSQRT);
2718                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2719                     break;
2720                 case 0x2b: /* fsqrtq */
2721                     CHECK_FPU_FEATURE(dc, FLOAT128);
2722                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2723                     break;
2724                 case 0x41: /* fadds */
2725                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2726                     break;
2727                 case 0x42: /* faddd */
2728                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2729                     break;
2730                 case 0x43: /* faddq */
2731                     CHECK_FPU_FEATURE(dc, FLOAT128);
2732                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2733                     break;
2734                 case 0x45: /* fsubs */
2735                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2736                     break;
2737                 case 0x46: /* fsubd */
2738                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2739                     break;
2740                 case 0x47: /* fsubq */
2741                     CHECK_FPU_FEATURE(dc, FLOAT128);
2742                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2743                     break;
2744                 case 0x49: /* fmuls */
2745                     CHECK_FPU_FEATURE(dc, FMUL);
2746                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2747                     break;
2748                 case 0x4a: /* fmuld */
2749                     CHECK_FPU_FEATURE(dc, FMUL);
2750                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2751                     break;
2752                 case 0x4b: /* fmulq */
2753                     CHECK_FPU_FEATURE(dc, FLOAT128);
2754                     CHECK_FPU_FEATURE(dc, FMUL);
2755                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2756                     break;
2757                 case 0x4d: /* fdivs */
2758                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2759                     break;
2760                 case 0x4e: /* fdivd */
2761                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2762                     break;
2763                 case 0x4f: /* fdivq */
2764                     CHECK_FPU_FEATURE(dc, FLOAT128);
2765                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2766                     break;
2767                 case 0x69: /* fsmuld */
2768                     CHECK_FPU_FEATURE(dc, FSMULD);
2769                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2770                     break;
2771                 case 0x6e: /* fdmulq */
2772                     CHECK_FPU_FEATURE(dc, FLOAT128);
2773                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2774                     break;
2775                 case 0xc4: /* fitos */
2776                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2777                     break;
2778                 case 0xc6: /* fdtos */
2779                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2780                     break;
2781                 case 0xc7: /* fqtos */
2782                     CHECK_FPU_FEATURE(dc, FLOAT128);
2783                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2784                     break;
2785                 case 0xc8: /* fitod */
2786                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2787                     break;
2788                 case 0xc9: /* fstod */
2789                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2790                     break;
2791                 case 0xcb: /* fqtod */
2792                     CHECK_FPU_FEATURE(dc, FLOAT128);
2793                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2794                     break;
2795                 case 0xcc: /* fitoq */
2796                     CHECK_FPU_FEATURE(dc, FLOAT128);
2797                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2798                     break;
2799                 case 0xcd: /* fstoq */
2800                     CHECK_FPU_FEATURE(dc, FLOAT128);
2801                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2802                     break;
2803                 case 0xce: /* fdtoq */
2804                     CHECK_FPU_FEATURE(dc, FLOAT128);
2805                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2806                     break;
2807                 case 0xd1: /* fstoi */
2808                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2809                     break;
2810                 case 0xd2: /* fdtoi */
2811                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2812                     break;
2813                 case 0xd3: /* fqtoi */
2814                     CHECK_FPU_FEATURE(dc, FLOAT128);
2815                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2816                     break;
2817 #ifdef TARGET_SPARC64
2818                 case 0x2: /* V9 fmovd */
2819                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2820                     gen_store_fpr_D(dc, rd, cpu_src1_64);
2821                     break;
2822                 case 0x3: /* V9 fmovq */
2823                     CHECK_FPU_FEATURE(dc, FLOAT128);
2824                     gen_move_Q(rd, rs2);
2825                     break;
2826                 case 0x6: /* V9 fnegd */
2827                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
2828                     break;
2829                 case 0x7: /* V9 fnegq */
2830                     CHECK_FPU_FEATURE(dc, FLOAT128);
2831                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
2832                     break;
2833                 case 0xa: /* V9 fabsd */
2834                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
2835                     break;
2836                 case 0xb: /* V9 fabsq */
2837                     CHECK_FPU_FEATURE(dc, FLOAT128);
2838                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
2839                     break;
2840                 case 0x81: /* V9 fstox */
2841                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
2842                     break;
2843                 case 0x82: /* V9 fdtox */
2844                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
2845                     break;
2846                 case 0x83: /* V9 fqtox */
2847                     CHECK_FPU_FEATURE(dc, FLOAT128);
2848                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
2849                     break;
2850                 case 0x84: /* V9 fxtos */
2851                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
2852                     break;
2853                 case 0x88: /* V9 fxtod */
2854                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
2855                     break;
2856                 case 0x8c: /* V9 fxtoq */
2857                     CHECK_FPU_FEATURE(dc, FLOAT128);
2858                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
2859                     break;
2860 #endif
2861                 default:
2862                     goto illegal_insn;
2863                 }
2864             } else if (xop == 0x35) {   /* FPop2: FPU conditional moves and compares */
2865 #ifdef TARGET_SPARC64
2866                 int cond;
2867 #endif
2868                 if (gen_trap_ifnofpu(dc, cpu_cond))
2869                     goto jmp_insn;
2870                 gen_op_clear_ieee_excp_and_FTT();
2871                 rs1 = GET_FIELD(insn, 13, 17);
2872                 rs2 = GET_FIELD(insn, 27, 31);
2873                 xop = GET_FIELD(insn, 18, 26);
2874                 save_state(dc, cpu_cond);
2875 #ifdef TARGET_SPARC64
2876                 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2877                     int l1;
2878
2879                     l1 = gen_new_label();
2880                     cond = GET_FIELD_SP(insn, 14, 17);
2881                     cpu_src1 = get_src1(insn, cpu_src1);
2882                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2883                                        0, l1);
2884                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2885                     gen_store_fpr_F(dc, rd, cpu_src1_32);
2886                     gen_set_label(l1);
2887                     break;
2888                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2889                     int l1;
2890
2891                     l1 = gen_new_label();
2892                     cond = GET_FIELD_SP(insn, 14, 17);
2893                     cpu_src1 = get_src1(insn, cpu_src1);
2894                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2895                                        0, l1);
2896                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2897                     gen_store_fpr_D(dc, rd, cpu_src1_64);
2898                     gen_set_label(l1);
2899                     break;
2900                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2901                     int l1;
2902
2903                     CHECK_FPU_FEATURE(dc, FLOAT128);
2904                     l1 = gen_new_label();
2905                     cond = GET_FIELD_SP(insn, 14, 17);
2906                     cpu_src1 = get_src1(insn, cpu_src1);
2907                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2908                                        0, l1);
2909                     gen_move_Q(rd, rs2);
2910                     gen_set_label(l1);
2911                     break;
2912                 }
2913 #endif
2914                 switch (xop) {
2915 #ifdef TARGET_SPARC64
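/* FMOVcc on %fccN: branch over the register copy when the FP condition
   does not hold.  */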
2916 #define FMOVSCC(fcc)                                                    \
2917                     {                                                   \
2918                         TCGv r_cond;                                    \
2919                         int l1;                                         \
2920                                                                         \
2921                         l1 = gen_new_label();                           \
2922                         r_cond = tcg_temp_new();                        \
2923                         cond = GET_FIELD_SP(insn, 14, 17);              \
2924                         gen_fcond(r_cond, fcc, cond);                   \
2925                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2926                                            0, l1);                      \
2927                         cpu_src1_32 = gen_load_fpr_F(dc, rs2);          \
2928                         gen_store_fpr_F(dc, rd, cpu_src1_32);           \
2929                         gen_set_label(l1);                              \
2930                         tcg_temp_free(r_cond);                          \
2931                     }
2932 #define FMOVDCC(fcc)                                                    \
2933                     {                                                   \
2934                         TCGv r_cond;                                    \
2935                         int l1;                                         \
2936                                                                         \
2937                         l1 = gen_new_label();                           \
2938                         r_cond = tcg_temp_new();                        \
2939                         cond = GET_FIELD_SP(insn, 14, 17);              \
2940                         gen_fcond(r_cond, fcc, cond);                   \
2941                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2942                                            0, l1);                      \
2943                         cpu_src1_64 = gen_load_fpr_D(dc, rs2);          \
2944                         gen_store_fpr_D(dc, rd, cpu_src1_64);           \
2945                         gen_set_label(l1);                              \
2946                         tcg_temp_free(r_cond);                          \
2947                     }
2948 #define FMOVQCC(fcc)                                                    \
2949                     {                                                   \
2950                         TCGv r_cond;                                    \
2951                         int l1;                                         \
2952                                                                         \
2953                         l1 = gen_new_label();                           \
2954                         r_cond = tcg_temp_new();                        \
2955                         cond = GET_FIELD_SP(insn, 14, 17);              \
2956                         gen_fcond(r_cond, fcc, cond);                   \
2957                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2958                                            0, l1);                      \
2959                         gen_move_Q(rd, rs2);                            \
2960                         gen_set_label(l1);                              \
2961                         tcg_temp_free(r_cond);                          \
2962                     }
2963                     case 0x001: /* V9 fmovscc %fcc0 */
2964                         FMOVSCC(0);
2965                         break;
2966                     case 0x002: /* V9 fmovdcc %fcc0 */
2967                         FMOVDCC(0);
2968                         break;
2969                     case 0x003: /* V9 fmovqcc %fcc0 */
2970                         CHECK_FPU_FEATURE(dc, FLOAT128);
2971                         FMOVQCC(0);
2972                         break;
2973                     case 0x041: /* V9 fmovscc %fcc1 */
2974                         FMOVSCC(1);
2975                         break;
2976                     case 0x042: /* V9 fmovdcc %fcc1 */
2977                         FMOVDCC(1);
2978                         break;
2979                     case 0x043: /* V9 fmovqcc %fcc1 */
2980                         CHECK_FPU_FEATURE(dc, FLOAT128);
2981                         FMOVQCC(1);
2982                         break;
2983                     case 0x081: /* V9 fmovscc %fcc2 */
2984                         FMOVSCC(2);
2985                         break;
2986                     case 0x082: /* V9 fmovdcc %fcc2 */
2987                         FMOVDCC(2);
2988                         break;
2989                     case 0x083: /* V9 fmovqcc %fcc2 */
2990                         CHECK_FPU_FEATURE(dc, FLOAT128);
2991                         FMOVQCC(2);
2992                         break;
2993                     case 0x0c1: /* V9 fmovscc %fcc3 */
2994                         FMOVSCC(3);
2995                         break;
2996                     case 0x0c2: /* V9 fmovdcc %fcc3 */
2997                         FMOVDCC(3);
2998                         break;
2999                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3000                         CHECK_FPU_FEATURE(dc, FLOAT128);
3001                         FMOVQCC(3);
3002                         break;
3003 #undef FMOVSCC
3004 #undef FMOVDCC
3005 #undef FMOVQCC
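/* Same templates, but testing the integer condition codes (%icc / %xcc).  */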
3006 #define FMOVSCC(icc)                                                    \
3007                     {                                                   \
3008                         TCGv r_cond;                                    \
3009                         int l1;                                         \
3010                                                                         \
3011                         l1 = gen_new_label();                           \
3012                         r_cond = tcg_temp_new();                        \
3013                         cond = GET_FIELD_SP(insn, 14, 17);              \
3014                         gen_cond(r_cond, icc, cond, dc);                \
3015                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
3016                                            0, l1);                      \
3017                         cpu_src1_32 = gen_load_fpr_F(dc, rs2);          \
3018                         gen_store_fpr_F(dc, rd, cpu_src1_32);           \
3019                         gen_set_label(l1);                              \
3020                         tcg_temp_free(r_cond);                          \
3021                     }
3022 #define FMOVDCC(icc)                                                    \
3023                     {                                                   \
3024                         TCGv r_cond;                                    \
3025                         int l1;                                         \
3026                                                                         \
3027                         l1 = gen_new_label();                           \
3028                         r_cond = tcg_temp_new();                        \
3029                         cond = GET_FIELD_SP(insn, 14, 17);              \
3030                         gen_cond(r_cond, icc, cond, dc);                \
3031                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
3032                                            0, l1);                      \
3033                         cpu_src1_64 = gen_load_fpr_D(dc, rs2);          \
3034                         gen_store_fpr_D(dc, rd, cpu_src1_64);           \
3035                         gen_update_fprs_dirty(DFPREG(rd));              \
3036                         gen_set_label(l1);                              \
3037                         tcg_temp_free(r_cond);                          \
3038                     }
3039 #define FMOVQCC(icc)                                                    \
3040                     {                                                   \
3041                         TCGv r_cond;                                    \
3042                         int l1;                                         \
3043                                                                         \
3044                         l1 = gen_new_label();                           \
3045                         r_cond = tcg_temp_new();                        \
3046                         cond = GET_FIELD_SP(insn, 14, 17);              \
3047                         gen_cond(r_cond, icc, cond, dc);                \
3048                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
3049                                            0, l1);                      \
3050                         gen_move_Q(rd, rs2);                            \
3051                         gen_set_label(l1);                              \
3052                         tcg_temp_free(r_cond);                          \
3053                     }
3054
3055                     case 0x101: /* V9 fmovscc %icc */
3056                         FMOVSCC(0);
3057                         break;
3058                     case 0x102: /* V9 fmovdcc %icc */
3059                         FMOVDCC(0);
3060                         break;
3061                     case 0x103: /* V9 fmovqcc %icc */
3062                         CHECK_FPU_FEATURE(dc, FLOAT128);
3063                         FMOVQCC(0);
3064                         break;
3065                     case 0x181: /* V9 fmovscc %xcc */
3066                         FMOVSCC(1);
3067                         break;
3068                     case 0x182: /* V9 fmovdcc %xcc */
3069                         FMOVDCC(1);
3070                         break;
3071                     case 0x183: /* V9 fmovqcc %xcc */
3072                         CHECK_FPU_FEATURE(dc, FLOAT128);
3073                         FMOVQCC(1);
3074                         break;
3075 #undef FMOVSCC
3076 #undef FMOVDCC
3077 #undef FMOVQCC
3078 #endif
3079                     case 0x51: /* fcmps, V9 %fcc */
3080                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3081                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3082                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3083                         break;
3084                     case 0x52: /* fcmpd, V9 %fcc */
3085                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3086                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3087                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3088                         break;
3089                     case 0x53: /* fcmpq, V9 %fcc */
3090                         CHECK_FPU_FEATURE(dc, FLOAT128);
3091                         gen_op_load_fpr_QT0(QFPREG(rs1));
3092                         gen_op_load_fpr_QT1(QFPREG(rs2));
3093                         gen_op_fcmpq(rd & 3);
3094                         break;
3095                     case 0x55: /* fcmpes, V9 %fcc */
3096                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3097                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3098                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3099                         break;
3100                     case 0x56: /* fcmped, V9 %fcc */
3101                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3102                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3103                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3104                         break;
3105                     case 0x57: /* fcmpeq, V9 %fcc */
3106                         CHECK_FPU_FEATURE(dc, FLOAT128);
3107                         gen_op_load_fpr_QT0(QFPREG(rs1));
3108                         gen_op_load_fpr_QT1(QFPREG(rs2));
3109                         gen_op_fcmpeq(rd & 3);
3110                         break;
3111                     default:
3112                         goto illegal_insn;
3113                 }
3114             } else if (xop == 0x2) {
3115                 // clr/mov shortcut
3116
3117                 rs1 = GET_FIELD(insn, 13, 17);
3118                 if (rs1 == 0) {
3119                     // or %g0, x, y is equivalent to mov x, y
3120                     if (IS_IMM) {       /* immediate */
3121                         TCGv r_const;
3122
3123                         simm = GET_FIELDs(insn, 19, 31);
3124                         r_const = tcg_const_tl(simm);
3125                         gen_movl_TN_reg(rd, r_const);
3126                         tcg_temp_free(r_const);
3127                     } else {            /* register */
3128                         rs2 = GET_FIELD(insn, 27, 31);
3129                         gen_movl_reg_TN(rs2, cpu_dst);
3130                         gen_movl_TN_reg(rd, cpu_dst);
3131                     }
3132                 } else {
3133                     cpu_src1 = get_src1(insn, cpu_src1);
3134                     if (IS_IMM) {       /* immediate */
3135                         simm = GET_FIELDs(insn, 19, 31);
3136                         tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3137                         gen_movl_TN_reg(rd, cpu_dst);
3138                     } else {            /* register */
3139                         // or x, %g0, y is equivalent to mov x, y
3140                         rs2 = GET_FIELD(insn, 27, 31);
3141                         if (rs2 != 0) {
3142                             gen_movl_reg_TN(rs2, cpu_src2);
3143                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3144                             gen_movl_TN_reg(rd, cpu_dst);
3145                         } else
3146                             gen_movl_TN_reg(rd, cpu_src1);
3147                     }
3148                 }
3149 #ifdef TARGET_SPARC64
3150             } else if (xop == 0x25) { /* sll, V9 sllx */
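                /* Bit 12 of the instruction selects the 64-bit (x) shift;
                   otherwise the count is masked to 5 bits and srl/sra operate
                   on the low 32 bits of the source.  */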
3151                 cpu_src1 = get_src1(insn, cpu_src1);
3152                 if (IS_IMM) {   /* immediate */
3153                     simm = GET_FIELDs(insn, 20, 31);
3154                     if (insn & (1 << 12)) {
3155                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3156                     } else {
3157                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3158                     }
3159                 } else {                /* register */
3160                     rs2 = GET_FIELD(insn, 27, 31);
3161                     gen_movl_reg_TN(rs2, cpu_src2);
3162                     if (insn & (1 << 12)) {
3163                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3164                     } else {
3165                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3166                     }
3167                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3168                 }
3169                 gen_movl_TN_reg(rd, cpu_dst);
3170             } else if (xop == 0x26) { /* srl, V9 srlx */
3171                 cpu_src1 = get_src1(insn, cpu_src1);
3172                 if (IS_IMM) {   /* immediate */
3173                     simm = GET_FIELDs(insn, 20, 31);
3174                     if (insn & (1 << 12)) {
3175                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3176                     } else {
3177                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3178                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3179                     }
3180                 } else {                /* register */
3181                     rs2 = GET_FIELD(insn, 27, 31);
3182                     gen_movl_reg_TN(rs2, cpu_src2);
3183                     if (insn & (1 << 12)) {
3184                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3185                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3186                     } else {
3187                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3188                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3189                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3190                     }
3191                 }
3192                 gen_movl_TN_reg(rd, cpu_dst);
3193             } else if (xop == 0x27) { /* sra, V9 srax */
3194                 cpu_src1 = get_src1(insn, cpu_src1);
3195                 if (IS_IMM) {   /* immediate */
3196                     simm = GET_FIELDs(insn, 20, 31);
3197                     if (insn & (1 << 12)) {
3198                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3199                     } else {
3200                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3201                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3202                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3203                     }
3204                 } else {                /* register */
3205                     rs2 = GET_FIELD(insn, 27, 31);
3206                     gen_movl_reg_TN(rs2, cpu_src2);
3207                     if (insn & (1 << 12)) {
3208                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3209                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3210                     } else {
3211                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3212                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3213                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3214                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3215                     }
3216                 }
3217                 gen_movl_TN_reg(rd, cpu_dst);
3218 #endif
3219             } else if (xop < 0x36) {
3220                 if (xop < 0x20) {
3221                     cpu_src1 = get_src1(insn, cpu_src1);
3222                     cpu_src2 = get_src2(insn, cpu_src2);
3223                     switch (xop & ~0x10) {
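                    /* Bit 4 of xop (0x10) selects the "cc" (flag-setting)
                       form of each arithmetic/logical op, so the opcode is
                       switched with that bit masked off and the flag update
                       is handled inside each case. */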
3224                     case 0x0: /* add */
3225                         if (IS_IMM) {
3226                             simm = GET_FIELDs(insn, 19, 31);
3227                             if (xop & 0x10) {
3228                                 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3229                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3230                                 dc->cc_op = CC_OP_ADD;
3231                             } else {
3232                                 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3233                             }
3234                         } else {
3235                             if (xop & 0x10) {
3236                                 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3237                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3238                                 dc->cc_op = CC_OP_ADD;
3239                             } else {
3240                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3241                             }
3242                         }
3243                         break;
3244                     case 0x1: /* and */
3245                         if (IS_IMM) {
3246                             simm = GET_FIELDs(insn, 19, 31);
3247                             tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3248                         } else {
3249                             tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3250                         }
3251                         if (xop & 0x10) {
3252                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3253                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3254                             dc->cc_op = CC_OP_LOGIC;
3255                         }
3256                         break;
3257                     case 0x2: /* or */
3258                         if (IS_IMM) {
3259                             simm = GET_FIELDs(insn, 19, 31);
3260                             tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3261                         } else {
3262                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3263                         }
3264                         if (xop & 0x10) {
3265                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3266                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3267                             dc->cc_op = CC_OP_LOGIC;
3268                         }
3269                         break;
3270                     case 0x3: /* xor */
3271                         if (IS_IMM) {
3272                             simm = GET_FIELDs(insn, 19, 31);
3273                             tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3274                         } else {
3275                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3276                         }
3277                         if (xop & 0x10) {
3278                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3279                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3280                             dc->cc_op = CC_OP_LOGIC;
3281                         }
3282                         break;
3283                     case 0x4: /* sub */
3284                         if (IS_IMM) {
3285                             simm = GET_FIELDs(insn, 19, 31);
3286                             if (xop & 0x10) {
3287                                 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3288                             } else {
3289                                 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3290                             }
3291                         } else {
3292                             if (xop & 0x10) {
3293                                 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3294                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3295                                 dc->cc_op = CC_OP_SUB;
3296                             } else {
3297                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3298                             }
3299                         }
3300                         break;
3301                     case 0x5: /* andn */
3302                         if (IS_IMM) {
3303                             simm = GET_FIELDs(insn, 19, 31);
3304                             tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3305                         } else {
3306                             tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3307                         }
3308                         if (xop & 0x10) {
3309                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3310                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3311                             dc->cc_op = CC_OP_LOGIC;
3312                         }
3313                         break;
3314                     case 0x6: /* orn */
3315                         if (IS_IMM) {
3316                             simm = GET_FIELDs(insn, 19, 31);
3317                             tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3318                         } else {
3319                             tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3320                         }
3321                         if (xop & 0x10) {
3322                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3323                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3324                             dc->cc_op = CC_OP_LOGIC;
3325                         }
3326                         break;
3327                     case 0x7: /* xorn */
3328                         if (IS_IMM) {
3329                             simm = GET_FIELDs(insn, 19, 31);
3330                             tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3331                         } else {
3332                             tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3333                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3334                         }
3335                         if (xop & 0x10) {
3336                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3337                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3338                             dc->cc_op = CC_OP_LOGIC;
3339                         }
3340                         break;
3341                     case 0x8: /* addx, V9 addc */
3342                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3343                                         (xop & 0x10));
3344                         break;
3345 #ifdef TARGET_SPARC64
3346                     case 0x9: /* V9 mulx */
3347                         if (IS_IMM) {
3348                             simm = GET_FIELDs(insn, 19, 31);
3349                             tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3350                         } else {
3351                             tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3352                         }
3353                         break;
3354 #endif
3355                     case 0xa: /* umul */
3356                         CHECK_IU_FEATURE(dc, MUL);
3357                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3358                         if (xop & 0x10) {
3359                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3360                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3361                             dc->cc_op = CC_OP_LOGIC;
3362                         }
3363                         break;
3364                     case 0xb: /* smul */
3365                         CHECK_IU_FEATURE(dc, MUL);
3366                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3367                         if (xop & 0x10) {
3368                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3369                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3370                             dc->cc_op = CC_OP_LOGIC;
3371                         }
3372                         break;
3373                     case 0xc: /* subx, V9 subc */
3374                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3375                                         (xop & 0x10));
3376                         break;
3377 #ifdef TARGET_SPARC64
3378                     case 0xd: /* V9 udivx */
3379                         {
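                            /* Local temps are used here so the operand
                               values survive the branch emitted by
                               gen_trap_ifdivzero_tl; plain TCG temps are
                               not preserved across branches. */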
3380                             TCGv r_temp1, r_temp2;
3381                             r_temp1 = tcg_temp_local_new();
3382                             r_temp2 = tcg_temp_local_new();
3383                             tcg_gen_mov_tl(r_temp1, cpu_src1);
3384                             tcg_gen_mov_tl(r_temp2, cpu_src2);
3385                             gen_trap_ifdivzero_tl(r_temp2);
3386                             tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3387                             tcg_temp_free(r_temp1);
3388                             tcg_temp_free(r_temp2);
3389                         }
3390                         break;
3391 #endif
3392                     case 0xe: /* udiv */
3393                         CHECK_IU_FEATURE(dc, DIV);
3394                         if (xop & 0x10) {
3395                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3396                                                cpu_src2);
3397                             dc->cc_op = CC_OP_DIV;
3398                         } else {
3399                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3400                                             cpu_src2);
3401                         }
3402                         break;
3403                     case 0xf: /* sdiv */
3404                         CHECK_IU_FEATURE(dc, DIV);
3405                         if (xop & 0x10) {
3406                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3407                                                cpu_src2);
3408                             dc->cc_op = CC_OP_DIV;
3409                         } else {
3410                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3411                                             cpu_src2);
3412                         }
3413                         break;
3414                     default:
3415                         goto illegal_insn;
3416                     }
3417                     gen_movl_TN_reg(rd, cpu_dst);
3418                 } else {
3419                     cpu_src1 = get_src1(insn, cpu_src1);
3420                     cpu_src2 = get_src2(insn, cpu_src2);
3421                     switch (xop) {
3422                     case 0x20: /* taddcc */
3423                         gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3424                         gen_movl_TN_reg(rd, cpu_dst);
3425                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3426                         dc->cc_op = CC_OP_TADD;
3427                         break;
3428                     case 0x21: /* tsubcc */
3429                         gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3430                         gen_movl_TN_reg(rd, cpu_dst);
3431                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3432                         dc->cc_op = CC_OP_TSUB;
3433                         break;
3434                     case 0x22: /* taddcctv */
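                        /* The trap-on-overflow forms (taddcctv/tsubcctv)
                           can raise a tag_overflow exception, so the CPU
                           state is synchronized before calling the helper. */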
3435                         save_state(dc, cpu_cond);
3436                         gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3437                         gen_movl_TN_reg(rd, cpu_dst);
3438                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3439                         dc->cc_op = CC_OP_TADDTV;
3440                         break;
3441                     case 0x23: /* tsubcctv */
3442                         save_state(dc, cpu_cond);
3443                         gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3444                         gen_movl_TN_reg(rd, cpu_dst);
3445                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3446                         dc->cc_op = CC_OP_TSUBTV;
3447                         break;
3448                     case 0x24: /* mulscc */
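                        /* mulscc shifts (N xor V) from the current icc into
                           the result, so the lazily-evaluated flags must be
                           made explicit first. */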
3449                         gen_helper_compute_psr(cpu_env);
3450                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3451                         gen_movl_TN_reg(rd, cpu_dst);
3452                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3453                         dc->cc_op = CC_OP_ADD;
3454                         break;
3455 #ifndef TARGET_SPARC64
3456                     case 0x25:  /* sll */
3457                         if (IS_IMM) { /* immediate */
3458                             simm = GET_FIELDs(insn, 20, 31);
3459                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3460                         } else { /* register */
3461                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3462                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3463                         }
3464                         gen_movl_TN_reg(rd, cpu_dst);
3465                         break;
3466                     case 0x26:  /* srl */
3467                         if (IS_IMM) { /* immediate */
3468                             simm = GET_FIELDs(insn, 20, 31);
3469                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3470                         } else { /* register */
3471                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3472                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3473                         }
3474                         gen_movl_TN_reg(rd, cpu_dst);
3475                         break;
3476                     case 0x27:  /* sra */
3477                         if (IS_IMM) { /* immediate */
3478                             simm = GET_FIELDs(insn, 20, 31);
3479                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3480                         } else { /* register */
3481                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3482                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3483                         }
3484                         gen_movl_TN_reg(rd, cpu_dst);
3485                         break;
3486 #endif
3487                     case 0x30:
3488                         {
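                            /* wr %y / wrasr: per the SPARC spec the value
                               written is r[rs1] xor (r[rs2] or simm13),
                               hence the xor in each case below. */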
3489                             switch(rd) {
3490                             case 0: /* wry */
3491                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3492                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3493                                 break;
3494 #ifndef TARGET_SPARC64
3495                             case 0x01 ... 0x0f: /* undefined in the
3496                                                    SPARCv8 manual, nop
3497                                                    on the microSPARC
3498                                                    II */
3499                             case 0x10 ... 0x1f: /* implementation-dependent
3500                                                    in the SPARCv8
3501                                                    manual, nop on the
3502                                                    microSPARC II */
3503                                 break;
3504 #else
3505                             case 0x2: /* V9 wrccr */
3506                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3507                                 gen_helper_wrccr(cpu_env, cpu_dst);
3508                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3509                                 dc->cc_op = CC_OP_FLAGS;
3510                                 break;
3511                             case 0x3: /* V9 wrasi */
3512                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3513                                 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3514                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3515                                 break;
3516                             case 0x6: /* V9 wrfprs */
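                                /* Changing FPRS can enable or disable the
                                   FPU, which affects how later instructions
                                   are translated, so the TB is ended here. */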
3517                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3518                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3519                                 save_state(dc, cpu_cond);
3520                                 gen_op_next_insn();
3521                                 tcg_gen_exit_tb(0);
3522                                 dc->is_br = 1;
3523                                 break;
3524                             case 0xf: /* V9 sir, nop if user */
3525 #if !defined(CONFIG_USER_ONLY)
3526                                 if (supervisor(dc)) {
3527                                     ; // XXX
3528                                 }
3529 #endif
3530                                 break;
3531                             case 0x13: /* Graphics Status */
3532                                 if (gen_trap_ifnofpu(dc, cpu_cond))
3533                                     goto jmp_insn;
3534                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3535                                 break;
3536                             case 0x14: /* Softint set */
3537                                 if (!supervisor(dc))
3538                                     goto illegal_insn;
3539                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3540                                 gen_helper_set_softint(cpu_env, cpu_tmp64);
3541                                 break;
3542                             case 0x15: /* Softint clear */
3543                                 if (!supervisor(dc))
3544                                     goto illegal_insn;
3545                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3546                                 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3547                                 break;
3548                             case 0x16: /* Softint write */
3549                                 if (!supervisor(dc))
3550                                     goto illegal_insn;
3551                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3552                                 gen_helper_write_softint(cpu_env, cpu_tmp64);
3553                                 break;
3554                             case 0x17: /* Tick compare */
3555 #if !defined(CONFIG_USER_ONLY)
3556                                 if (!supervisor(dc))
3557                                     goto illegal_insn;
3558 #endif
3559                                 {
3560                                     TCGv_ptr r_tickptr;
3561
3562                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3563                                                    cpu_src2);
3564                                     r_tickptr = tcg_temp_new_ptr();
3565                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3566                                                    offsetof(CPUState, tick));
3567                                     gen_helper_tick_set_limit(r_tickptr,
3568                                                               cpu_tick_cmpr);
3569                                     tcg_temp_free_ptr(r_tickptr);
3570                                 }
3571                                 break;
3572                             case 0x18: /* System tick */
3573 #if !defined(CONFIG_USER_ONLY)
3574                                 if (!supervisor(dc))
3575                                     goto illegal_insn;
3576 #endif
3577                                 {
3578                                     TCGv_ptr r_tickptr;
3579
3580                                     tcg_gen_xor_tl(cpu_dst, cpu_src1,
3581                                                    cpu_src2);
3582                                     r_tickptr = tcg_temp_new_ptr();
3583                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3584                                                    offsetof(CPUState, stick));
3585                                     gen_helper_tick_set_count(r_tickptr,
3586                                                               cpu_dst);
3587                                     tcg_temp_free_ptr(r_tickptr);
3588                                 }
3589                                 break;
3590                             case 0x19: /* System tick compare */
3591 #if !defined(CONFIG_USER_ONLY)
3592                                 if (!supervisor(dc))
3593                                     goto illegal_insn;
3594 #endif
3595                                 {
3596                                     TCGv_ptr r_tickptr;
3597
3598                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3599                                                    cpu_src2);
3600                                     r_tickptr = tcg_temp_new_ptr();
3601                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3602                                                    offsetof(CPUState, stick));
3603                                     gen_helper_tick_set_limit(r_tickptr,
3604                                                               cpu_stick_cmpr);
3605                                     tcg_temp_free_ptr(r_tickptr);
3606                                 }
3607                                 break;
3608
3609                             case 0x10: /* Performance Control */
3610                             case 0x11: /* Performance Instrumentation
3611                                           Counter */
3612                             case 0x12: /* Dispatch Control */
3613 #endif
3614                             default:
3615                                 goto illegal_insn;
3616                             }
3617                         }
3618                         break;
3619 #if !defined(CONFIG_USER_ONLY)
3620                     case 0x31: /* wrpsr, V9 saved, restored */
3621                         {
3622                             if (!supervisor(dc))
3623                                 goto priv_insn;
3624 #ifdef TARGET_SPARC64
3625                             switch (rd) {
3626                             case 0:
3627                                 gen_helper_saved(cpu_env);
3628                                 break;
3629                             case 1:
3630                                 gen_helper_restored(cpu_env);
3631                                 break;
3632                             case 2: /* UA2005 allclean */
3633                             case 3: /* UA2005 otherw */
3634                             case 4: /* UA2005 normalw */
3635                             case 5: /* UA2005 invalw */
3636                                 // XXX
3637                             default:
3638                                 goto illegal_insn;
3639                             }
3640 #else
3641                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3642                             gen_helper_wrpsr(cpu_env, cpu_dst);
3643                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3644                             dc->cc_op = CC_OP_FLAGS;
3645                             save_state(dc, cpu_cond);
3646                             gen_op_next_insn();
3647                             tcg_gen_exit_tb(0);
3648                             dc->is_br = 1;
3649 #endif
3650                         }
3651                         break;
3652                     case 0x32: /* wrwim, V9 wrpr */
3653                         {
3654                             if (!supervisor(dc))
3655                                 goto priv_insn;
3656                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3657 #ifdef TARGET_SPARC64
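                            /* V9 wrpr: the rd field selects which
                               privileged register receives the value. */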
3658                             switch (rd) {
3659                             case 0: // tpc
3660                                 {
3661                                     TCGv_ptr r_tsptr;
3662
3663                                     r_tsptr = tcg_temp_new_ptr();
3664                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3665                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3666                                                   offsetof(trap_state, tpc));
3667                                     tcg_temp_free_ptr(r_tsptr);
3668                                 }
3669                                 break;
3670                             case 1: // tnpc
3671                                 {
3672                                     TCGv_ptr r_tsptr;
3673
3674                                     r_tsptr = tcg_temp_new_ptr();
3675                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3676                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3677                                                   offsetof(trap_state, tnpc));
3678                                     tcg_temp_free_ptr(r_tsptr);
3679                                 }
3680                                 break;
3681                             case 2: // tstate
3682                                 {
3683                                     TCGv_ptr r_tsptr;
3684
3685                                     r_tsptr = tcg_temp_new_ptr();
3686                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3687                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3688                                                   offsetof(trap_state,
3689                                                            tstate));
3690                                     tcg_temp_free_ptr(r_tsptr);
3691                                 }
3692                                 break;
3693                             case 3: // tt
3694                                 {
3695                                     TCGv_ptr r_tsptr;
3696
3697                                     r_tsptr = tcg_temp_new_ptr();
3698                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3699                                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3700                                     tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3701                                                    offsetof(trap_state, tt));
3702                                     tcg_temp_free_ptr(r_tsptr);
3703                                 }
3704                                 break;
3705                             case 4: // tick
3706                                 {
3707                                     TCGv_ptr r_tickptr;
3708
3709                                     r_tickptr = tcg_temp_new_ptr();
3710                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3711                                                    offsetof(CPUState, tick));
3712                                     gen_helper_tick_set_count(r_tickptr,
3713                                                               cpu_tmp0);
3714                                     tcg_temp_free_ptr(r_tickptr);
3715                                 }
3716                                 break;
3717                             case 5: // tba
3718                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3719                                 break;
3720                             case 6: // pstate
3721                                 {
3722                                     TCGv r_tmp = tcg_temp_local_new();
3723
3724                                     tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3725                                     save_state(dc, cpu_cond);
3726                                     gen_helper_wrpstate(cpu_env, r_tmp);
3727                                     tcg_temp_free(r_tmp);
3728                                     dc->npc = DYNAMIC_PC;
3729                                 }
3730                                 break;
3731                             case 7: // tl
3732                                 {
3733                                     TCGv r_tmp = tcg_temp_local_new();
3734
3735                                     tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3736                                     save_state(dc, cpu_cond);
3737                                     tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3738                                     tcg_temp_free(r_tmp);
3739                                     tcg_gen_st_i32(cpu_tmp32, cpu_env,
3740                                                    offsetof(CPUSPARCState, tl));
3741                                     dc->npc = DYNAMIC_PC;
3742                                 }
3743                                 break;
3744                             case 8: // pil
3745                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
3746                                 break;
3747                             case 9: // cwp
3748                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3749                                 break;
3750                             case 10: // cansave
3751                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3752                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3753                                                offsetof(CPUSPARCState,
3754                                                         cansave));
3755                                 break;
3756                             case 11: // canrestore
3757                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3758                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3759                                                offsetof(CPUSPARCState,
3760                                                         canrestore));
3761                                 break;
3762                             case 12: // cleanwin
3763                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3764                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3765                                                offsetof(CPUSPARCState,
3766                                                         cleanwin));
3767                                 break;
3768                             case 13: // otherwin
3769                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3770                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3771                                                offsetof(CPUSPARCState,
3772                                                         otherwin));
3773                                 break;
3774                             case 14: // wstate
3775                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3776                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3777                                                offsetof(CPUSPARCState,
3778                                                         wstate));
3779                                 break;
3780                             case 16: // UA2005 gl
3781                                 CHECK_IU_FEATURE(dc, GL);
3782                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3783                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3784                                                offsetof(CPUSPARCState, gl));
3785                                 break;
3786                             case 26: // UA2005 strand status
3787                                 CHECK_IU_FEATURE(dc, HYPV);
3788                                 if (!hypervisor(dc))
3789                                     goto priv_insn;
3790                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3791                                 break;
3792                             default:
3793                                 goto illegal_insn;
3794                             }
3795 #else
3796                             tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3797                             if (dc->def->nwindows != 32)
3798                                 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3799                                                 (1 << dc->def->nwindows) - 1);
3800                             tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3801 #endif
3802                         }
3803                         break;
3804                     case 0x33: /* wrtbr, UA2005 wrhpr */
3805                         {
3806 #ifndef TARGET_SPARC64
3807                             if (!supervisor(dc))
3808                                 goto priv_insn;
3809                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3810 #else
3811                             CHECK_IU_FEATURE(dc, HYPV);
3812                             if (!hypervisor(dc))
3813                                 goto priv_insn;
3814                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3815                             switch (rd) {
3816                             case 0: // hpstate
3817                                 // XXX gen_op_wrhpstate();
3818                                 save_state(dc, cpu_cond);
3819                                 gen_op_next_insn();
3820                                 tcg_gen_exit_tb(0);
3821                                 dc->is_br = 1;
3822                                 break;
3823                             case 1: // htstate
3824                                 // XXX gen_op_wrhtstate();
3825                                 break;
3826                             case 3: // hintp
3827                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3828                                 break;
3829                             case 5: // htba
3830                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3831                                 break;
3832                             case 31: // hstick_cmpr
3833                                 {
3834                                     TCGv_ptr r_tickptr;
3835
3836                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3837                                     r_tickptr = tcg_temp_new_ptr();
3838                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3839                                                    offsetof(CPUState, hstick));
3840                                     gen_helper_tick_set_limit(r_tickptr,
3841                                                               cpu_hstick_cmpr);
3842                                     tcg_temp_free_ptr(r_tickptr);
3843                                 }
3844                                 break;
3845                             case 6: // hver readonly
3846                             default:
3847                                 goto illegal_insn;
3848                             }
3849 #endif
3850                         }
3851                         break;
3852 #endif
3853 #ifdef TARGET_SPARC64
3854                     case 0x2c: /* V9 movcc */
3855                         {
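                            /* movcc: insn bit 18 selects the integer
                               condition codes (icc or xcc, chosen by cc)
                               versus the floating-point %fccN; the move is
                               implemented by branching around the register
                               write when the condition is false. */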
3856                             int cc = GET_FIELD_SP(insn, 11, 12);
3857                             int cond = GET_FIELD_SP(insn, 14, 17);
3858                             TCGv r_cond;
3859                             int l1;
3860
3861                             r_cond = tcg_temp_new();
3862                             if (insn & (1 << 18)) {
3863                                 if (cc == 0)
3864                                     gen_cond(r_cond, 0, cond, dc);
3865                                 else if (cc == 2)
3866                                     gen_cond(r_cond, 1, cond, dc);
3867                                 else
3868                                     goto illegal_insn;
3869                             } else {
3870                                 gen_fcond(r_cond, cc, cond);
3871                             }
3872
3873                             l1 = gen_new_label();
3874
3875                             tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3876                             if (IS_IMM) {       /* immediate */
3877                                 TCGv r_const;
3878
3879                                 simm = GET_FIELD_SPs(insn, 0, 10);
3880                                 r_const = tcg_const_tl(simm);
3881                                 gen_movl_TN_reg(rd, r_const);
3882                                 tcg_temp_free(r_const);
3883                             } else {
3884                                 rs2 = GET_FIELD_SP(insn, 0, 4);
3885                                 gen_movl_reg_TN(rs2, cpu_tmp0);
3886                                 gen_movl_TN_reg(rd, cpu_tmp0);
3887                             }
3888                             gen_set_label(l1);
3889                             tcg_temp_free(r_cond);
3890                             break;
3891                         }
3892                     case 0x2d: /* V9 sdivx */
3893                         gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3894                         gen_movl_TN_reg(rd, cpu_dst);
3895                         break;
3896                     case 0x2e: /* V9 popc */
3897                         {
3898                             cpu_src2 = get_src2(insn, cpu_src2);
3899                             gen_helper_popc(cpu_dst, cpu_src2);
3900                             gen_movl_TN_reg(rd, cpu_dst);
                            break;  /* popc must not fall through into the movr case */
3901                         }
3902                     case 0x2f: /* V9 movr */
3903                         {
3904                             int cond = GET_FIELD_SP(insn, 10, 12);
3905                             int l1;
3906
3907                             cpu_src1 = get_src1(insn, cpu_src1);
3908
3909                             l1 = gen_new_label();
3910
3911                             tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3912                                               cpu_src1, 0, l1);
3913                             if (IS_IMM) {       /* immediate */
3914                                 TCGv r_const;
3915
3916                                 simm = GET_FIELD_SPs(insn, 0, 9);
3917                                 r_const = tcg_const_tl(simm);
3918                                 gen_movl_TN_reg(rd, r_const);
3919                                 tcg_temp_free(r_const);
3920                             } else {
3921                                 rs2 = GET_FIELD_SP(insn, 0, 4);
3922                                 gen_movl_reg_TN(rs2, cpu_tmp0);
3923                                 gen_movl_TN_reg(rd, cpu_tmp0);
3924                             }
3925                             gen_set_label(l1);
3926                             break;
3927                         }
3928 #endif
3929                     default:
3930                         goto illegal_insn;
3931                     }
3932                 }
3933             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3934 #ifdef TARGET_SPARC64
3935                 int opf = GET_FIELD_SP(insn, 5, 13);
3936                 rs1 = GET_FIELD(insn, 13, 17);
3937                 rs2 = GET_FIELD(insn, 27, 31);
3938                 if (gen_trap_ifnofpu(dc, cpu_cond))
3939                     goto jmp_insn;
3940
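                /* opf (insn bits 5..13) selects the VIS operation within
                   the IMPDEP1 opcode space. */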
3941                 switch (opf) {
3942                 case 0x000: /* VIS I edge8cc */
3943                 case 0x001: /* VIS II edge8n */
3944                 case 0x002: /* VIS I edge8lcc */
3945                 case 0x003: /* VIS II edge8ln */
3946                 case 0x004: /* VIS I edge16cc */
3947                 case 0x005: /* VIS II edge16n */
3948                 case 0x006: /* VIS I edge16lcc */
3949                 case 0x007: /* VIS II edge16ln */
3950                 case 0x008: /* VIS I edge32cc */
3951                 case 0x009: /* VIS II edge32n */
3952                 case 0x00a: /* VIS I edge32lcc */
3953                 case 0x00b: /* VIS II edge32ln */
3954                     // XXX
3955                     goto illegal_insn;
3956                 case 0x010: /* VIS I array8 */
3957                     CHECK_FPU_FEATURE(dc, VIS1);
3958                     cpu_src1 = get_src1(insn, cpu_src1);
3959                     gen_movl_reg_TN(rs2, cpu_src2);
3960                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3961                     gen_movl_TN_reg(rd, cpu_dst);
3962                     break;
3963                 case 0x012: /* VIS I array16 */
3964                     CHECK_FPU_FEATURE(dc, VIS1);
3965                     cpu_src1 = get_src1(insn, cpu_src1);
3966                     gen_movl_reg_TN(rs2, cpu_src2);
3967                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3968                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3969                     gen_movl_TN_reg(rd, cpu_dst);
3970                     break;
3971                 case 0x014: /* VIS I array32 */
3972                     CHECK_FPU_FEATURE(dc, VIS1);
3973                     cpu_src1 = get_src1(insn, cpu_src1);
3974                     gen_movl_reg_TN(rs2, cpu_src2);
3975                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3976                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3977                     gen_movl_TN_reg(rd, cpu_dst);
3978                     break;
3979                 case 0x018: /* VIS I alignaddr */
3980                     CHECK_FPU_FEATURE(dc, VIS1);
3981                     cpu_src1 = get_src1(insn, cpu_src1);
3982                     gen_movl_reg_TN(rs2, cpu_src2);
3983                     gen_helper_alignaddr(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3984                     gen_movl_TN_reg(rd, cpu_dst);
3985                     break;
3986                 case 0x019: /* VIS II bmask */
3987                 case 0x01a: /* VIS I alignaddrl */
3988                     // XXX
3989                     goto illegal_insn;
3990                 case 0x020: /* VIS I fcmple16 */
3991                     CHECK_FPU_FEATURE(dc, VIS1);
3992                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3993                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3994                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
3995                     gen_movl_TN_reg(rd, cpu_dst);
3996                     break;
3997                 case 0x022: /* VIS I fcmpne16 */
3998                     CHECK_FPU_FEATURE(dc, VIS1);
3999                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4000                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4001                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4002                     gen_movl_TN_reg(rd, cpu_dst);
4003                     break;
4004                 case 0x024: /* VIS I fcmple32 */
4005                     CHECK_FPU_FEATURE(dc, VIS1);
4006                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4007                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4008                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4009                     gen_movl_TN_reg(rd, cpu_dst);
4010                     break;
4011                 case 0x026: /* VIS I fcmpne32 */
4012                     CHECK_FPU_FEATURE(dc, VIS1);
4013                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4014                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4015                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4016                     gen_movl_TN_reg(rd, cpu_dst);
4017                     break;
4018                 case 0x028: /* VIS I fcmpgt16 */
4019                     CHECK_FPU_FEATURE(dc, VIS1);
4020                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4021                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4022                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4023                     gen_movl_TN_reg(rd, cpu_dst);
4024                     break;
4025                 case 0x02a: /* VIS I fcmpeq16 */
4026                     CHECK_FPU_FEATURE(dc, VIS1);
4027                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4028                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4029                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4030                     gen_movl_TN_reg(rd, cpu_dst);
4031                     break;
4032                 case 0x02c: /* VIS I fcmpgt32 */
4033                     CHECK_FPU_FEATURE(dc, VIS1);
4034                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4035                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4036                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4037                     gen_movl_TN_reg(rd, cpu_dst);
4038                     break;
4039                 case 0x02e: /* VIS I fcmpeq32 */
4040                     CHECK_FPU_FEATURE(dc, VIS1);
4041                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4042                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4043                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4044                     gen_movl_TN_reg(rd, cpu_dst);
4045                     break;
4046                 case 0x031: /* VIS I fmul8x16 */
4047                     CHECK_FPU_FEATURE(dc, VIS1);
4048                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4049                     break;
4050                 case 0x033: /* VIS I fmul8x16au */
4051                     CHECK_FPU_FEATURE(dc, VIS1);
4052                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4053                     break;
4054                 case 0x035: /* VIS I fmul8x16al */
4055                     CHECK_FPU_FEATURE(dc, VIS1);
4056                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4057                     break;
4058                 case 0x036: /* VIS I fmul8sux16 */
4059                     CHECK_FPU_FEATURE(dc, VIS1);
4060                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4061                     break;
4062                 case 0x037: /* VIS I fmul8ulx16 */
4063                     CHECK_FPU_FEATURE(dc, VIS1);
4064                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4065                     break;
4066                 case 0x038: /* VIS I fmuld8sux16 */
4067                     CHECK_FPU_FEATURE(dc, VIS1);
4068                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4069                     break;
4070                 case 0x039: /* VIS I fmuld8ulx16 */
4071                     CHECK_FPU_FEATURE(dc, VIS1);
4072                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4073                     break;
4074                 case 0x03a: /* VIS I fpack32 */
4075                 case 0x03b: /* VIS I fpack16 */
4076                 case 0x03d: /* VIS I fpackfix */
4077                     goto illegal_insn;
4078                 case 0x03e: /* VIS I pdist */
4079                     CHECK_FPU_FEATURE(dc, VIS1);
4080                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4081                     break;
4082                 case 0x048: /* VIS I faligndata */
4083                     CHECK_FPU_FEATURE(dc, VIS1);
4084                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4085                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4086                     cpu_dst_64 = gen_dest_fpr_D();
4087                     gen_helper_faligndata(cpu_dst_64, cpu_env,
4088                                           cpu_src1_64, cpu_src2_64);
4089                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4090                     break;
4091                 case 0x04b: /* VIS I fpmerge */
4092                     CHECK_FPU_FEATURE(dc, VIS1);
4093                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4094                     break;
4095                 case 0x04c: /* VIS II bshuffle */
4096                     // XXX
4097                     goto illegal_insn;
4098                 case 0x04d: /* VIS I fexpand */
4099                     CHECK_FPU_FEATURE(dc, VIS1);
4100                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4101                     break;
4102                 case 0x050: /* VIS I fpadd16 */
4103                     CHECK_FPU_FEATURE(dc, VIS1);
4104                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4105                     break;
4106                 case 0x051: /* VIS I fpadd16s */
4107                     CHECK_FPU_FEATURE(dc, VIS1);
4108                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4109                     break;
4110                 case 0x052: /* VIS I fpadd32 */
4111                     CHECK_FPU_FEATURE(dc, VIS1);
4112                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4113                     break;
4114                 case 0x053: /* VIS I fpadd32s */
4115                     CHECK_FPU_FEATURE(dc, VIS1);
4116                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4117                     break;
4118                 case 0x054: /* VIS I fpsub16 */
4119                     CHECK_FPU_FEATURE(dc, VIS1);
4120                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4121                     break;
4122                 case 0x055: /* VIS I fpsub16s */
4123                     CHECK_FPU_FEATURE(dc, VIS1);
4124                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4125                     break;
4126                 case 0x056: /* VIS I fpsub32 */
4127                     CHECK_FPU_FEATURE(dc, VIS1);
4128                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4129                     break;
4130                 case 0x057: /* VIS I fpsub32s */
4131                     CHECK_FPU_FEATURE(dc, VIS1);
4132                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4133                     break;
4134                 case 0x060: /* VIS I fzero */
4135                     CHECK_FPU_FEATURE(dc, VIS1);
4136                     cpu_dst_64 = gen_dest_fpr_D();
4137                     tcg_gen_movi_i64(cpu_dst_64, 0);
4138                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4139                     break;
4140                 case 0x061: /* VIS I fzeros */
4141                     CHECK_FPU_FEATURE(dc, VIS1);
4142                     cpu_dst_32 = gen_dest_fpr_F();
4143                     tcg_gen_movi_i32(cpu_dst_32, 0);
4144                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4145                     break;
4146                 case 0x062: /* VIS I fnor */
4147                     CHECK_FPU_FEATURE(dc, VIS1);
4148                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4149                     break;
4150                 case 0x063: /* VIS I fnors */
4151                     CHECK_FPU_FEATURE(dc, VIS1);
4152                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4153                     break;
4154                 case 0x064: /* VIS I fandnot2 */
4155                     CHECK_FPU_FEATURE(dc, VIS1);
4156                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4157                     break;
4158                 case 0x065: /* VIS I fandnot2s */
4159                     CHECK_FPU_FEATURE(dc, VIS1);
4160                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4161                     break;
4162                 case 0x066: /* VIS I fnot2 */
4163                     CHECK_FPU_FEATURE(dc, VIS1);
4164                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4165                     break;
4166                 case 0x067: /* VIS I fnot2s */
4167                     CHECK_FPU_FEATURE(dc, VIS1);
4168                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4169                     break;
4170                 case 0x068: /* VIS I fandnot1 */
4171                     CHECK_FPU_FEATURE(dc, VIS1);
4172                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4173                     break;
4174                 case 0x069: /* VIS I fandnot1s */
4175                     CHECK_FPU_FEATURE(dc, VIS1);
4176                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4177                     break;
4178                 case 0x06a: /* VIS I fnot1 */
4179                     CHECK_FPU_FEATURE(dc, VIS1);
4180                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4181                     break;
4182                 case 0x06b: /* VIS I fnot1s */
4183                     CHECK_FPU_FEATURE(dc, VIS1);
4184                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4185                     break;
4186                 case 0x06c: /* VIS I fxor */
4187                     CHECK_FPU_FEATURE(dc, VIS1);
4188                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4189                     break;
4190                 case 0x06d: /* VIS I fxors */
4191                     CHECK_FPU_FEATURE(dc, VIS1);
4192                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4193                     break;
4194                 case 0x06e: /* VIS I fnand */
4195                     CHECK_FPU_FEATURE(dc, VIS1);
4196                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4197                     break;
4198                 case 0x06f: /* VIS I fnands */
4199                     CHECK_FPU_FEATURE(dc, VIS1);
4200                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4201                     break;
4202                 case 0x070: /* VIS I fand */
4203                     CHECK_FPU_FEATURE(dc, VIS1);
4204                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4205                     break;
4206                 case 0x071: /* VIS I fands */
4207                     CHECK_FPU_FEATURE(dc, VIS1);
4208                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4209                     break;
4210                 case 0x072: /* VIS I fxnor */
4211                     CHECK_FPU_FEATURE(dc, VIS1);
4212                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4213                     break;
4214                 case 0x073: /* VIS I fxnors */
4215                     CHECK_FPU_FEATURE(dc, VIS1);
4216                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4217                     break;
4218                 case 0x074: /* VIS I fsrc1 */
4219                     CHECK_FPU_FEATURE(dc, VIS1);
4220                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4221                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4222                     break;
4223                 case 0x075: /* VIS I fsrc1s */
4224                     CHECK_FPU_FEATURE(dc, VIS1);
4225                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4226                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4227                     break;
4228                 case 0x076: /* VIS I fornot2 */
4229                     CHECK_FPU_FEATURE(dc, VIS1);
4230                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4231                     break;
4232                 case 0x077: /* VIS I fornot2s */
4233                     CHECK_FPU_FEATURE(dc, VIS1);
4234                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4235                     break;
4236                 case 0x078: /* VIS I fsrc2 */
4237                     CHECK_FPU_FEATURE(dc, VIS1);
4238                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4239                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4240                     break;
4241                 case 0x079: /* VIS I fsrc2s */
4242                     CHECK_FPU_FEATURE(dc, VIS1);
4243                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4244                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4245                     break;
4246                 case 0x07a: /* VIS I fornot1 */
4247                     CHECK_FPU_FEATURE(dc, VIS1);
4248                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4249                     break;
4250                 case 0x07b: /* VIS I fornot1s */
4251                     CHECK_FPU_FEATURE(dc, VIS1);
4252                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4253                     break;
4254                 case 0x07c: /* VIS I for */
4255                     CHECK_FPU_FEATURE(dc, VIS1);
4256                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4257                     break;
4258                 case 0x07d: /* VIS I fors */
4259                     CHECK_FPU_FEATURE(dc, VIS1);
4260                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4261                     break;
4262                 case 0x07e: /* VIS I fone */
4263                     CHECK_FPU_FEATURE(dc, VIS1);
4264                     cpu_dst_64 = gen_dest_fpr_D();
4265                     tcg_gen_movi_i64(cpu_dst_64, -1);
4266                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4267                     break;
4268                 case 0x07f: /* VIS I fones */
4269                     CHECK_FPU_FEATURE(dc, VIS1);
4270                     cpu_dst_32 = gen_dest_fpr_F();
4271                     tcg_gen_movi_i32(cpu_dst_32, -1);
4272                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4273                     break;
4274                 case 0x080: /* VIS I shutdown */
4275                 case 0x081: /* VIS II siam */
4276                     // XXX
4277                     goto illegal_insn;
4278                 default:
4279                     goto illegal_insn;
4280                 }
4281 #else
4282                 goto ncp_insn;
4283 #endif
4284             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4285 #ifdef TARGET_SPARC64
4286                 goto illegal_insn;
4287 #else
4288                 goto ncp_insn;
4289 #endif
4290 #ifdef TARGET_SPARC64
4291             } else if (xop == 0x39) { /* V9 return */
4292                 TCGv_i32 r_const;
4293
4294                 save_state(dc, cpu_cond);
4295                 cpu_src1 = get_src1(insn, cpu_src1);
4296                 if (IS_IMM) {   /* immediate */
4297                     simm = GET_FIELDs(insn, 19, 31);
4298                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4299                 } else {                /* register */
4300                     rs2 = GET_FIELD(insn, 27, 31);
4301                     if (rs2) {
4302                         gen_movl_reg_TN(rs2, cpu_src2);
4303                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4304                     } else
4305                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4306                 }
4307                 gen_helper_restore(cpu_env);
4308                 gen_mov_pc_npc(dc, cpu_cond);
4309                 r_const = tcg_const_i32(3);
4310                 gen_helper_check_align(cpu_dst, r_const);
4311                 tcg_temp_free_i32(r_const);
4312                 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4313                 dc->npc = DYNAMIC_PC;
4314                 goto jmp_insn;
4315 #endif
4316             } else {
4317                 cpu_src1 = get_src1(insn, cpu_src1);
4318                 if (IS_IMM) {   /* immediate */
4319                     simm = GET_FIELDs(insn, 19, 31);
4320                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4321                 } else {                /* register */
4322                     rs2 = GET_FIELD(insn, 27, 31);
4323                     if (rs2) {
4324                         gen_movl_reg_TN(rs2, cpu_src2);
4325                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4326                     } else
4327                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4328                 }
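                /* cpu_dst now holds rs1 plus either the sign-extended 13-bit
                   immediate or rs2.  The cases below use it as a jump target
                   (jmpl, rett) or as the value written back to rd around the
                   register-window helpers (save, restore). */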
4329                 switch (xop) {
4330                 case 0x38:      /* jmpl */
4331                     {
4332                         TCGv r_pc;
4333                         TCGv_i32 r_const;
4334
4335                         r_pc = tcg_const_tl(dc->pc);
4336                         gen_movl_TN_reg(rd, r_pc);
4337                         tcg_temp_free(r_pc);
4338                         gen_mov_pc_npc(dc, cpu_cond);
4339                         r_const = tcg_const_i32(3);
4340                         gen_helper_check_align(cpu_dst, r_const);
4341                         tcg_temp_free_i32(r_const);
4342                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4343                         dc->npc = DYNAMIC_PC;
4344                     }
4345                     goto jmp_insn;
4346 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4347                 case 0x39:      /* rett, V9 return */
4348                     {
4349                         TCGv_i32 r_const;
4350
4351                         if (!supervisor(dc))
4352                             goto priv_insn;
4353                         gen_mov_pc_npc(dc, cpu_cond);
4354                         r_const = tcg_const_i32(3);
4355                         gen_helper_check_align(cpu_dst, r_const);
4356                         tcg_temp_free_i32(r_const);
4357                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4358                         dc->npc = DYNAMIC_PC;
4359                         gen_helper_rett(cpu_env);
4360                     }
4361                     goto jmp_insn;
4362 #endif
4363                 case 0x3b: /* flush */
4364                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4365                         goto unimp_flush;
4366                     /* nop */
4367                     break;
4368                 case 0x3c:      /* save */
4369                     save_state(dc, cpu_cond);
4370                     gen_helper_save(cpu_env);
4371                     gen_movl_TN_reg(rd, cpu_dst);
4372                     break;
4373                 case 0x3d:      /* restore */
4374                     save_state(dc, cpu_cond);
4375                     gen_helper_restore(cpu_env);
4376                     gen_movl_TN_reg(rd, cpu_dst);
4377                     break;
4378 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4379                 case 0x3e:      /* V9 done/retry */
4380                     {
4381                         switch (rd) {
4382                         case 0:
4383                             if (!supervisor(dc))
4384                                 goto priv_insn;
4385                             dc->npc = DYNAMIC_PC;
4386                             dc->pc = DYNAMIC_PC;
4387                             gen_helper_done(cpu_env);
4388                             goto jmp_insn;
4389                         case 1:
4390                             if (!supervisor(dc))
4391                                 goto priv_insn;
4392                             dc->npc = DYNAMIC_PC;
4393                             dc->pc = DYNAMIC_PC;
4394                             gen_helper_retry(cpu_env);
4395                             goto jmp_insn;
4396                         default:
4397                             goto illegal_insn;
4398                         }
4399                     }
4400                     break;
4401 #endif
4402                 default:
4403                     goto illegal_insn;
4404                 }
4405             }
4406             break;
4407         }
4408         break;
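    /* Format 3 (op = 3) load/store encoding: rd is insn[29:25], op3 (the
       xop below) is insn[24:19], rs1 is insn[18:14], the i bit is insn[13],
       and the second operand is either rs2 in insn[4:0] or the sign-extended
       13-bit immediate in insn[12:0]. */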
4409     case 3:                     /* load/store instructions */
4410         {
4411             unsigned int xop = GET_FIELD(insn, 7, 12);
4412
4413             /* flush pending conditional evaluations before exposing
4414                cpu state */
4415             if (dc->cc_op != CC_OP_FLAGS) {
4416                 dc->cc_op = CC_OP_FLAGS;
4417                 gen_helper_compute_psr(cpu_env);
4418             }
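            /* Compute the effective address: casa/casxa take the address from
               rs1 alone and use rs2 as the comparison value, while every
               other load/store adds rs1 to either the sign-extended 13-bit
               immediate or rs2 (rs2 == %g0 meaning no offset). */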
4419             cpu_src1 = get_src1(insn, cpu_src1);
4420             if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4421                 rs2 = GET_FIELD(insn, 27, 31);
4422                 gen_movl_reg_TN(rs2, cpu_src2);
4423                 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4424             } else if (IS_IMM) {     /* immediate */
4425                 simm = GET_FIELDs(insn, 19, 31);
4426                 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4427             } else {            /* register */
4428                 rs2 = GET_FIELD(insn, 27, 31);
4429                 if (rs2 != 0) {
4430                     gen_movl_reg_TN(rs2, cpu_src2);
4431                     tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4432                 } else
4433                     tcg_gen_mov_tl(cpu_addr, cpu_src1);
4434             }
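            /* Dispatch on xop: the first branch handles the integer loads,
               their alternate-space variants and the V9 prefetch/FP
               alternate loads; then come the FP loads (0x20-0x23), the
               integer and alternate-space stores, the FP stores
               (0x24-0x27), and finally the ASI store, casa/casxa and FP
               store-alternate group (0x34-0x3e). */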
4435             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4436                 (xop > 0x17 && xop <= 0x1d ) ||
4437                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4438                 switch (xop) {
4439                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4440                     gen_address_mask(dc, cpu_addr);
4441                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4442                     break;
4443                 case 0x1:       /* ldub, load unsigned byte */
4444                     gen_address_mask(dc, cpu_addr);
4445                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4446                     break;
4447                 case 0x2:       /* lduh, load unsigned halfword */
4448                     gen_address_mask(dc, cpu_addr);
4449                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4450                     break;
4451                 case 0x3:       /* ldd, load double word */
4452                     if (rd & 1)
4453                         goto illegal_insn;
4454                     else {
4455                         TCGv_i32 r_const;
4456
4457                         save_state(dc, cpu_cond);
4458                         r_const = tcg_const_i32(7);
4459                         gen_helper_check_align(cpu_addr, r_const); // XXX remove
4460                         tcg_temp_free_i32(r_const);
4461                         gen_address_mask(dc, cpu_addr);
4462                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4463                         tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4464                         tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4465                         gen_movl_TN_reg(rd + 1, cpu_tmp0);
4466                         tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4467                         tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4468                         tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4469                     }
4470                     break;
4471                 case 0x9:       /* ldsb, load signed byte */
4472                     gen_address_mask(dc, cpu_addr);
4473                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4474                     break;
4475                 case 0xa:       /* ldsh, load signed halfword */
4476                     gen_address_mask(dc, cpu_addr);
4477                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4478                     break;
4479                 case 0xd:       /* ldstub -- XXX: should be atomic */
4480                     {
4481                         TCGv r_const;
4482
4483                         gen_address_mask(dc, cpu_addr);
4484                         tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4485                         r_const = tcg_const_tl(0xff);
4486                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4487                         tcg_temp_free(r_const);
4488                     }
4489                     break;
4490                 case 0x0f:      /* swap, swap register with memory; should
4491                                    also be atomic */
4492                     CHECK_IU_FEATURE(dc, SWAP);
4493                     gen_movl_reg_TN(rd, cpu_val);
4494                     gen_address_mask(dc, cpu_addr);
4495                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4496                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4497                     tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4498                     break;
4499 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4500                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4501 #ifndef TARGET_SPARC64
4502                     if (IS_IMM)
4503                         goto illegal_insn;
4504                     if (!supervisor(dc))
4505                         goto priv_insn;
4506 #endif
4507                     save_state(dc, cpu_cond);
4508                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4509                     break;
4510                 case 0x11:      /* lduba, load unsigned byte alternate */
4511 #ifndef TARGET_SPARC64
4512                     if (IS_IMM)
4513                         goto illegal_insn;
4514                     if (!supervisor(dc))
4515                         goto priv_insn;
4516 #endif
4517                     save_state(dc, cpu_cond);
4518                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4519                     break;
4520                 case 0x12:      /* lduha, load unsigned halfword alternate */
4521 #ifndef TARGET_SPARC64
4522                     if (IS_IMM)
4523                         goto illegal_insn;
4524                     if (!supervisor(dc))
4525                         goto priv_insn;
4526 #endif
4527                     save_state(dc, cpu_cond);
4528                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4529                     break;
4530                 case 0x13:      /* ldda, load double word alternate */
4531 #ifndef TARGET_SPARC64
4532                     if (IS_IMM)
4533                         goto illegal_insn;
4534                     if (!supervisor(dc))
4535                         goto priv_insn;
4536 #endif
4537                     if (rd & 1)
4538                         goto illegal_insn;
4539                     save_state(dc, cpu_cond);
4540                     gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4541                     goto skip_move;
4542                 case 0x19:      /* ldsba, load signed byte alternate */
4543 #ifndef TARGET_SPARC64
4544                     if (IS_IMM)
4545                         goto illegal_insn;
4546                     if (!supervisor(dc))
4547                         goto priv_insn;
4548 #endif
4549                     save_state(dc, cpu_cond);
4550                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4551                     break;
4552                 case 0x1a:      /* ldsha, load signed halfword alternate */
4553 #ifndef TARGET_SPARC64
4554                     if (IS_IMM)
4555                         goto illegal_insn;
4556                     if (!supervisor(dc))
4557                         goto priv_insn;
4558 #endif
4559                     save_state(dc, cpu_cond);
4560                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4561                     break;
4562                 case 0x1d:      /* ldstuba -- XXX: should be atomic */
4563 #ifndef TARGET_SPARC64
4564                     if (IS_IMM)
4565                         goto illegal_insn;
4566                     if (!supervisor(dc))
4567                         goto priv_insn;
4568 #endif
4569                     save_state(dc, cpu_cond);
4570                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4571                     break;
4572                 case 0x1f:      /* swapa, swap reg with alt. memory; should
4573                                    also be atomic */
4574                     CHECK_IU_FEATURE(dc, SWAP);
4575 #ifndef TARGET_SPARC64
4576                     if (IS_IMM)
4577                         goto illegal_insn;
4578                     if (!supervisor(dc))
4579                         goto priv_insn;
4580 #endif
4581                     save_state(dc, cpu_cond);
4582                     gen_movl_reg_TN(rd, cpu_val);
4583                     gen_swap_asi(cpu_val, cpu_addr, insn);
4584                     break;
4585
4586 #ifndef TARGET_SPARC64
4587                 case 0x30: /* ldc */
4588                 case 0x31: /* ldcsr */
4589                 case 0x33: /* lddc */
4590                     goto ncp_insn;
4591 #endif
4592 #endif
4593 #ifdef TARGET_SPARC64
4594                 case 0x08: /* V9 ldsw */
4595                     gen_address_mask(dc, cpu_addr);
4596                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4597                     break;
4598                 case 0x0b: /* V9 ldx */
4599                     gen_address_mask(dc, cpu_addr);
4600                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4601                     break;
4602                 case 0x18: /* V9 ldswa */
4603                     save_state(dc, cpu_cond);
4604                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4605                     break;
4606                 case 0x1b: /* V9 ldxa */
4607                     save_state(dc, cpu_cond);
4608                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4609                     break;
4610                 case 0x2d: /* V9 prefetch, no effect */
4611                     goto skip_move;
4612                 case 0x30: /* V9 ldfa */
4613                     if (gen_trap_ifnofpu(dc, cpu_cond)) {
4614                         goto jmp_insn;
4615                     }
4616                     save_state(dc, cpu_cond);
4617                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4618                     gen_update_fprs_dirty(rd);
4619                     goto skip_move;
4620                 case 0x33: /* V9 lddfa */
4621                     if (gen_trap_ifnofpu(dc, cpu_cond)) {
4622                         goto jmp_insn;
4623                     }
4624                     save_state(dc, cpu_cond);
4625                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4626                     gen_update_fprs_dirty(DFPREG(rd));
4627                     goto skip_move;
4628                 case 0x3d: /* V9 prefetcha, no effect */
4629                     goto skip_move;
4630                 case 0x32: /* V9 ldqfa */
4631                     CHECK_FPU_FEATURE(dc, FLOAT128);
4632                     if (gen_trap_ifnofpu(dc, cpu_cond)) {
4633                         goto jmp_insn;
4634                     }
4635                     save_state(dc, cpu_cond);
4636                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4637                     gen_update_fprs_dirty(QFPREG(rd));
4638                     goto skip_move;
4639 #endif
4640                 default:
4641                     goto illegal_insn;
4642                 }
4643                 gen_movl_TN_reg(rd, cpu_val);
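            /* The alternate-space double/FP loads and the prefetches above
               jump straight to skip_move: they either wrote their destination
               registers themselves or have no integer result, so the generic
               write-back of cpu_val is bypassed. */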
4644 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4645             skip_move: ;
4646 #endif
4647             } else if (xop >= 0x20 && xop < 0x24) {
4648                 if (gen_trap_ifnofpu(dc, cpu_cond))
4649                     goto jmp_insn;
4650                 save_state(dc, cpu_cond);
4651                 switch (xop) {
4652                 case 0x20:      /* ldf, load fpreg */
4653                     gen_address_mask(dc, cpu_addr);
4654                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4655                     cpu_dst_32 = gen_dest_fpr_F();
4656                     tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4657                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4658                     break;
4659                 case 0x21:      /* ldfsr, V9 ldxfsr */
4660 #ifdef TARGET_SPARC64
4661                     gen_address_mask(dc, cpu_addr);
4662                     if (rd == 1) {
4663                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4664                         gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4665                     } else {
4666                         tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4667                         tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4668                         gen_helper_ldfsr(cpu_env, cpu_tmp32);
4669                     }
4670 #else
4671                     {
4672                         tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4673                         gen_helper_ldfsr(cpu_env, cpu_tmp32);
4674                     }
4675 #endif
4676                     break;
4677                 case 0x22:      /* ldqf, load quad fpreg */
4678                     {
4679                         TCGv_i32 r_const;
4680
4681                         CHECK_FPU_FEATURE(dc, FLOAT128);
4682                         r_const = tcg_const_i32(dc->mem_idx);
4683                         gen_address_mask(dc, cpu_addr);
4684                         gen_helper_ldqf(cpu_addr, r_const);
4685                         tcg_temp_free_i32(r_const);
4686                         gen_op_store_QT0_fpr(QFPREG(rd));
4687                         gen_update_fprs_dirty(QFPREG(rd));
4688                     }
4689                     break;
4690                 case 0x23:      /* lddf, load double fpreg */
4691                     gen_address_mask(dc, cpu_addr);
4692                     cpu_dst_64 = gen_dest_fpr_D();
4693                     tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4694                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4695                     break;
4696                 default:
4697                     goto illegal_insn;
4698                 }
4699             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4700                        xop == 0xe || xop == 0x1e) {
4701                 gen_movl_reg_TN(rd, cpu_val);
4702                 switch (xop) {
4703                 case 0x4: /* st, store word */
4704                     gen_address_mask(dc, cpu_addr);
4705                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4706                     break;
4707                 case 0x5: /* stb, store byte */
4708                     gen_address_mask(dc, cpu_addr);
4709                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4710                     break;
4711                 case 0x6: /* sth, store halfword */
4712                     gen_address_mask(dc, cpu_addr);
4713                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4714                     break;
4715                 case 0x7: /* std, store double word */
4716                     if (rd & 1)
4717                         goto illegal_insn;
4718                     else {
4719                         TCGv_i32 r_const;
4720
4721                         save_state(dc, cpu_cond);
4722                         gen_address_mask(dc, cpu_addr);
4723                         r_const = tcg_const_i32(7);
4724                         gen_helper_check_align(cpu_addr, r_const); // XXX remove
4725                         tcg_temp_free_i32(r_const);
4726                         gen_movl_reg_TN(rd + 1, cpu_tmp0);
4727                         tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4728                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4729                     }
4730                     break;
4731 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4732                 case 0x14: /* sta, V9 stwa, store word alternate */
4733 #ifndef TARGET_SPARC64
4734                     if (IS_IMM)
4735                         goto illegal_insn;
4736                     if (!supervisor(dc))
4737                         goto priv_insn;
4738 #endif
4739                     save_state(dc, cpu_cond);
4740                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4741                     dc->npc = DYNAMIC_PC;
4742                     break;
4743                 case 0x15: /* stba, store byte alternate */
4744 #ifndef TARGET_SPARC64
4745                     if (IS_IMM)
4746                         goto illegal_insn;
4747                     if (!supervisor(dc))
4748                         goto priv_insn;
4749 #endif
4750                     save_state(dc, cpu_cond);
4751                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4752                     dc->npc = DYNAMIC_PC;
4753                     break;
4754                 case 0x16: /* stha, store halfword alternate */
4755 #ifndef TARGET_SPARC64
4756                     if (IS_IMM)
4757                         goto illegal_insn;
4758                     if (!supervisor(dc))
4759                         goto priv_insn;
4760 #endif
4761                     save_state(dc, cpu_cond);
4762                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4763                     dc->npc = DYNAMIC_PC;
4764                     break;
4765                 case 0x17: /* stda, store double word alternate */
4766 #ifndef TARGET_SPARC64
4767                     if (IS_IMM)
4768                         goto illegal_insn;
4769                     if (!supervisor(dc))
4770                         goto priv_insn;
4771 #endif
4772                     if (rd & 1)
4773                         goto illegal_insn;
4774                     else {
4775                         save_state(dc, cpu_cond);
4776                         gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4777                     }
4778                     break;
4779 #endif
4780 #ifdef TARGET_SPARC64
4781                 case 0x0e: /* V9 stx */
4782                     gen_address_mask(dc, cpu_addr);
4783                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4784                     break;
4785                 case 0x1e: /* V9 stxa */
4786                     save_state(dc, cpu_cond);
4787                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
4788                     dc->npc = DYNAMIC_PC;
4789                     break;
4790 #endif
4791                 default:
4792                     goto illegal_insn;
4793                 }
4794             } else if (xop > 0x23 && xop < 0x28) {
4795                 if (gen_trap_ifnofpu(dc, cpu_cond))
4796                     goto jmp_insn;
4797                 save_state(dc, cpu_cond);
4798                 switch (xop) {
4799                 case 0x24: /* stf, store fpreg */
4800                     gen_address_mask(dc, cpu_addr);
4801                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
4802                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
4803                     tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4804                     break;
4805                 case 0x25: /* stfsr, V9 stxfsr */
4806 #ifdef TARGET_SPARC64
4807                     gen_address_mask(dc, cpu_addr);
4808                     tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4809                     if (rd == 1)
4810                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4811                     else
4812                         tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4813 #else
4814                     tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4815                     tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4816 #endif
4817                     break;
4818                 case 0x26:
4819 #ifdef TARGET_SPARC64
4820                     /* V9 stqf, store quad fpreg */
4821                     {
4822                         TCGv_i32 r_const;
4823
4824                         CHECK_FPU_FEATURE(dc, FLOAT128);
4825                         gen_op_load_fpr_QT0(QFPREG(rd));
4826                         r_const = tcg_const_i32(dc->mem_idx);
4827                         gen_address_mask(dc, cpu_addr);
4828                         gen_helper_stqf(cpu_addr, r_const);
4829                         tcg_temp_free_i32(r_const);
4830                     }
4831                     break;
4832 #else /* !TARGET_SPARC64 */
4833                     /* stdfq, store floating point queue */
4834 #if defined(CONFIG_USER_ONLY)
4835                     goto illegal_insn;
4836 #else
4837                     if (!supervisor(dc))
4838                         goto priv_insn;
4839                     if (gen_trap_ifnofpu(dc, cpu_cond))
4840                         goto jmp_insn;
4841                     goto nfq_insn;
4842 #endif
4843 #endif
4844                 case 0x27: /* stdf, store double fpreg */
4845                     gen_address_mask(dc, cpu_addr);
4846                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
4847                     tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
4848                     break;
4849                 default:
4850                     goto illegal_insn;
4851                 }
4852             } else if (xop > 0x33 && xop < 0x3f) {
4853                 save_state(dc, cpu_cond);
4854                 switch (xop) {
4855 #ifdef TARGET_SPARC64
4856                 case 0x34: /* V9 stfa */
4857                     if (gen_trap_ifnofpu(dc, cpu_cond)) {
4858                         goto jmp_insn;
4859                     }
4860                     gen_stf_asi(cpu_addr, insn, 4, rd);
4861                     break;
4862                 case 0x36: /* V9 stqfa */
4863                     {
4864                         TCGv_i32 r_const;
4865
4866                         CHECK_FPU_FEATURE(dc, FLOAT128);
4867                         if (gen_trap_ifnofpu(dc, cpu_cond)) {
4868                             goto jmp_insn;
4869                         }
4870                         r_const = tcg_const_i32(7);
4871                         gen_helper_check_align(cpu_addr, r_const);
4872                         tcg_temp_free_i32(r_const);
4873                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4874                     }
4875                     break;
4876                 case 0x37: /* V9 stdfa */
4877                     if (gen_trap_ifnofpu(dc, cpu_cond)) {
4878                         goto jmp_insn;
4879                     }
4880                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4881                     break;
4882                 case 0x3c: /* V9 casa */
4883                     gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4884                     gen_movl_TN_reg(rd, cpu_val);
4885                     break;
4886                 case 0x3e: /* V9 casxa */
4887                     gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4888                     gen_movl_TN_reg(rd, cpu_val);
4889                     break;
4890 #else
4891                 case 0x34: /* stc */
4892                 case 0x35: /* stcsr */
4893                 case 0x36: /* stdcq */
4894                 case 0x37: /* stdc */
4895                     goto ncp_insn;
4896 #endif
4897                 default:
4898                     goto illegal_insn;
4899                 }
4900             } else
4901                 goto illegal_insn;
4902         }
4903         break;
4904     }
4905     /* default case for non-jump instructions */
4906     if (dc->npc == DYNAMIC_PC) {
4907         dc->pc = DYNAMIC_PC;
4908         gen_op_next_insn();
4909     } else if (dc->npc == JUMP_PC) {
4910         /* we can do a static jump */
4911         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4912         dc->is_br = 1;
4913     } else {
4914         dc->pc = dc->npc;
4915         dc->npc = dc->npc + 4;
4916     }
4917  jmp_insn:
4918     goto egress;
4919  illegal_insn:
4920     {
4921         TCGv_i32 r_const;
4922
4923         save_state(dc, cpu_cond);
4924         r_const = tcg_const_i32(TT_ILL_INSN);
4925         gen_helper_raise_exception(cpu_env, r_const);
4926         tcg_temp_free_i32(r_const);
4927         dc->is_br = 1;
4928     }
4929     goto egress;
4930  unimp_flush:
4931     {
4932         TCGv_i32 r_const;
4933
4934         save_state(dc, cpu_cond);
4935         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4936         gen_helper_raise_exception(cpu_env, r_const);
4937         tcg_temp_free_i32(r_const);
4938         dc->is_br = 1;
4939     }
4940     goto egress;
4941 #if !defined(CONFIG_USER_ONLY)
4942  priv_insn:
4943     {
4944         TCGv_i32 r_const;
4945
4946         save_state(dc, cpu_cond);
4947         r_const = tcg_const_i32(TT_PRIV_INSN);
4948         gen_helper_raise_exception(cpu_env, r_const);
4949         tcg_temp_free_i32(r_const);
4950         dc->is_br = 1;
4951     }
4952     goto egress;
4953 #endif
4954  nfpu_insn:
4955     save_state(dc, cpu_cond);
4956     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4957     dc->is_br = 1;
4958     goto egress;
4959 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4960  nfq_insn:
4961     save_state(dc, cpu_cond);
4962     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4963     dc->is_br = 1;
4964     goto egress;
4965 #endif
4966 #ifndef TARGET_SPARC64
4967  ncp_insn:
4968     {
4969         TCGv_i32 r_const;
4970
4971         save_state(dc, cpu_cond);
4972         r_const = tcg_const_i32(TT_NCP_INSN);
4973         gen_helper_raise_exception(cpu_env, r_const);
4974         tcg_temp_free_i32(r_const);
4975         dc->is_br = 1;
4976     }
4977     goto egress;
4978 #endif
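 /* Common exit path: release the temporaries allocated while decoding this
    instruction, including any i32 temporaries tracked in dc->t32. */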
4979  egress:
4980     tcg_temp_free(cpu_tmp1);
4981     tcg_temp_free(cpu_tmp2);
4982     if (dc->n_t32 != 0) {
4983         int i;
4984         for (i = dc->n_t32 - 1; i >= 0; --i) {
4985             tcg_temp_free_i32(dc->t32[i]);
4986         }
4987         dc->n_t32 = 0;
4988     }
4989 }
4990
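/* Translate one block starting at tb->pc.  Instructions are decoded one at a
   time by disas_sparc_insn() until a branch ends the block, a page boundary
   is crossed, single-stepping is enabled, the opcode buffer fills up, or
   max_insns is reached.  When spc is non-zero the loop additionally records
   pc, npc and the instruction-start flags for each insn so that a faulting
   translation can later be mapped back to guest state (see
   restore_state_to_opc below). */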
4991 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4992                                                   int spc, CPUSPARCState *env)
4993 {
4994     target_ulong pc_start, last_pc;
4995     uint16_t *gen_opc_end;
4996     DisasContext dc1, *dc = &dc1;
4997     CPUBreakpoint *bp;
4998     int j, lj = -1;
4999     int num_insns;
5000     int max_insns;
5001
5002     memset(dc, 0, sizeof(DisasContext));
5003     dc->tb = tb;
5004     pc_start = tb->pc;
5005     dc->pc = pc_start;
5006     last_pc = dc->pc;
5007     dc->npc = (target_ulong) tb->cs_base;
5008     dc->cc_op = CC_OP_DYNAMIC;
5009     dc->mem_idx = cpu_mmu_index(env);
5010     dc->def = env->def;
5011     dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5012     dc->address_mask_32bit = tb_am_enabled(tb->flags);
5013     dc->singlestep = (env->singlestep_enabled || singlestep);
5014     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5015
5016     cpu_tmp0 = tcg_temp_new();
5017     cpu_tmp32 = tcg_temp_new_i32();
5018     cpu_tmp64 = tcg_temp_new_i64();
5019
5020     cpu_dst = tcg_temp_local_new();
5021
5022     // loads and stores
5023     cpu_val = tcg_temp_local_new();
5024     cpu_addr = tcg_temp_local_new();
5025
5026     num_insns = 0;
5027     max_insns = tb->cflags & CF_COUNT_MASK;
5028     if (max_insns == 0)
5029         max_insns = CF_COUNT_MASK;
5030     gen_icount_start();
5031     do {
5032         if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5033             QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5034                 if (bp->pc == dc->pc) {
5035                     if (dc->pc != pc_start)
5036                         save_state(dc, cpu_cond);
5037                     gen_helper_debug(cpu_env);
5038                     tcg_gen_exit_tb(0);
5039                     dc->is_br = 1;
5040                     goto exit_gen_loop;
5041                 }
5042             }
5043         }
5044         if (spc) {
5045             qemu_log("Search PC...\n");
5046             j = gen_opc_ptr - gen_opc_buf;
5047             if (lj < j) {
5048                 lj++;
5049                 while (lj < j)
5050                     gen_opc_instr_start[lj++] = 0;
5051                 gen_opc_pc[lj] = dc->pc;
5052                 gen_opc_npc[lj] = dc->npc;
5053                 gen_opc_instr_start[lj] = 1;
5054                 gen_opc_icount[lj] = num_insns;
5055             }
5056         }
5057         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5058             gen_io_start();
5059         last_pc = dc->pc;
5060         disas_sparc_insn(dc);
5061         num_insns++;
5062
5063         if (dc->is_br)
5064             break;
5065         /* if the next PC is different, we abort now */
5066         if (dc->pc != (last_pc + 4))
5067             break;
5068         /* if we reach a page boundary, we stop generation so that the
5069            PC of a TT_TFAULT exception is always in the right page */
5070         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5071             break;
5072         /* in single-step mode, we generate only one instruction and
5073            then raise an exception */
5074         if (dc->singlestep) {
5075             break;
5076         }
5077     } while ((gen_opc_ptr < gen_opc_end) &&
5078              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5079              num_insns < max_insns);
5080
5081  exit_gen_loop:
5082     tcg_temp_free(cpu_addr);
5083     tcg_temp_free(cpu_val);
5084     tcg_temp_free(cpu_dst);
5085     tcg_temp_free_i64(cpu_tmp64);
5086     tcg_temp_free_i32(cpu_tmp32);
5087     tcg_temp_free(cpu_tmp0);
5088
5089     if (tb->cflags & CF_LAST_IO)
5090         gen_io_end();
5091     if (!dc->is_br) {
5092         if (dc->pc != DYNAMIC_PC &&
5093             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5094             /* static PC and NPC: we can use direct chaining */
5095             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5096         } else {
5097             if (dc->pc != DYNAMIC_PC)
5098                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5099             save_npc(dc, cpu_cond);
5100             tcg_gen_exit_tb(0);
5101         }
5102     }
5103     gen_icount_end(tb, num_insns);
5104     *gen_opc_ptr = INDEX_op_end;
5105     if (spc) {
5106         j = gen_opc_ptr - gen_opc_buf;
5107         lj++;
5108         while (lj <= j)
5109             gen_opc_instr_start[lj++] = 0;
5110 #if 0
5111         log_page_dump();
5112 #endif
5113         gen_opc_jump_pc[0] = dc->jump_pc[0];
5114         gen_opc_jump_pc[1] = dc->jump_pc[1];
5115     } else {
5116         tb->size = last_pc + 4 - pc_start;
5117         tb->icount = num_insns;
5118     }
5119 #ifdef DEBUG_DISAS
5120     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5121         qemu_log("--------------\n");
5122         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5123         log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5124         qemu_log("\n");
5125     }
5126 #endif
5127 }
5128
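/* Public entry points: normal translation, and the search-PC variant used
   when guest state has to be reconstructed for a faulting translation. */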
5129 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5130 {
5131     gen_intermediate_code_internal(tb, 0, env);
5132 }
5133
5134 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5135 {
5136     gen_intermediate_code_internal(tb, 1, env);
5137 }
5138
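/* One-time TCG setup: register every CPUState field the translator accesses
   (condition codes, pc/npc, the global registers, the FP registers and the
   SPARC64-only control registers) as a named TCG global so that generated
   code can reference it directly. */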
5139 void gen_intermediate_code_init(CPUSPARCState *env)
5140 {
5141     unsigned int i;
5142     static int inited;
5143     static const char * const gregnames[8] = {
5144         NULL, // g0 not used
5145         "g1",
5146         "g2",
5147         "g3",
5148         "g4",
5149         "g5",
5150         "g6",
5151         "g7",
5152     };
5153     static const char * const fregnames[32] = {
5154         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5155         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5156         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5157         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5158     };
5159
5160     /* init various static tables */
5161     if (!inited) {
5162         inited = 1;
5163
5164         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5165         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5166                                              offsetof(CPUState, regwptr),
5167                                              "regwptr");
5168 #ifdef TARGET_SPARC64
5169         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5170                                          "xcc");
5171         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5172                                          "asi");
5173         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5174                                           "fprs");
5175         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5176                                      "gsr");
5177         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5178                                            offsetof(CPUState, tick_cmpr),
5179                                            "tick_cmpr");
5180         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5181                                             offsetof(CPUState, stick_cmpr),
5182                                             "stick_cmpr");
5183         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5184                                              offsetof(CPUState, hstick_cmpr),
5185                                              "hstick_cmpr");
5186         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5187                                        "hintp");
5188         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5189                                       "htba");
5190         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5191                                       "hver");
5192         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5193                                      offsetof(CPUState, ssr), "ssr");
5194         cpu_ver = tcg_global_mem_new(TCG_AREG0,
5195                                      offsetof(CPUState, version), "ver");
5196         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5197                                              offsetof(CPUState, softint),
5198                                              "softint");
5199 #else
5200         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5201                                      "wim");
5202 #endif
5203         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5204                                       "cond");
5205         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5206                                         "cc_src");
5207         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5208                                          offsetof(CPUState, cc_src2),
5209                                          "cc_src2");
5210         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5211                                         "cc_dst");
5212         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5213                                            "cc_op");
5214         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5215                                          "psr");
5216         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5217                                      "fsr");
5218         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5219                                     "pc");
5220         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5221                                      "npc");
5222         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5223 #ifndef CONFIG_USER_ONLY
5224         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5225                                      "tbr");
5226 #endif
5227         for (i = 1; i < 8; i++) {
5228             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5229                                               offsetof(CPUState, gregs[i]),
5230                                               gregnames[i]);
5231         }
5232         for (i = 0; i < TARGET_DPREGS; i++) {
5233             cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5234                                                 offsetof(CPUState, fpr[i]),
5235                                                 fregnames[i]);
5236         }
5237
5238         /* register helpers */
5239
5240 #define GEN_HELPER 2
5241 #include "helper.h"
5242     }
5243 }
5244
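/* Restore pc/npc after an exception in generated code.  The recorded npc
   uses the same encoding as DisasContext: 1 (DYNAMIC_PC) means npc is
   already stored in the CPU state, 2 (JUMP_PC) means it must be resolved
   from env->cond and the recorded jump targets, and anything else is the
   literal next PC. */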
5245 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5246 {
5247     target_ulong npc;
5248     env->pc = gen_opc_pc[pc_pos];
5249     npc = gen_opc_npc[pc_pos];
5250     if (npc == 1) {
5251         /* dynamic NPC: already stored */
5252     } else if (npc == 2) {
5253         /* jump PC: use 'cond' and the jump targets of the translation */
5254         if (env->cond) {
5255             env->npc = gen_opc_jump_pc[0];
5256         } else {
5257             env->npc = gen_opc_jump_pc[1];
5258         }
5259     } else {
5260         env->npc = npc;
5261     }
5262
5263     /* flush pending conditional evaluations before exposing cpu state */
5264     if (CC_OP != CC_OP_FLAGS) {
5265         helper_compute_psr(env);
5266     }
5267 }