[qemu.git] / target-sparc / translate.c
1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <[email protected]>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas/disas.h"
29 #include "exec/helper-proto.h"
30 #include "tcg-op.h"
31 #include "exec/cpu_ldst.h"
32
33 #include "exec/helper-gen.h"
34
35 #include "trace-tcg.h"
36
37
38 #define DEBUG_DISAS
39
40 #define DYNAMIC_PC  1 /* dynamic pc value */
41 #define JUMP_PC     2 /* dynamic pc value which takes only two values,
42                          jump_pc[0] or jump_pc[1], selected by cpu_cond */
43
44 /* global register indexes */
45 static TCGv_ptr cpu_env, cpu_regwptr;
46 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
47 static TCGv_i32 cpu_cc_op;
48 static TCGv_i32 cpu_psr;
49 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
50 static TCGv cpu_y;
51 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_tbr;
53 #endif
54 static TCGv cpu_cond;
55 #ifdef TARGET_SPARC64
56 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
57 static TCGv cpu_gsr;
58 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
59 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
60 static TCGv_i32 cpu_softint;
61 #else
62 static TCGv cpu_wim;
63 #endif
64 /* Floating point registers */
65 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
66
67 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
68 static target_ulong gen_opc_jump_pc[2];
69
70 #include "exec/gen-icount.h"
71
72 typedef struct DisasContext {
73     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
74     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
75     target_ulong jump_pc[2]; /* branch targets used while npc == JUMP_PC */
76     int is_br;
77     int mem_idx;
78     int fpu_enabled;
79     int address_mask_32bit;
80     int singlestep;
81     uint32_t cc_op;  /* current CC operation */
82     struct TranslationBlock *tb;
83     sparc_def_t *def;
84     TCGv_i32 t32[3];
85     TCGv ttl[5];
86     int n_t32;
87     int n_ttl;
88 } DisasContext;
89
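/*
 * A DisasCompare describes a branch/move condition as a TCG comparison:
 * the condition holds when "c1 <cond> c2" is true.  When is_bool is set,
 * c1 already holds a 0/1 value and c2 is just a zero constant.  g1/g2
 * mark c1/c2 as global TCG values that free_compare() must not free.
 */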
90 typedef struct {
91     TCGCond cond;
92     bool is_bool;
93     bool g1, g2;
94     TCGv c1, c2;
95 } DisasCompare;
96
97 // This macro uses non-native bit order: bit 0 is the MSB (bit 31)
98 #define GET_FIELD(X, FROM, TO)                                  \
99     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
100
101 // This macro uses the bit order of the manuals, i.e. bit 0 is 2^0 (the LSB)
102 #define GET_FIELD_SP(X, FROM, TO)               \
103     GET_FIELD(X, 31 - (TO), 31 - (FROM))
104
105 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
106 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
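/*
 * Worked example: for a Bicc instruction word, GET_FIELD(insn, 3, 6)
 * counts bits from the MSB (bit 0 == bit 31) and expands to
 *     (insn >> 25) & 0xf
 * i.e. the 4-bit cond field in insn<28:25>.  The _SP variant uses the
 * LSB-0 numbering of the manuals, so GET_FIELD_SP(insn, 25, 27) yields
 *     (insn >> 25) & 0x7
 */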
107
108 #ifdef TARGET_SPARC64
109 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
110 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
111 #else
112 #define DFPREG(r) (r & 0x1e)
113 #define QFPREG(r) (r & 0x1c)
114 #endif
115
116 #define UA2005_HTRAP_MASK 0xff
117 #define V8_TRAP_MASK 0x7f
118
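/* Sign-extend a LEN-bit immediate extracted with GET_FIELDs/GET_FIELD_SPs,
   e.g. sign_extend(0x1000, 13) == -4096 for a negative simm13.  */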
119 static int sign_extend(int x, int len)
120 {
121     len = 32 - len;
122     return (x << len) >> len;
123 }
124
125 #define IS_IMM (insn & (1<<13))
126
127 static inline TCGv_i32 get_temp_i32(DisasContext *dc)
128 {
129     TCGv_i32 t;
130     assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
131     dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
132     return t;
133 }
134
135 static inline TCGv get_temp_tl(DisasContext *dc)
136 {
137     TCGv t;
138     assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
139     dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
140     return t;
141 }
142
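/* After a write to float register RD, mark the corresponding half of the
   register file dirty in FPRS: DL (bit 0) for the lower registers, DU
   (bit 1) for the upper ones.  No-op on 32-bit sparc, which has no FPRS.  */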
143 static inline void gen_update_fprs_dirty(int rd)
144 {
145 #if defined(TARGET_SPARC64)
146     tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
147 #endif
148 }
149
150 /* floating-point register moves */
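/*
 * Single-precision registers are packed two per TCGv_i64 element of
 * cpu_fpr[]: the even-numbered register lives in bits 63:32 and the
 * odd-numbered one in bits 31:0, as the load/store helpers below assume.
 */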
151 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
152 {
153 #if TCG_TARGET_REG_BITS == 32
154     if (src & 1) {
155         return TCGV_LOW(cpu_fpr[src / 2]);
156     } else {
157         return TCGV_HIGH(cpu_fpr[src / 2]);
158     }
159 #else
160     if (src & 1) {
161         return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
162     } else {
163         TCGv_i32 ret = get_temp_i32(dc);
164         TCGv_i64 t = tcg_temp_new_i64();
165
166         tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
167         tcg_gen_trunc_i64_i32(ret, t);
168         tcg_temp_free_i64(t);
169
170         return ret;
171     }
172 #endif
173 }
174
175 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
176 {
177 #if TCG_TARGET_REG_BITS == 32
178     if (dst & 1) {
179         tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
180     } else {
181         tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
182     }
183 #else
184     TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
185     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
186                         (dst & 1 ? 0 : 32), 32);
187 #endif
188     gen_update_fprs_dirty(dst);
189 }
190
191 static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
192 {
193     return get_temp_i32(dc);
194 }
195
196 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
197 {
198     src = DFPREG(src);
199     return cpu_fpr[src / 2];
200 }
201
202 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
203 {
204     dst = DFPREG(dst);
205     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
206     gen_update_fprs_dirty(dst);
207 }
208
209 static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
210 {
211     return cpu_fpr[DFPREG(dst) / 2];
212 }
213
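/*
 * Quad-precision values span two consecutive cpu_fpr[] doubles.  They are
 * staged through env->qt0/qt1 so the softfloat helpers can operate on
 * them; results come back in qt0.
 */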
214 static void gen_op_load_fpr_QT0(unsigned int src)
215 {
216     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
217                    offsetof(CPU_QuadU, ll.upper));
218     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
219                    offsetof(CPU_QuadU, ll.lower));
220 }
221
222 static void gen_op_load_fpr_QT1(unsigned int src)
223 {
224     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
225                    offsetof(CPU_QuadU, ll.upper));
226     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
227                    offsetof(CPU_QuadU, ll.lower));
228 }
229
230 static void gen_op_store_QT0_fpr(unsigned int dst)
231 {
232     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
233                    offsetof(CPU_QuadU, ll.upper));
234     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
235                    offsetof(CPU_QuadU, ll.lower));
236 }
237
238 #ifdef TARGET_SPARC64
239 static void gen_move_Q(unsigned int rd, unsigned int rs)
240 {
241     rd = QFPREG(rd);
242     rs = QFPREG(rs);
243
244     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
245     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
246     gen_update_fprs_dirty(rd);
247 }
248 #endif
249
250 /* moves */
251 #ifdef CONFIG_USER_ONLY
252 #define supervisor(dc) 0
253 #ifdef TARGET_SPARC64
254 #define hypervisor(dc) 0
255 #endif
256 #else
257 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
258 #ifdef TARGET_SPARC64
259 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
260 #else
261 #endif
262 #endif
263
264 #ifdef TARGET_SPARC64
265 #ifndef TARGET_ABI32
266 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
267 #else
268 #define AM_CHECK(dc) (1)
269 #endif
270 #endif
271
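/* With 32-bit address masking in effect (V9 PSTATE.AM, or always for the
   32-bit user ABI), truncate an effective address to 32 bits.  */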
272 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
273 {
274 #ifdef TARGET_SPARC64
275     if (AM_CHECK(dc))
276         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
277 #endif
278 }
279
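/*
 * %g0 always reads as zero and ignores writes.  Globals %g1-%g7 are TCG
 * globals (cpu_gregs[]); the windowed registers %o, %l and %i (regs 8-31)
 * are reached indirectly through cpu_regwptr, which follows the current
 * register window.
 */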
280 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
281 {
282     if (reg == 0 || reg >= 8) {
283         TCGv t = get_temp_tl(dc);
284         if (reg == 0) {
285             tcg_gen_movi_tl(t, 0);
286         } else {
287             tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
288         }
289         return t;
290     } else {
291         return cpu_gregs[reg];
292     }
293 }
294
295 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
296 {
297     if (reg > 0) {
298         if (reg < 8) {
299             tcg_gen_mov_tl(cpu_gregs[reg], v);
300         } else {
301             tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
302         }
303     }
304 }
305
306 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
307 {
308     if (reg == 0 || reg >= 8) {
309         return get_temp_tl(dc);
310     } else {
311         return cpu_gregs[reg];
312     }
313 }
314
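/*
 * Generate the end of a translation block.  If the target pc/npc stay on
 * the same guest page as this TB and we are not single-stepping, emit a
 * goto_tb so the blocks can be chained directly; the value passed to
 * tcg_gen_exit_tb() identifies which of the two jump slots to patch.
 * Otherwise fall back to exiting with the new pc/npc in the CPU state.
 */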
315 static inline void gen_goto_tb(DisasContext *s, int tb_num,
316                                target_ulong pc, target_ulong npc)
317 {
318     TranslationBlock *tb;
319
320     tb = s->tb;
321     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
322         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
323         !s->singlestep)  {
324         /* jump to same page: we can use a direct jump */
325         tcg_gen_goto_tb(tb_num);
326         tcg_gen_movi_tl(cpu_pc, pc);
327         tcg_gen_movi_tl(cpu_npc, npc);
328         tcg_gen_exit_tb((uintptr_t)tb + tb_num);
329     } else {
330         /* jump to another page: currently not optimized */
331         tcg_gen_movi_tl(cpu_pc, pc);
332         tcg_gen_movi_tl(cpu_npc, npc);
333         tcg_gen_exit_tb(0);
334     }
335 }
336
337 // XXX suboptimal: extracts one PSR flag bit (N/Z/V/C) with three TCG ops
338 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
339 {
340     tcg_gen_extu_i32_tl(reg, src);
341     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
342     tcg_gen_andi_tl(reg, reg, 0x1);
343 }
344
345 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
346 {
347     tcg_gen_extu_i32_tl(reg, src);
348     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
349     tcg_gen_andi_tl(reg, reg, 0x1);
350 }
351
352 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
353 {
354     tcg_gen_extu_i32_tl(reg, src);
355     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
356     tcg_gen_andi_tl(reg, reg, 0x1);
357 }
358
359 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
360 {
361     tcg_gen_extu_i32_tl(reg, src);
362     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
363     tcg_gen_andi_tl(reg, reg, 0x1);
364 }
365
366 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
367 {
368     tcg_gen_mov_tl(cpu_cc_src, src1);
369     tcg_gen_mov_tl(cpu_cc_src2, src2);
370     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
371     tcg_gen_mov_tl(dst, cpu_cc_dst);
372 }
373
374 static TCGv_i32 gen_add32_carry32(void)
375 {
376     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
377
378     /* Carry is computed from a previous add: (dst < src)  */
379 #if TARGET_LONG_BITS == 64
380     cc_src1_32 = tcg_temp_new_i32();
381     cc_src2_32 = tcg_temp_new_i32();
382     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
383     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
384 #else
385     cc_src1_32 = cpu_cc_dst;
386     cc_src2_32 = cpu_cc_src;
387 #endif
388
389     carry_32 = tcg_temp_new_i32();
390     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
391
392 #if TARGET_LONG_BITS == 64
393     tcg_temp_free_i32(cc_src1_32);
394     tcg_temp_free_i32(cc_src2_32);
395 #endif
396
397     return carry_32;
398 }
399
400 static TCGv_i32 gen_sub32_carry32(void)
401 {
402     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
403
404     /* Carry is computed from a previous borrow: (src1 < src2)  */
405 #if TARGET_LONG_BITS == 64
406     cc_src1_32 = tcg_temp_new_i32();
407     cc_src2_32 = tcg_temp_new_i32();
408     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
409     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
410 #else
411     cc_src1_32 = cpu_cc_src;
412     cc_src2_32 = cpu_cc_src2;
413 #endif
414
415     carry_32 = tcg_temp_new_i32();
416     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
417
418 #if TARGET_LONG_BITS == 64
419     tcg_temp_free_i32(cc_src1_32);
420     tcg_temp_free_i32(cc_src2_32);
421 #endif
422
423     return carry_32;
424 }
425
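/*
 * ADDX/ADDXcc: add with the carry from the current condition codes.  The
 * cost of recovering that carry depends on how the flags were produced
 * (dc->cc_op): it is known to be zero after logic ops, recomputable from
 * cc_src/cc_dst after an add or sub, and otherwise needs the
 * compute_C_icc helper.  On 32-bit targets the add case reuses the host
 * carry via add2.
 */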
426 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
427                             TCGv src2, int update_cc)
428 {
429     TCGv_i32 carry_32;
430     TCGv carry;
431
432     switch (dc->cc_op) {
433     case CC_OP_DIV:
434     case CC_OP_LOGIC:
435         /* Carry is known to be zero.  Fall back to plain ADD.  */
436         if (update_cc) {
437             gen_op_add_cc(dst, src1, src2);
438         } else {
439             tcg_gen_add_tl(dst, src1, src2);
440         }
441         return;
442
443     case CC_OP_ADD:
444     case CC_OP_TADD:
445     case CC_OP_TADDTV:
446         if (TARGET_LONG_BITS == 32) {
447             /* We can re-use the host's hardware carry generation by using
448                an ADD2 opcode.  We discard the low part of the output.
449                Ideally we'd combine this operation with the add that
450                generated the carry in the first place.  */
451             carry = tcg_temp_new();
452             tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
453             tcg_temp_free(carry);
454             goto add_done;
455         }
456         carry_32 = gen_add32_carry32();
457         break;
458
459     case CC_OP_SUB:
460     case CC_OP_TSUB:
461     case CC_OP_TSUBTV:
462         carry_32 = gen_sub32_carry32();
463         break;
464
465     default:
466         /* We need external help to produce the carry.  */
467         carry_32 = tcg_temp_new_i32();
468         gen_helper_compute_C_icc(carry_32, cpu_env);
469         break;
470     }
471
472 #if TARGET_LONG_BITS == 64
473     carry = tcg_temp_new();
474     tcg_gen_extu_i32_i64(carry, carry_32);
475 #else
476     carry = carry_32;
477 #endif
478
479     tcg_gen_add_tl(dst, src1, src2);
480     tcg_gen_add_tl(dst, dst, carry);
481
482     tcg_temp_free_i32(carry_32);
483 #if TARGET_LONG_BITS == 64
484     tcg_temp_free(carry);
485 #endif
486
487  add_done:
488     if (update_cc) {
489         tcg_gen_mov_tl(cpu_cc_src, src1);
490         tcg_gen_mov_tl(cpu_cc_src2, src2);
491         tcg_gen_mov_tl(cpu_cc_dst, dst);
492         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
493         dc->cc_op = CC_OP_ADDX;
494     }
495 }
496
497 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
498 {
499     tcg_gen_mov_tl(cpu_cc_src, src1);
500     tcg_gen_mov_tl(cpu_cc_src2, src2);
501     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
502     tcg_gen_mov_tl(dst, cpu_cc_dst);
503 }
504
505 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
506                             TCGv src2, int update_cc)
507 {
508     TCGv_i32 carry_32;
509     TCGv carry;
510
511     switch (dc->cc_op) {
512     case CC_OP_DIV:
513     case CC_OP_LOGIC:
514         /* Carry is known to be zero.  Fall back to plain SUB.  */
515         if (update_cc) {
516             gen_op_sub_cc(dst, src1, src2);
517         } else {
518             tcg_gen_sub_tl(dst, src1, src2);
519         }
520         return;
521
522     case CC_OP_ADD:
523     case CC_OP_TADD:
524     case CC_OP_TADDTV:
525         carry_32 = gen_add32_carry32();
526         break;
527
528     case CC_OP_SUB:
529     case CC_OP_TSUB:
530     case CC_OP_TSUBTV:
531         if (TARGET_LONG_BITS == 32) {
532             /* We can re-use the host's hardware carry generation by using
533                a SUB2 opcode.  We discard the low part of the output.
534                Ideally we'd combine this operation with the subtraction
535                that generated the carry in the first place.  */
536             carry = tcg_temp_new();
537             tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
538             tcg_temp_free(carry);
539             goto sub_done;
540         }
541         carry_32 = gen_sub32_carry32();
542         break;
543
544     default:
545         /* We need external help to produce the carry.  */
546         carry_32 = tcg_temp_new_i32();
547         gen_helper_compute_C_icc(carry_32, cpu_env);
548         break;
549     }
550
551 #if TARGET_LONG_BITS == 64
552     carry = tcg_temp_new();
553     tcg_gen_extu_i32_i64(carry, carry_32);
554 #else
555     carry = carry_32;
556 #endif
557
558     tcg_gen_sub_tl(dst, src1, src2);
559     tcg_gen_sub_tl(dst, dst, carry);
560
561     tcg_temp_free_i32(carry_32);
562 #if TARGET_LONG_BITS == 64
563     tcg_temp_free(carry);
564 #endif
565
566  sub_done:
567     if (update_cc) {
568         tcg_gen_mov_tl(cpu_cc_src, src1);
569         tcg_gen_mov_tl(cpu_cc_src2, src2);
570         tcg_gen_mov_tl(cpu_cc_dst, dst);
571         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
572         dc->cc_op = CC_OP_SUBX;
573     }
574 }
575
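/*
 * MULScc performs one step of the V8 shift-and-add multiply: the addend
 * (rs2) is forced to zero when the low bit of %y is clear, %y is shifted
 * right with the low bit of rs1 moved into its top bit, rs1 is shifted
 * right with (N xor V) moved into its top bit, and the shifted rs1 plus
 * the addend is written to the destination.
 */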
576 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
577 {
578     TCGv r_temp, zero, t0;
579
580     r_temp = tcg_temp_new();
581     t0 = tcg_temp_new();
582
583     /* old op:
584     if (!(env->y & 1))
585         T1 = 0;
586     */
587     zero = tcg_const_tl(0);
588     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
589     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
590     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
591     tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
592                        zero, cpu_cc_src2);
593     tcg_temp_free(zero);
594
595     // b2 = T0 & 1;
596     // env->y = (b2 << 31) | (env->y >> 1);
597     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
598     tcg_gen_shli_tl(r_temp, r_temp, 31);
599     tcg_gen_shri_tl(t0, cpu_y, 1);
600     tcg_gen_andi_tl(t0, t0, 0x7fffffff);
601     tcg_gen_or_tl(t0, t0, r_temp);
602     tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);
603
604     // b1 = N ^ V;
605     gen_mov_reg_N(t0, cpu_psr);
606     gen_mov_reg_V(r_temp, cpu_psr);
607     tcg_gen_xor_tl(t0, t0, r_temp);
608     tcg_temp_free(r_temp);
609
610     // T0 = (b1 << 31) | (T0 >> 1);
611     // src1 = T0;
612     tcg_gen_shli_tl(t0, t0, 31);
613     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
614     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
615     tcg_temp_free(t0);
616
617     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
618
619     tcg_gen_mov_tl(dst, cpu_cc_dst);
620 }
621
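/*
 * UMUL/SMUL: 32x32 -> 64-bit multiply.  The low 32 bits of the product go
 * to the destination (the full 64-bit product on sparc64) and the high 32
 * bits are copied into %y.
 */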
622 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
623 {
624 #if TARGET_LONG_BITS == 32
625     if (sign_ext) {
626         tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
627     } else {
628         tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
629     }
630 #else
631     TCGv t0 = tcg_temp_new_i64();
632     TCGv t1 = tcg_temp_new_i64();
633
634     if (sign_ext) {
635         tcg_gen_ext32s_i64(t0, src1);
636         tcg_gen_ext32s_i64(t1, src2);
637     } else {
638         tcg_gen_ext32u_i64(t0, src1);
639         tcg_gen_ext32u_i64(t1, src2);
640     }
641
642     tcg_gen_mul_i64(dst, t0, t1);
643     tcg_temp_free(t0);
644     tcg_temp_free(t1);
645
646     tcg_gen_shri_i64(cpu_y, dst, 32);
647 #endif
648 }
649
650 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
651 {
652     /* zero-extend truncated operands before multiplication */
653     gen_op_multiply(dst, src1, src2, 0);
654 }
655
656 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
657 {
658     /* sign-extend truncated operands before multiplication */
659     gen_op_multiply(dst, src1, src2, 1);
660 }
661
662 // 1
663 static inline void gen_op_eval_ba(TCGv dst)
664 {
665     tcg_gen_movi_tl(dst, 1);
666 }
667
668 // Z
669 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
670 {
671     gen_mov_reg_Z(dst, src);
672 }
673
674 // Z | (N ^ V)
675 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
676 {
677     TCGv t0 = tcg_temp_new();
678     gen_mov_reg_N(t0, src);
679     gen_mov_reg_V(dst, src);
680     tcg_gen_xor_tl(dst, dst, t0);
681     gen_mov_reg_Z(t0, src);
682     tcg_gen_or_tl(dst, dst, t0);
683     tcg_temp_free(t0);
684 }
685
686 // N ^ V
687 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
688 {
689     TCGv t0 = tcg_temp_new();
690     gen_mov_reg_V(t0, src);
691     gen_mov_reg_N(dst, src);
692     tcg_gen_xor_tl(dst, dst, t0);
693     tcg_temp_free(t0);
694 }
695
696 // C | Z
697 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
698 {
699     TCGv t0 = tcg_temp_new();
700     gen_mov_reg_Z(t0, src);
701     gen_mov_reg_C(dst, src);
702     tcg_gen_or_tl(dst, dst, t0);
703     tcg_temp_free(t0);
704 }
705
706 // C
707 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
708 {
709     gen_mov_reg_C(dst, src);
710 }
711
712 // V
713 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
714 {
715     gen_mov_reg_V(dst, src);
716 }
717
718 // 0
719 static inline void gen_op_eval_bn(TCGv dst)
720 {
721     tcg_gen_movi_tl(dst, 0);
722 }
723
724 // N
725 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
726 {
727     gen_mov_reg_N(dst, src);
728 }
729
730 // !Z
731 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
732 {
733     gen_mov_reg_Z(dst, src);
734     tcg_gen_xori_tl(dst, dst, 0x1);
735 }
736
737 // !(Z | (N ^ V))
738 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
739 {
740     gen_op_eval_ble(dst, src);
741     tcg_gen_xori_tl(dst, dst, 0x1);
742 }
743
744 // !(N ^ V)
745 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
746 {
747     gen_op_eval_bl(dst, src);
748     tcg_gen_xori_tl(dst, dst, 0x1);
749 }
750
751 // !(C | Z)
752 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
753 {
754     gen_op_eval_bleu(dst, src);
755     tcg_gen_xori_tl(dst, dst, 0x1);
756 }
757
758 // !C
759 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
760 {
761     gen_mov_reg_C(dst, src);
762     tcg_gen_xori_tl(dst, dst, 0x1);
763 }
764
765 // !N
766 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
767 {
768     gen_mov_reg_N(dst, src);
769     tcg_gen_xori_tl(dst, dst, 0x1);
770 }
771
772 // !V
773 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
774 {
775     gen_mov_reg_V(dst, src);
776     tcg_gen_xori_tl(dst, dst, 0x1);
777 }
778
779 /*
780   FSR condition-code field (FCC1:FCC0) values after an FP compare:
781    0  equal
782    1  less
783    2  greater
784    3  unordered
785 */
786 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
787                                     unsigned int fcc_offset)
788 {
789     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
790     tcg_gen_andi_tl(reg, reg, 0x1);
791 }
792
793 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
794                                     unsigned int fcc_offset)
795 {
796     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
797     tcg_gen_andi_tl(reg, reg, 0x1);
798 }
799
800 // !0: FCC0 | FCC1
801 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
802                                     unsigned int fcc_offset)
803 {
804     TCGv t0 = tcg_temp_new();
805     gen_mov_reg_FCC0(dst, src, fcc_offset);
806     gen_mov_reg_FCC1(t0, src, fcc_offset);
807     tcg_gen_or_tl(dst, dst, t0);
808     tcg_temp_free(t0);
809 }
810
811 // 1 or 2: FCC0 ^ FCC1
812 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
813                                     unsigned int fcc_offset)
814 {
815     TCGv t0 = tcg_temp_new();
816     gen_mov_reg_FCC0(dst, src, fcc_offset);
817     gen_mov_reg_FCC1(t0, src, fcc_offset);
818     tcg_gen_xor_tl(dst, dst, t0);
819     tcg_temp_free(t0);
820 }
821
822 // 1 or 3: FCC0
823 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
824                                     unsigned int fcc_offset)
825 {
826     gen_mov_reg_FCC0(dst, src, fcc_offset);
827 }
828
829 // 1: FCC0 & !FCC1
830 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
831                                     unsigned int fcc_offset)
832 {
833     TCGv t0 = tcg_temp_new();
834     gen_mov_reg_FCC0(dst, src, fcc_offset);
835     gen_mov_reg_FCC1(t0, src, fcc_offset);
836     tcg_gen_andc_tl(dst, dst, t0);
837     tcg_temp_free(t0);
838 }
839
840 // 2 or 3: FCC1
841 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
842                                     unsigned int fcc_offset)
843 {
844     gen_mov_reg_FCC1(dst, src, fcc_offset);
845 }
846
847 // 2: !FCC0 & FCC1
848 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
849                                     unsigned int fcc_offset)
850 {
851     TCGv t0 = tcg_temp_new();
852     gen_mov_reg_FCC0(dst, src, fcc_offset);
853     gen_mov_reg_FCC1(t0, src, fcc_offset);
854     tcg_gen_andc_tl(dst, t0, dst);
855     tcg_temp_free(t0);
856 }
857
858 // 3: FCC0 & FCC1
859 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
860                                     unsigned int fcc_offset)
861 {
862     TCGv t0 = tcg_temp_new();
863     gen_mov_reg_FCC0(dst, src, fcc_offset);
864     gen_mov_reg_FCC1(t0, src, fcc_offset);
865     tcg_gen_and_tl(dst, dst, t0);
866     tcg_temp_free(t0);
867 }
868
869 // 0: !(FCC0 | FCC1)
870 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
871                                     unsigned int fcc_offset)
872 {
873     TCGv t0 = tcg_temp_new();
874     gen_mov_reg_FCC0(dst, src, fcc_offset);
875     gen_mov_reg_FCC1(t0, src, fcc_offset);
876     tcg_gen_or_tl(dst, dst, t0);
877     tcg_gen_xori_tl(dst, dst, 0x1);
878     tcg_temp_free(t0);
879 }
880
881 // 0 or 3: !(FCC0 ^ FCC1)
882 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
883                                     unsigned int fcc_offset)
884 {
885     TCGv t0 = tcg_temp_new();
886     gen_mov_reg_FCC0(dst, src, fcc_offset);
887     gen_mov_reg_FCC1(t0, src, fcc_offset);
888     tcg_gen_xor_tl(dst, dst, t0);
889     tcg_gen_xori_tl(dst, dst, 0x1);
890     tcg_temp_free(t0);
891 }
892
893 // 0 or 2: !FCC0
894 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
895                                     unsigned int fcc_offset)
896 {
897     gen_mov_reg_FCC0(dst, src, fcc_offset);
898     tcg_gen_xori_tl(dst, dst, 0x1);
899 }
900
901 // !1: !(FCC0 & !FCC1)
902 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
903                                     unsigned int fcc_offset)
904 {
905     TCGv t0 = tcg_temp_new();
906     gen_mov_reg_FCC0(dst, src, fcc_offset);
907     gen_mov_reg_FCC1(t0, src, fcc_offset);
908     tcg_gen_andc_tl(dst, dst, t0);
909     tcg_gen_xori_tl(dst, dst, 0x1);
910     tcg_temp_free(t0);
911 }
912
913 // 0 or 1: !FCC1
914 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
915                                     unsigned int fcc_offset)
916 {
917     gen_mov_reg_FCC1(dst, src, fcc_offset);
918     tcg_gen_xori_tl(dst, dst, 0x1);
919 }
920
921 // !2: !(!FCC0 & FCC1)
922 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
923                                     unsigned int fcc_offset)
924 {
925     TCGv t0 = tcg_temp_new();
926     gen_mov_reg_FCC0(dst, src, fcc_offset);
927     gen_mov_reg_FCC1(t0, src, fcc_offset);
928     tcg_gen_andc_tl(dst, t0, dst);
929     tcg_gen_xori_tl(dst, dst, 0x1);
930     tcg_temp_free(t0);
931 }
932
933 // !3: !(FCC0 & FCC1)
934 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
935                                     unsigned int fcc_offset)
936 {
937     TCGv t0 = tcg_temp_new();
938     gen_mov_reg_FCC0(dst, src, fcc_offset);
939     gen_mov_reg_FCC1(t0, src, fcc_offset);
940     tcg_gen_and_tl(dst, dst, t0);
941     tcg_gen_xori_tl(dst, dst, 0x1);
942     tcg_temp_free(t0);
943 }
944
945 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
946                                target_ulong pc2, TCGv r_cond)
947 {
948     int l1;
949
950     l1 = gen_new_label();
951
952     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
953
954     gen_goto_tb(dc, 0, pc1, pc1 + 4);
955
956     gen_set_label(l1);
957     gen_goto_tb(dc, 1, pc2, pc2 + 4);
958 }
959
960 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
961                                 target_ulong pc2, TCGv r_cond)
962 {
963     int l1;
964
965     l1 = gen_new_label();
966
967     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
968
969     gen_goto_tb(dc, 0, pc2, pc1);
970
971     gen_set_label(l1);
972     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
973 }
974
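/*
 * While a conditional branch with an unresolved outcome is pending,
 * dc->npc is the symbolic value JUMP_PC and the two possible targets sit
 * in dc->jump_pc[].  This materializes the choice: npc becomes
 * jump_pc[0] if cpu_cond is non-zero (branch taken), else jump_pc[1].
 */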
975 static inline void gen_generic_branch(DisasContext *dc)
976 {
977     TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
978     TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
979     TCGv zero = tcg_const_tl(0);
980
981     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
982
983     tcg_temp_free(npc0);
984     tcg_temp_free(npc1);
985     tcg_temp_free(zero);
986 }
987
988 /* Call this before clobbering cpu_cond: a pending JUMP_PC npc still
989    depends on its current value and must be resolved first. */
990 static inline void flush_cond(DisasContext *dc)
991 {
992     if (dc->npc == JUMP_PC) {
993         gen_generic_branch(dc);
994         dc->npc = DYNAMIC_PC;
995     }
996 }
997
998 static inline void save_npc(DisasContext *dc)
999 {
1000     if (dc->npc == JUMP_PC) {
1001         gen_generic_branch(dc);
1002         dc->npc = DYNAMIC_PC;
1003     } else if (dc->npc != DYNAMIC_PC) {
1004         tcg_gen_movi_tl(cpu_npc, dc->npc);
1005     }
1006 }
1007
1008 static inline void update_psr(DisasContext *dc)
1009 {
1010     if (dc->cc_op != CC_OP_FLAGS) {
1011         dc->cc_op = CC_OP_FLAGS;
1012         gen_helper_compute_psr(cpu_env);
1013     }
1014 }
1015
1016 static inline void save_state(DisasContext *dc)
1017 {
1018     tcg_gen_movi_tl(cpu_pc, dc->pc);
1019     save_npc(dc);
1020 }
1021
1022 static inline void gen_mov_pc_npc(DisasContext *dc)
1023 {
1024     if (dc->npc == JUMP_PC) {
1025         gen_generic_branch(dc);
1026         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1027         dc->pc = DYNAMIC_PC;
1028     } else if (dc->npc == DYNAMIC_PC) {
1029         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1030         dc->pc = DYNAMIC_PC;
1031     } else {
1032         dc->pc = dc->npc;
1033     }
1034 }
1035
1036 static inline void gen_op_next_insn(void)
1037 {
1038     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1039     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1040 }
1041
1042 static void free_compare(DisasCompare *cmp)
1043 {
1044     if (!cmp->g1) {
1045         tcg_temp_free(cmp->c1);
1046     }
1047     if (!cmp->g2) {
1048         tcg_temp_free(cmp->c2);
1049     }
1050 }
1051
1052 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1053                         DisasContext *dc)
1054 {
1055     static int subcc_cond[16] = {
1056         TCG_COND_NEVER,
1057         TCG_COND_EQ,
1058         TCG_COND_LE,
1059         TCG_COND_LT,
1060         TCG_COND_LEU,
1061         TCG_COND_LTU,
1062         -1, /* neg */
1063         -1, /* overflow */
1064         TCG_COND_ALWAYS,
1065         TCG_COND_NE,
1066         TCG_COND_GT,
1067         TCG_COND_GE,
1068         TCG_COND_GTU,
1069         TCG_COND_GEU,
1070         -1, /* pos */
1071         -1, /* no overflow */
1072     };
1073
1074     static int logic_cond[16] = {
1075         TCG_COND_NEVER,
1076         TCG_COND_EQ,     /* eq:  Z */
1077         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1078         TCG_COND_LT,     /* lt:  N ^ V -> N */
1079         TCG_COND_EQ,     /* leu: C | Z -> Z */
1080         TCG_COND_NEVER,  /* ltu: C -> 0 */
1081         TCG_COND_LT,     /* neg: N */
1082         TCG_COND_NEVER,  /* vs:  V -> 0 */
1083         TCG_COND_ALWAYS,
1084         TCG_COND_NE,     /* ne:  !Z */
1085         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1086         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1087         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1088         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1089         TCG_COND_GE,     /* pos: !N */
1090         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1091     };
1092
1093     TCGv_i32 r_src;
1094     TCGv r_dst;
1095
1096 #ifdef TARGET_SPARC64
1097     if (xcc) {
1098         r_src = cpu_xcc;
1099     } else {
1100         r_src = cpu_psr;
1101     }
1102 #else
1103     r_src = cpu_psr;
1104 #endif
1105
1106     switch (dc->cc_op) {
1107     case CC_OP_LOGIC:
1108         cmp->cond = logic_cond[cond];
1109     do_compare_dst_0:
1110         cmp->is_bool = false;
1111         cmp->g2 = false;
1112         cmp->c2 = tcg_const_tl(0);
1113 #ifdef TARGET_SPARC64
1114         if (!xcc) {
1115             cmp->g1 = false;
1116             cmp->c1 = tcg_temp_new();
1117             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1118             break;
1119         }
1120 #endif
1121         cmp->g1 = true;
1122         cmp->c1 = cpu_cc_dst;
1123         break;
1124
1125     case CC_OP_SUB:
1126         switch (cond) {
1127         case 6:  /* neg */
1128         case 14: /* pos */
1129             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1130             goto do_compare_dst_0;
1131
1132         case 7: /* overflow */
1133         case 15: /* !overflow */
1134             goto do_dynamic;
1135
1136         default:
1137             cmp->cond = subcc_cond[cond];
1138             cmp->is_bool = false;
1139 #ifdef TARGET_SPARC64
1140             if (!xcc) {
1141                 /* Note that sign-extension works for unsigned compares as
1142                    long as both operands are sign-extended.  */
1143                 cmp->g1 = cmp->g2 = false;
1144                 cmp->c1 = tcg_temp_new();
1145                 cmp->c2 = tcg_temp_new();
1146                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1147                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1148                 break;
1149             }
1150 #endif
1151             cmp->g1 = cmp->g2 = true;
1152             cmp->c1 = cpu_cc_src;
1153             cmp->c2 = cpu_cc_src2;
1154             break;
1155         }
1156         break;
1157
1158     default:
1159     do_dynamic:
1160         gen_helper_compute_psr(cpu_env);
1161         dc->cc_op = CC_OP_FLAGS;
1162         /* FALLTHRU */
1163
1164     case CC_OP_FLAGS:
1165         /* We're going to generate a boolean result.  */
1166         cmp->cond = TCG_COND_NE;
1167         cmp->is_bool = true;
1168         cmp->g1 = cmp->g2 = false;
1169         cmp->c1 = r_dst = tcg_temp_new();
1170         cmp->c2 = tcg_const_tl(0);
1171
1172         switch (cond) {
1173         case 0x0:
1174             gen_op_eval_bn(r_dst);
1175             break;
1176         case 0x1:
1177             gen_op_eval_be(r_dst, r_src);
1178             break;
1179         case 0x2:
1180             gen_op_eval_ble(r_dst, r_src);
1181             break;
1182         case 0x3:
1183             gen_op_eval_bl(r_dst, r_src);
1184             break;
1185         case 0x4:
1186             gen_op_eval_bleu(r_dst, r_src);
1187             break;
1188         case 0x5:
1189             gen_op_eval_bcs(r_dst, r_src);
1190             break;
1191         case 0x6:
1192             gen_op_eval_bneg(r_dst, r_src);
1193             break;
1194         case 0x7:
1195             gen_op_eval_bvs(r_dst, r_src);
1196             break;
1197         case 0x8:
1198             gen_op_eval_ba(r_dst);
1199             break;
1200         case 0x9:
1201             gen_op_eval_bne(r_dst, r_src);
1202             break;
1203         case 0xa:
1204             gen_op_eval_bg(r_dst, r_src);
1205             break;
1206         case 0xb:
1207             gen_op_eval_bge(r_dst, r_src);
1208             break;
1209         case 0xc:
1210             gen_op_eval_bgu(r_dst, r_src);
1211             break;
1212         case 0xd:
1213             gen_op_eval_bcc(r_dst, r_src);
1214             break;
1215         case 0xe:
1216             gen_op_eval_bpos(r_dst, r_src);
1217             break;
1218         case 0xf:
1219             gen_op_eval_bvc(r_dst, r_src);
1220             break;
1221         }
1222         break;
1223     }
1224 }
1225
1226 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1227 {
1228     unsigned int offset;
1229     TCGv r_dst;
1230
1231     /* For now we still generate a straight boolean result.  */
1232     cmp->cond = TCG_COND_NE;
1233     cmp->is_bool = true;
1234     cmp->g1 = cmp->g2 = false;
1235     cmp->c1 = r_dst = tcg_temp_new();
1236     cmp->c2 = tcg_const_tl(0);
1237
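    /* Select the fcc field: fcc0 sits at FSR bits 11:10, while fcc1-fcc3
       sit at bits 33:32, 35:34 and 37:36, hence the offsets of 22, 24 and
       26 from FSR_FCC0_SHIFT below.  */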
1238     switch (cc) {
1239     default:
1240     case 0x0:
1241         offset = 0;
1242         break;
1243     case 0x1:
1244         offset = 32 - 10;
1245         break;
1246     case 0x2:
1247         offset = 34 - 10;
1248         break;
1249     case 0x3:
1250         offset = 36 - 10;
1251         break;
1252     }
1253
1254     switch (cond) {
1255     case 0x0:
1256         gen_op_eval_bn(r_dst);
1257         break;
1258     case 0x1:
1259         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1260         break;
1261     case 0x2:
1262         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1263         break;
1264     case 0x3:
1265         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1266         break;
1267     case 0x4:
1268         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1269         break;
1270     case 0x5:
1271         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1272         break;
1273     case 0x6:
1274         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1275         break;
1276     case 0x7:
1277         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1278         break;
1279     case 0x8:
1280         gen_op_eval_ba(r_dst);
1281         break;
1282     case 0x9:
1283         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1284         break;
1285     case 0xa:
1286         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1287         break;
1288     case 0xb:
1289         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1290         break;
1291     case 0xc:
1292         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1293         break;
1294     case 0xd:
1295         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1296         break;
1297     case 0xe:
1298         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1299         break;
1300     case 0xf:
1301         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1302         break;
1303     }
1304 }
1305
1306 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1307                      DisasContext *dc)
1308 {
1309     DisasCompare cmp;
1310     gen_compare(&cmp, cc, cond, dc);
1311
1312     /* The interface is to return a boolean in r_dst.  */
1313     if (cmp.is_bool) {
1314         tcg_gen_mov_tl(r_dst, cmp.c1);
1315     } else {
1316         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1317     }
1318
1319     free_compare(&cmp);
1320 }
1321
1322 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1323 {
1324     DisasCompare cmp;
1325     gen_fcompare(&cmp, cc, cond);
1326
1327     /* The interface is to return a boolean in r_dst.  */
1328     if (cmp.is_bool) {
1329         tcg_gen_mov_tl(r_dst, cmp.c1);
1330     } else {
1331         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1332     }
1333
1334     free_compare(&cmp);
1335 }
1336
1337 #ifdef TARGET_SPARC64
1338 // Inverted logic: table holds the negated conditions; gen_compare_reg inverts them back
1339 static const int gen_tcg_cond_reg[8] = {
1340     -1,
1341     TCG_COND_NE,
1342     TCG_COND_GT,
1343     TCG_COND_GE,
1344     -1,
1345     TCG_COND_EQ,
1346     TCG_COND_LE,
1347     TCG_COND_LT,
1348 };
1349
1350 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1351 {
1352     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1353     cmp->is_bool = false;
1354     cmp->g1 = true;
1355     cmp->g2 = false;
1356     cmp->c1 = r_src;
1357     cmp->c2 = tcg_const_tl(0);
1358 }
1359
1360 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1361 {
1362     DisasCompare cmp;
1363     gen_compare_reg(&cmp, cond, r_src);
1364
1365     /* The interface is to return a boolean in r_dst.  */
1366     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1367
1368     free_compare(&cmp);
1369 }
1370 #endif
1371
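/*
 * Bicc/BPcc: SPARC branches are delayed and carry an annul bit.  For
 * "branch never"/"branch always" the outcome is static and pc/npc can be
 * updated at translation time (annul skips the delay slot for BN, and for
 * BA it jumps straight to the target).  A real conditional branch
 * evaluates the condition into cpu_cond; with the annul bit set the delay
 * slot runs only on the taken path (gen_branch_a), otherwise the decision
 * is deferred via the JUMP_PC mechanism.
 */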
1372 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1373 {
1374     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1375     target_ulong target = dc->pc + offset;
1376
1377 #ifdef TARGET_SPARC64
1378     if (unlikely(AM_CHECK(dc))) {
1379         target &= 0xffffffffULL;
1380     }
1381 #endif
1382     if (cond == 0x0) {
1383         /* unconditional not taken */
1384         if (a) {
1385             dc->pc = dc->npc + 4;
1386             dc->npc = dc->pc + 4;
1387         } else {
1388             dc->pc = dc->npc;
1389             dc->npc = dc->pc + 4;
1390         }
1391     } else if (cond == 0x8) {
1392         /* unconditional taken */
1393         if (a) {
1394             dc->pc = target;
1395             dc->npc = dc->pc + 4;
1396         } else {
1397             dc->pc = dc->npc;
1398             dc->npc = target;
1399             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1400         }
1401     } else {
1402         flush_cond(dc);
1403         gen_cond(cpu_cond, cc, cond, dc);
1404         if (a) {
1405             gen_branch_a(dc, target, dc->npc, cpu_cond);
1406             dc->is_br = 1;
1407         } else {
1408             dc->pc = dc->npc;
1409             dc->jump_pc[0] = target;
1410             if (unlikely(dc->npc == DYNAMIC_PC)) {
1411                 dc->jump_pc[1] = DYNAMIC_PC;
1412                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1413             } else {
1414                 dc->jump_pc[1] = dc->npc + 4;
1415                 dc->npc = JUMP_PC;
1416             }
1417         }
1418     }
1419 }
1420
1421 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1422 {
1423     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1424     target_ulong target = dc->pc + offset;
1425
1426 #ifdef TARGET_SPARC64
1427     if (unlikely(AM_CHECK(dc))) {
1428         target &= 0xffffffffULL;
1429     }
1430 #endif
1431     if (cond == 0x0) {
1432         /* unconditional not taken */
1433         if (a) {
1434             dc->pc = dc->npc + 4;
1435             dc->npc = dc->pc + 4;
1436         } else {
1437             dc->pc = dc->npc;
1438             dc->npc = dc->pc + 4;
1439         }
1440     } else if (cond == 0x8) {
1441         /* unconditional taken */
1442         if (a) {
1443             dc->pc = target;
1444             dc->npc = dc->pc + 4;
1445         } else {
1446             dc->pc = dc->npc;
1447             dc->npc = target;
1448             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1449         }
1450     } else {
1451         flush_cond(dc);
1452         gen_fcond(cpu_cond, cc, cond);
1453         if (a) {
1454             gen_branch_a(dc, target, dc->npc, cpu_cond);
1455             dc->is_br = 1;
1456         } else {
1457             dc->pc = dc->npc;
1458             dc->jump_pc[0] = target;
1459             if (unlikely(dc->npc == DYNAMIC_PC)) {
1460                 dc->jump_pc[1] = DYNAMIC_PC;
1461                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1462             } else {
1463                 dc->jump_pc[1] = dc->npc + 4;
1464                 dc->npc = JUMP_PC;
1465             }
1466         }
1467     }
1468 }
1469
1470 #ifdef TARGET_SPARC64
1471 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1472                           TCGv r_reg)
1473 {
1474     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1475     target_ulong target = dc->pc + offset;
1476
1477     if (unlikely(AM_CHECK(dc))) {
1478         target &= 0xffffffffULL;
1479     }
1480     flush_cond(dc);
1481     gen_cond_reg(cpu_cond, cond, r_reg);
1482     if (a) {
1483         gen_branch_a(dc, target, dc->npc, cpu_cond);
1484         dc->is_br = 1;
1485     } else {
1486         dc->pc = dc->npc;
1487         dc->jump_pc[0] = target;
1488         if (unlikely(dc->npc == DYNAMIC_PC)) {
1489             dc->jump_pc[1] = DYNAMIC_PC;
1490             tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1491         } else {
1492             dc->jump_pc[1] = dc->npc + 4;
1493             dc->npc = JUMP_PC;
1494         }
1495     }
1496 }
1497
1498 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1499 {
1500     switch (fccno) {
1501     case 0:
1502         gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1503         break;
1504     case 1:
1505         gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1506         break;
1507     case 2:
1508         gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1509         break;
1510     case 3:
1511         gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1512         break;
1513     }
1514 }
1515
1516 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1517 {
1518     switch (fccno) {
1519     case 0:
1520         gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1521         break;
1522     case 1:
1523         gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1524         break;
1525     case 2:
1526         gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1527         break;
1528     case 3:
1529         gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1530         break;
1531     }
1532 }
1533
1534 static inline void gen_op_fcmpq(int fccno)
1535 {
1536     switch (fccno) {
1537     case 0:
1538         gen_helper_fcmpq(cpu_env);
1539         break;
1540     case 1:
1541         gen_helper_fcmpq_fcc1(cpu_env);
1542         break;
1543     case 2:
1544         gen_helper_fcmpq_fcc2(cpu_env);
1545         break;
1546     case 3:
1547         gen_helper_fcmpq_fcc3(cpu_env);
1548         break;
1549     }
1550 }
1551
1552 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1553 {
1554     switch (fccno) {
1555     case 0:
1556         gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1557         break;
1558     case 1:
1559         gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1560         break;
1561     case 2:
1562         gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1563         break;
1564     case 3:
1565         gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1566         break;
1567     }
1568 }
1569
1570 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1571 {
1572     switch (fccno) {
1573     case 0:
1574         gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1575         break;
1576     case 1:
1577         gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1578         break;
1579     case 2:
1580         gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1581         break;
1582     case 3:
1583         gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1584         break;
1585     }
1586 }
1587
1588 static inline void gen_op_fcmpeq(int fccno)
1589 {
1590     switch (fccno) {
1591     case 0:
1592         gen_helper_fcmpeq(cpu_env);
1593         break;
1594     case 1:
1595         gen_helper_fcmpeq_fcc1(cpu_env);
1596         break;
1597     case 2:
1598         gen_helper_fcmpeq_fcc2(cpu_env);
1599         break;
1600     case 3:
1601         gen_helper_fcmpeq_fcc3(cpu_env);
1602         break;
1603     }
1604 }
1605
1606 #else
1607
1608 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1609 {
1610     gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1611 }
1612
1613 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1614 {
1615     gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1616 }
1617
1618 static inline void gen_op_fcmpq(int fccno)
1619 {
1620     gen_helper_fcmpq(cpu_env);
1621 }
1622
1623 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1624 {
1625     gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1626 }
1627
1628 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1629 {
1630     gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1631 }
1632
1633 static inline void gen_op_fcmpeq(int fccno)
1634 {
1635     gen_helper_fcmpeq(cpu_env);
1636 }
1637 #endif
1638
1639 static inline void gen_op_fpexception_im(int fsr_flags)
1640 {
1641     TCGv_i32 r_const;
1642
1643     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1644     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1645     r_const = tcg_const_i32(TT_FP_EXCP);
1646     gen_helper_raise_exception(cpu_env, r_const);
1647     tcg_temp_free_i32(r_const);
1648 }
1649
1650 static int gen_trap_ifnofpu(DisasContext *dc)
1651 {
1652 #if !defined(CONFIG_USER_ONLY)
1653     if (!dc->fpu_enabled) {
1654         TCGv_i32 r_const;
1655
1656         save_state(dc);
1657         r_const = tcg_const_i32(TT_NFPU_INSN);
1658         gen_helper_raise_exception(cpu_env, r_const);
1659         tcg_temp_free_i32(r_const);
1660         dc->is_br = 1;
1661         return 1;
1662     }
1663 #endif
1664     return 0;
1665 }
1666
1667 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1668 {
1669     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1670 }
1671
1672 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1673                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1674 {
1675     TCGv_i32 dst, src;
1676
1677     src = gen_load_fpr_F(dc, rs);
1678     dst = gen_dest_fpr_F(dc);
1679
1680     gen(dst, cpu_env, src);
1681
1682     gen_store_fpr_F(dc, rd, dst);
1683 }
1684
1685 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1686                                  void (*gen)(TCGv_i32, TCGv_i32))
1687 {
1688     TCGv_i32 dst, src;
1689
1690     src = gen_load_fpr_F(dc, rs);
1691     dst = gen_dest_fpr_F(dc);
1692
1693     gen(dst, src);
1694
1695     gen_store_fpr_F(dc, rd, dst);
1696 }
1697
1698 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1699                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1700 {
1701     TCGv_i32 dst, src1, src2;
1702
1703     src1 = gen_load_fpr_F(dc, rs1);
1704     src2 = gen_load_fpr_F(dc, rs2);
1705     dst = gen_dest_fpr_F(dc);
1706
1707     gen(dst, cpu_env, src1, src2);
1708
1709     gen_store_fpr_F(dc, rd, dst);
1710 }
1711
1712 #ifdef TARGET_SPARC64
1713 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1714                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1715 {
1716     TCGv_i32 dst, src1, src2;
1717
1718     src1 = gen_load_fpr_F(dc, rs1);
1719     src2 = gen_load_fpr_F(dc, rs2);
1720     dst = gen_dest_fpr_F(dc);
1721
1722     gen(dst, src1, src2);
1723
1724     gen_store_fpr_F(dc, rd, dst);
1725 }
1726 #endif
1727
1728 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1729                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1730 {
1731     TCGv_i64 dst, src;
1732
1733     src = gen_load_fpr_D(dc, rs);
1734     dst = gen_dest_fpr_D(dc, rd);
1735
1736     gen(dst, cpu_env, src);
1737
1738     gen_store_fpr_D(dc, rd, dst);
1739 }
1740
1741 #ifdef TARGET_SPARC64
1742 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1743                                  void (*gen)(TCGv_i64, TCGv_i64))
1744 {
1745     TCGv_i64 dst, src;
1746
1747     src = gen_load_fpr_D(dc, rs);
1748     dst = gen_dest_fpr_D(dc, rd);
1749
1750     gen(dst, src);
1751
1752     gen_store_fpr_D(dc, rd, dst);
1753 }
1754 #endif
1755
1756 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1757                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1758 {
1759     TCGv_i64 dst, src1, src2;
1760
1761     src1 = gen_load_fpr_D(dc, rs1);
1762     src2 = gen_load_fpr_D(dc, rs2);
1763     dst = gen_dest_fpr_D(dc, rd);
1764
1765     gen(dst, cpu_env, src1, src2);
1766
1767     gen_store_fpr_D(dc, rd, dst);
1768 }
1769
1770 #ifdef TARGET_SPARC64
1771 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1772                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1773 {
1774     TCGv_i64 dst, src1, src2;
1775
1776     src1 = gen_load_fpr_D(dc, rs1);
1777     src2 = gen_load_fpr_D(dc, rs2);
1778     dst = gen_dest_fpr_D(dc, rd);
1779
1780     gen(dst, src1, src2);
1781
1782     gen_store_fpr_D(dc, rd, dst);
1783 }
1784
1785 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1786                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1787 {
1788     TCGv_i64 dst, src1, src2;
1789
1790     src1 = gen_load_fpr_D(dc, rs1);
1791     src2 = gen_load_fpr_D(dc, rs2);
1792     dst = gen_dest_fpr_D(dc, rd);
1793
1794     gen(dst, cpu_gsr, src1, src2);
1795
1796     gen_store_fpr_D(dc, rd, dst);
1797 }
1798
1799 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1800                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1801 {
1802     TCGv_i64 dst, src0, src1, src2;
1803
1804     src1 = gen_load_fpr_D(dc, rs1);
1805     src2 = gen_load_fpr_D(dc, rs2);
1806     src0 = gen_load_fpr_D(dc, rd);
1807     dst = gen_dest_fpr_D(dc, rd);
1808
1809     gen(dst, src0, src1, src2);
1810
1811     gen_store_fpr_D(dc, rd, dst);
1812 }
1813 #endif
1814
1815 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1816                               void (*gen)(TCGv_ptr))
1817 {
1818     gen_op_load_fpr_QT1(QFPREG(rs));
1819
1820     gen(cpu_env);
1821
1822     gen_op_store_QT0_fpr(QFPREG(rd));
1823     gen_update_fprs_dirty(QFPREG(rd));
1824 }
1825
1826 #ifdef TARGET_SPARC64
1827 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1828                                  void (*gen)(TCGv_ptr))
1829 {
1830     gen_op_load_fpr_QT1(QFPREG(rs));
1831
1832     gen(cpu_env);
1833
1834     gen_op_store_QT0_fpr(QFPREG(rd));
1835     gen_update_fprs_dirty(QFPREG(rd));
1836 }
1837 #endif
1838
1839 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1840                                void (*gen)(TCGv_ptr))
1841 {
1842     gen_op_load_fpr_QT0(QFPREG(rs1));
1843     gen_op_load_fpr_QT1(QFPREG(rs2));
1844
1845     gen(cpu_env);
1846
1847     gen_op_store_QT0_fpr(QFPREG(rd));
1848     gen_update_fprs_dirty(QFPREG(rd));
1849 }
1850
1851 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1852                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1853 {
1854     TCGv_i64 dst;
1855     TCGv_i32 src1, src2;
1856
1857     src1 = gen_load_fpr_F(dc, rs1);
1858     src2 = gen_load_fpr_F(dc, rs2);
1859     dst = gen_dest_fpr_D(dc, rd);
1860
1861     gen(dst, cpu_env, src1, src2);
1862
1863     gen_store_fpr_D(dc, rd, dst);
1864 }
1865
1866 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1867                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1868 {
1869     TCGv_i64 src1, src2;
1870
1871     src1 = gen_load_fpr_D(dc, rs1);
1872     src2 = gen_load_fpr_D(dc, rs2);
1873
1874     gen(cpu_env, src1, src2);
1875
1876     gen_op_store_QT0_fpr(QFPREG(rd));
1877     gen_update_fprs_dirty(QFPREG(rd));
1878 }
1879
1880 #ifdef TARGET_SPARC64
1881 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1882                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1883 {
1884     TCGv_i64 dst;
1885     TCGv_i32 src;
1886
1887     src = gen_load_fpr_F(dc, rs);
1888     dst = gen_dest_fpr_D(dc, rd);
1889
1890     gen(dst, cpu_env, src);
1891
1892     gen_store_fpr_D(dc, rd, dst);
1893 }
1894 #endif
1895
1896 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1897                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1898 {
1899     TCGv_i64 dst;
1900     TCGv_i32 src;
1901
1902     src = gen_load_fpr_F(dc, rs);
1903     dst = gen_dest_fpr_D(dc, rd);
1904
1905     gen(dst, cpu_env, src);
1906
1907     gen_store_fpr_D(dc, rd, dst);
1908 }
1909
1910 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1911                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1912 {
1913     TCGv_i32 dst;
1914     TCGv_i64 src;
1915
1916     src = gen_load_fpr_D(dc, rs);
1917     dst = gen_dest_fpr_F(dc);
1918
1919     gen(dst, cpu_env, src);
1920
1921     gen_store_fpr_F(dc, rd, dst);
1922 }
1923
1924 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1925                               void (*gen)(TCGv_i32, TCGv_ptr))
1926 {
1927     TCGv_i32 dst;
1928
1929     gen_op_load_fpr_QT1(QFPREG(rs));
1930     dst = gen_dest_fpr_F(dc);
1931
1932     gen(dst, cpu_env);
1933
1934     gen_store_fpr_F(dc, rd, dst);
1935 }
1936
1937 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1938                               void (*gen)(TCGv_i64, TCGv_ptr))
1939 {
1940     TCGv_i64 dst;
1941
1942     gen_op_load_fpr_QT1(QFPREG(rs));
1943     dst = gen_dest_fpr_D(dc, rd);
1944
1945     gen(dst, cpu_env);
1946
1947     gen_store_fpr_D(dc, rd, dst);
1948 }
1949
1950 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1951                                  void (*gen)(TCGv_ptr, TCGv_i32))
1952 {
1953     TCGv_i32 src;
1954
1955     src = gen_load_fpr_F(dc, rs);
1956
1957     gen(cpu_env, src);
1958
1959     gen_op_store_QT0_fpr(QFPREG(rd));
1960     gen_update_fprs_dirty(QFPREG(rd));
1961 }
1962
1963 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1964                                  void (*gen)(TCGv_ptr, TCGv_i64))
1965 {
1966     TCGv_i64 src;
1967
1968     src = gen_load_fpr_D(dc, rs);
1969
1970     gen(cpu_env, src);
1971
1972     gen_op_store_QT0_fpr(QFPREG(rd));
1973     gen_update_fprs_dirty(QFPREG(rd));
1974 }
1975
1976 /* asi moves */
1977 #ifdef TARGET_SPARC64
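/* Resolve the ASI for a V9 alternate-space access: in the immediate form
   (i == 1) the ASI comes from the %asi register, otherwise it is taken
   from the 8-bit asi field in bits 12:5 of the instruction.  */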
1978 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1979 {
1980     int asi;
1981     TCGv_i32 r_asi;
1982
1983     if (IS_IMM) {
1984         r_asi = tcg_temp_new_i32();
1985         tcg_gen_mov_i32(r_asi, cpu_asi);
1986     } else {
1987         asi = GET_FIELD(insn, 19, 26);
1988         r_asi = tcg_const_i32(asi);
1989     }
1990     return r_asi;
1991 }
1992
1993 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1994                               int sign)
1995 {
1996     TCGv_i32 r_asi, r_size, r_sign;
1997
1998     r_asi = gen_get_asi(insn, addr);
1999     r_size = tcg_const_i32(size);
2000     r_sign = tcg_const_i32(sign);
2001     gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
2002     tcg_temp_free_i32(r_sign);
2003     tcg_temp_free_i32(r_size);
2004     tcg_temp_free_i32(r_asi);
2005 }
2006
2007 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2008 {
2009     TCGv_i32 r_asi, r_size;
2010
2011     r_asi = gen_get_asi(insn, addr);
2012     r_size = tcg_const_i32(size);
2013     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2014     tcg_temp_free_i32(r_size);
2015     tcg_temp_free_i32(r_asi);
2016 }
2017
2018 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
2019 {
2020     TCGv_i32 r_asi, r_size, r_rd;
2021
2022     r_asi = gen_get_asi(insn, addr);
2023     r_size = tcg_const_i32(size);
2024     r_rd = tcg_const_i32(rd);
2025     gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2026     tcg_temp_free_i32(r_rd);
2027     tcg_temp_free_i32(r_size);
2028     tcg_temp_free_i32(r_asi);
2029 }
2030
2031 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
2032 {
2033     TCGv_i32 r_asi, r_size, r_rd;
2034
2035     r_asi = gen_get_asi(insn, addr);
2036     r_size = tcg_const_i32(size);
2037     r_rd = tcg_const_i32(rd);
2038     gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2039     tcg_temp_free_i32(r_rd);
2040     tcg_temp_free_i32(r_size);
2041     tcg_temp_free_i32(r_asi);
2042 }
2043
2044 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2045 {
2046     TCGv_i32 r_asi, r_size, r_sign;
2047     TCGv_i64 t64 = tcg_temp_new_i64();
2048
2049     r_asi = gen_get_asi(insn, addr);
2050     r_size = tcg_const_i32(4);
2051     r_sign = tcg_const_i32(0);
2052     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2053     tcg_temp_free_i32(r_sign);
2054     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2055     tcg_temp_free_i32(r_size);
2056     tcg_temp_free_i32(r_asi);
2057     tcg_gen_trunc_i64_tl(dst, t64);
2058     tcg_temp_free_i64(t64);
2059 }
2060
2061 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2062                                 int insn, int rd)
2063 {
2064     TCGv_i32 r_asi, r_rd;
2065
2066     r_asi = gen_get_asi(insn, addr);
2067     r_rd = tcg_const_i32(rd);
2068     gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
2069     tcg_temp_free_i32(r_rd);
2070     tcg_temp_free_i32(r_asi);
2071 }
2072
2073 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2074                                 int insn, int rd)
2075 {
2076     TCGv_i32 r_asi, r_size;
2077     TCGv lo = gen_load_gpr(dc, rd + 1);
2078     TCGv_i64 t64 = tcg_temp_new_i64();
2079
2080     tcg_gen_concat_tl_i64(t64, lo, hi);
2081     r_asi = gen_get_asi(insn, addr);
2082     r_size = tcg_const_i32(8);
2083     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2084     tcg_temp_free_i32(r_size);
2085     tcg_temp_free_i32(r_asi);
2086     tcg_temp_free_i64(t64);
2087 }
2088
2089 static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
2090                                 TCGv val2, int insn, int rd)
2091 {
2092     TCGv val1 = gen_load_gpr(dc, rd);
2093     TCGv dst = gen_dest_gpr(dc, rd);
2094     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2095
2096     gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
2097     tcg_temp_free_i32(r_asi);
2098     gen_store_gpr(dc, rd, dst);
2099 }
2100
2101 #elif !defined(CONFIG_USER_ONLY)
2102
2103 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2104                               int sign)
2105 {
2106     TCGv_i32 r_asi, r_size, r_sign;
2107     TCGv_i64 t64 = tcg_temp_new_i64();
2108
2109     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2110     r_size = tcg_const_i32(size);
2111     r_sign = tcg_const_i32(sign);
2112     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2113     tcg_temp_free_i32(r_sign);
2114     tcg_temp_free_i32(r_size);
2115     tcg_temp_free_i32(r_asi);
2116     tcg_gen_trunc_i64_tl(dst, t64);
2117     tcg_temp_free_i64(t64);
2118 }
2119
2120 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2121 {
2122     TCGv_i32 r_asi, r_size;
2123     TCGv_i64 t64 = tcg_temp_new_i64();
2124
2125     tcg_gen_extu_tl_i64(t64, src);
2126     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2127     r_size = tcg_const_i32(size);
2128     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2129     tcg_temp_free_i32(r_size);
2130     tcg_temp_free_i32(r_asi);
2131     tcg_temp_free_i64(t64);
2132 }
2133
2134 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2135 {
2136     TCGv_i32 r_asi, r_size, r_sign;
2137     TCGv_i64 r_val, t64;
2138
2139     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2140     r_size = tcg_const_i32(4);
2141     r_sign = tcg_const_i32(0);
2142     t64 = tcg_temp_new_i64();
2143     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2144     tcg_temp_free_i32(r_sign);
2145     r_val = tcg_temp_new_i64();
2146     tcg_gen_extu_tl_i64(r_val, src);
2147     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2148     tcg_temp_free_i64(r_val);
2149     tcg_temp_free_i32(r_size);
2150     tcg_temp_free_i32(r_asi);
2151     tcg_gen_trunc_i64_tl(dst, t64);
2152     tcg_temp_free_i64(t64);
2153 }
2154
2155 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2156                                 int insn, int rd)
2157 {
2158     TCGv_i32 r_asi, r_size, r_sign;
2159     TCGv t;
2160     TCGv_i64 t64;
2161
2162     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2163     r_size = tcg_const_i32(8);
2164     r_sign = tcg_const_i32(0);
2165     t64 = tcg_temp_new_i64();
2166     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2167     tcg_temp_free_i32(r_sign);
2168     tcg_temp_free_i32(r_size);
2169     tcg_temp_free_i32(r_asi);
2170
2171     t = gen_dest_gpr(dc, rd + 1);
2172     tcg_gen_trunc_i64_tl(t, t64);
2173     gen_store_gpr(dc, rd + 1, t);
2174
2175     tcg_gen_shri_i64(t64, t64, 32);
2176     tcg_gen_trunc_i64_tl(hi, t64);
2177     tcg_temp_free_i64(t64);
2178     gen_store_gpr(dc, rd, hi);
2179 }
2180
2181 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2182                                 int insn, int rd)
2183 {
2184     TCGv_i32 r_asi, r_size;
2185     TCGv lo = gen_load_gpr(dc, rd + 1);
2186     TCGv_i64 t64 = tcg_temp_new_i64();
2187
2188     tcg_gen_concat_tl_i64(t64, lo, hi);
2189     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2190     r_size = tcg_const_i32(8);
2191     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2192     tcg_temp_free_i32(r_size);
2193     tcg_temp_free_i32(r_asi);
2194     tcg_temp_free_i64(t64);
2195 }
2196 #endif
2197
2198 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2199 static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
2200                                TCGv val2, int insn, int rd)
2201 {
2202     TCGv val1 = gen_load_gpr(dc, rd);
2203     TCGv dst = gen_dest_gpr(dc, rd);
2204 #ifdef TARGET_SPARC64
2205     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2206 #else
2207     TCGv_i32 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2208 #endif
2209
2210     gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
2211     tcg_temp_free_i32(r_asi);
2212     gen_store_gpr(dc, rd, dst);
2213 }
2214
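/* ldstub through an explicit ASI: implemented as a byte load via the ASI
   helper followed by a store of 0xff to the same address.  */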
2215 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2216 {
2217     TCGv_i64 r_val;
2218     TCGv_i32 r_asi, r_size;
2219
2220     gen_ld_asi(dst, addr, insn, 1, 0);
2221
2222     r_val = tcg_const_i64(0xffULL);
2223     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2224     r_size = tcg_const_i32(1);
2225     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2226     tcg_temp_free_i32(r_size);
2227     tcg_temp_free_i32(r_asi);
2228     tcg_temp_free_i64(r_val);
2229 }
2230 #endif
2231
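/* Operand fetch helpers: get_src1 always reads rs1, while get_src2 yields
   either the sign-extended 13-bit immediate or the rs2 register, depending
   on the i bit (IS_IMM).  */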
2232 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2233 {
2234     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2235     return gen_load_gpr(dc, rs1);
2236 }
2237
2238 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2239 {
2240     if (IS_IMM) { /* immediate */
2241         target_long simm = GET_FIELDs(insn, 19, 31);
2242         TCGv t = get_temp_tl(dc);
2243         tcg_gen_movi_tl(t, simm);
2244         return t;
2245     } else {      /* register */
2246         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2247         return gen_load_gpr(dc, rs2);
2248     }
2249 }
2250
2251 #ifdef TARGET_SPARC64
2252 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2253 {
2254     TCGv_i32 c32, zero, dst, s1, s2;
2255
2256     /* We have two choices here: extend the 32-bit data and use movcond_i64,
2257        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2258        the latter.  */
2259     c32 = tcg_temp_new_i32();
2260     if (cmp->is_bool) {
2261         tcg_gen_trunc_i64_i32(c32, cmp->c1);
2262     } else {
2263         TCGv_i64 c64 = tcg_temp_new_i64();
2264         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2265         tcg_gen_trunc_i64_i32(c32, c64);
2266         tcg_temp_free_i64(c64);
2267     }
2268
2269     s1 = gen_load_fpr_F(dc, rs);
2270     s2 = gen_load_fpr_F(dc, rd);
2271     dst = gen_dest_fpr_F(dc);
2272     zero = tcg_const_i32(0);
2273
2274     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2275
2276     tcg_temp_free_i32(c32);
2277     tcg_temp_free_i32(zero);
2278     gen_store_fpr_F(dc, rd, dst);
2279 }
2280
2281 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2282 {
2283     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2284     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2285                         gen_load_fpr_D(dc, rs),
2286                         gen_load_fpr_D(dc, rd));
2287     gen_store_fpr_D(dc, rd, dst);
2288 }
2289
2290 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2291 {
2292     int qd = QFPREG(rd);
2293     int qs = QFPREG(rs);
2294
2295     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2296                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2297     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2298                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2299
2300     gen_update_fprs_dirty(qd);
2301 }
2302
2303 #ifndef CONFIG_USER_ONLY
2304 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2305 {
2306     TCGv_i32 r_tl = tcg_temp_new_i32();
2307
2308     /* load env->tl into r_tl */
2309     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2310
2311     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be a power of 2 minus 1 */
2312     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2313
2314     /* calculate offset to current trap state from env->ts, reuse r_tl */
2315     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2316     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2317
2318     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2319     {
2320         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2321         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2322         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2323         tcg_temp_free_ptr(r_tl_tmp);
2324     }
2325
2326     tcg_temp_free_i32(r_tl);
2327 }
2328 #endif
2329
2330 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2331                      int width, bool cc, bool left)
2332 {
2333     TCGv lo1, lo2, t1, t2;
2334     uint64_t amask, tabl, tabr;
2335     int shift, imask, omask;
2336
2337     if (cc) {
2338         tcg_gen_mov_tl(cpu_cc_src, s1);
2339         tcg_gen_mov_tl(cpu_cc_src2, s2);
2340         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2341         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2342         dc->cc_op = CC_OP_SUB;
2343     }
2344
2345     /* Theory of operation: there are two tables, left and right (not to
2346        be confused with the left and right versions of the opcode).  These
2347        are indexed by the low 3 bits of the inputs.  To make things "easy",
2348        these tables are loaded into two constants, TABL and TABR below.
2349        The operation index = (input & imask) << shift calculates the index
2350        into the constant, while val = (table >> index) & omask calculates
2351        the value we're looking for.  */
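    /* Worked example: width 8 with left set gives tabl = 0x80c0e0f0f8fcfeff;
       if (s1 & 7) == 2, then index = 2 << 3 = 16 and
       (tabl >> 16) & 0xff == 0xfc.  */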
2352     switch (width) {
2353     case 8:
2354         imask = 0x7;
2355         shift = 3;
2356         omask = 0xff;
2357         if (left) {
2358             tabl = 0x80c0e0f0f8fcfeffULL;
2359             tabr = 0xff7f3f1f0f070301ULL;
2360         } else {
2361             tabl = 0x0103070f1f3f7fffULL;
2362             tabr = 0xfffefcf8f0e0c080ULL;
2363         }
2364         break;
2365     case 16:
2366         imask = 0x6;
2367         shift = 1;
2368         omask = 0xf;
2369         if (left) {
2370             tabl = 0x8cef;
2371             tabr = 0xf731;
2372         } else {
2373             tabl = 0x137f;
2374             tabr = 0xfec8;
2375         }
2376         break;
2377     case 32:
2378         imask = 0x4;
2379         shift = 0;
2380         omask = 0x3;
2381         if (left) {
2382             tabl = (2 << 2) | 3;
2383             tabr = (3 << 2) | 1;
2384         } else {
2385             tabl = (1 << 2) | 3;
2386             tabr = (3 << 2) | 2;
2387         }
2388         break;
2389     default:
2390         abort();
2391     }
2392
2393     lo1 = tcg_temp_new();
2394     lo2 = tcg_temp_new();
2395     tcg_gen_andi_tl(lo1, s1, imask);
2396     tcg_gen_andi_tl(lo2, s2, imask);
2397     tcg_gen_shli_tl(lo1, lo1, shift);
2398     tcg_gen_shli_tl(lo2, lo2, shift);
2399
2400     t1 = tcg_const_tl(tabl);
2401     t2 = tcg_const_tl(tabr);
2402     tcg_gen_shr_tl(lo1, t1, lo1);
2403     tcg_gen_shr_tl(lo2, t2, lo2);
2404     tcg_gen_andi_tl(dst, lo1, omask);
2405     tcg_gen_andi_tl(lo2, lo2, omask);
2406
2407     amask = -8;
2408     if (AM_CHECK(dc)) {
2409         amask &= 0xffffffffULL;
2410     }
2411     tcg_gen_andi_tl(s1, s1, amask);
2412     tcg_gen_andi_tl(s2, s2, amask);
2413
2414     /* We want to compute
2415         dst = (s1 == s2 ? lo1 : lo1 & lo2).
2416        We've already done dst = lo1, so this reduces to
2417         dst &= (s1 == s2 ? -1 : lo2)
2418        Which we perform by
2419         lo2 |= -(s1 == s2)
2420         dst &= lo2
2421     */
2422     tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2423     tcg_gen_neg_tl(t1, t1);
2424     tcg_gen_or_tl(lo2, lo2, t1);
2425     tcg_gen_and_tl(dst, dst, lo2);
2426
2427     tcg_temp_free(lo1);
2428     tcg_temp_free(lo2);
2429     tcg_temp_free(t1);
2430     tcg_temp_free(t2);
2431 }
2432
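/* alignaddr: dst = (s1 + s2) & ~7, while the low three bits of the sum
   (negated when left is set) are deposited into the low bits of %gsr,
   from where gen_faligndata below picks them up.  */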
2433 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2434 {
2435     TCGv tmp = tcg_temp_new();
2436
2437     tcg_gen_add_tl(tmp, s1, s2);
2438     tcg_gen_andi_tl(dst, tmp, -8);
2439     if (left) {
2440         tcg_gen_neg_tl(tmp, tmp);
2441     }
2442     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2443
2444     tcg_temp_free(tmp);
2445 }
2446
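/* faligndata: dst = (s1 << 8*align) | (s2 >> (64 - 8*align)), with align
   taken from the low three bits of %gsr; i.e. the 8-byte window starting
   at byte offset align within the concatenation s1:s2.  */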
2447 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2448 {
2449     TCGv t1, t2, shift;
2450
2451     t1 = tcg_temp_new();
2452     t2 = tcg_temp_new();
2453     shift = tcg_temp_new();
2454
2455     tcg_gen_andi_tl(shift, gsr, 7);
2456     tcg_gen_shli_tl(shift, shift, 3);
2457     tcg_gen_shl_tl(t1, s1, shift);
2458
2459     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2460        shift of (up to 63) followed by a constant shift of 1.  */
2461     tcg_gen_xori_tl(shift, shift, 63);
2462     tcg_gen_shr_tl(t2, s2, shift);
2463     tcg_gen_shri_tl(t2, t2, 1);
2464
2465     tcg_gen_or_tl(dst, t1, t2);
2466
2467     tcg_temp_free(t1);
2468     tcg_temp_free(t2);
2469     tcg_temp_free(shift);
2470 }
2471 #endif
2472
2473 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
2474     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2475         goto illegal_insn;
2476 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2477     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2478         goto nfpu_insn;
2479
2480 /* before an instruction, dc->pc must be static */
2481 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2482 {
2483     unsigned int opc, rs1, rs2, rd;
2484     TCGv cpu_src1, cpu_src2;
2485     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2486     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2487     target_long simm;
2488
2489     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2490         tcg_gen_debug_insn_start(dc->pc);
2491     }
2492
2493     opc = GET_FIELD(insn, 0, 1);
2494     rd = GET_FIELD(insn, 2, 6);
2495
2496     switch (opc) {
2497     case 0:                     /* branches/sethi */
2498         {
2499             unsigned int xop = GET_FIELD(insn, 7, 9);
2500             int32_t target;
2501             switch (xop) {
2502 #ifdef TARGET_SPARC64
2503             case 0x1:           /* V9 BPcc */
2504                 {
2505                     int cc;
2506
2507                     target = GET_FIELD_SP(insn, 0, 18);
2508                     target = sign_extend(target, 19);
2509                     target <<= 2;
2510                     cc = GET_FIELD_SP(insn, 20, 21);
2511                     if (cc == 0)
2512                         do_branch(dc, target, insn, 0);
2513                     else if (cc == 2)
2514                         do_branch(dc, target, insn, 1);
2515                     else
2516                         goto illegal_insn;
2517                     goto jmp_insn;
2518                 }
2519             case 0x3:           /* V9 BPr */
2520                 {
2521                     target = GET_FIELD_SP(insn, 0, 13) |
2522                         (GET_FIELD_SP(insn, 20, 21) << 14);
2523                     target = sign_extend(target, 16);
2524                     target <<= 2;
2525                     cpu_src1 = get_src1(dc, insn);
2526                     do_branch_reg(dc, target, insn, cpu_src1);
2527                     goto jmp_insn;
2528                 }
2529             case 0x5:           /* V9 FBPcc */
2530                 {
2531                     int cc = GET_FIELD_SP(insn, 20, 21);
2532                     if (gen_trap_ifnofpu(dc)) {
2533                         goto jmp_insn;
2534                     }
2535                     target = GET_FIELD_SP(insn, 0, 18);
2536                     target = sign_extend(target, 19);
2537                     target <<= 2;
2538                     do_fbranch(dc, target, insn, cc);
2539                     goto jmp_insn;
2540                 }
2541 #else
2542             case 0x7:           /* CBN+x */
2543                 {
2544                     goto ncp_insn;
2545                 }
2546 #endif
2547             case 0x2:           /* BN+x */
2548                 {
2549                     target = GET_FIELD(insn, 10, 31);
2550                     target = sign_extend(target, 22);
2551                     target <<= 2;
2552                     do_branch(dc, target, insn, 0);
2553                     goto jmp_insn;
2554                 }
2555             case 0x6:           /* FBN+x */
2556                 {
2557                     if (gen_trap_ifnofpu(dc)) {
2558                         goto jmp_insn;
2559                     }
2560                     target = GET_FIELD(insn, 10, 31);
2561                     target = sign_extend(target, 22);
2562                     target <<= 2;
2563                     do_fbranch(dc, target, insn, 0);
2564                     goto jmp_insn;
2565                 }
2566             case 0x4:           /* SETHI */
2567                 /* Special-case %g0 because that's the canonical nop.  */
2568                 if (rd) {
2569                     uint32_t value = GET_FIELD(insn, 10, 31);
2570                     TCGv t = gen_dest_gpr(dc, rd);
2571                     tcg_gen_movi_tl(t, value << 10);
2572                     gen_store_gpr(dc, rd, t);
2573                 }
2574                 break;
2575             case 0x0:           /* UNIMPL */
2576             default:
2577                 goto illegal_insn;
2578             }
2579             break;
2580         }
2581         break;
2582     case 1:                     /* CALL */
2583         {
2584             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2585             TCGv o7 = gen_dest_gpr(dc, 15);
2586
2587             tcg_gen_movi_tl(o7, dc->pc);
2588             gen_store_gpr(dc, 15, o7);
2589             target += dc->pc;
2590             gen_mov_pc_npc(dc);
2591 #ifdef TARGET_SPARC64
2592             if (unlikely(AM_CHECK(dc))) {
2593                 target &= 0xffffffffULL;
2594             }
2595 #endif
2596             dc->npc = target;
2597         }
2598         goto jmp_insn;
2599     case 2:                     /* FPU & Logical Operations */
2600         {
2601             unsigned int xop = GET_FIELD(insn, 7, 12);
2602             TCGv cpu_dst = get_temp_tl(dc);
2603             TCGv cpu_tmp0;
2604
2605             if (xop == 0x3a) {  /* generate trap */
2606                 int cond = GET_FIELD(insn, 3, 6);
2607                 TCGv_i32 trap;
2608                 int l1 = -1, mask;
2609
2610                 if (cond == 0) {
2611                     /* Trap never.  */
2612                     break;
2613                 }
2614
2615                 save_state(dc);
2616
2617                 if (cond != 8) {
2618                     /* Conditional trap.  */
2619                     DisasCompare cmp;
2620 #ifdef TARGET_SPARC64
2621                     /* V9 icc/xcc */
2622                     int cc = GET_FIELD_SP(insn, 11, 12);
2623                     if (cc == 0) {
2624                         gen_compare(&cmp, 0, cond, dc);
2625                     } else if (cc == 2) {
2626                         gen_compare(&cmp, 1, cond, dc);
2627                     } else {
2628                         goto illegal_insn;
2629                     }
2630 #else
2631                     gen_compare(&cmp, 0, cond, dc);
2632 #endif
2633                     l1 = gen_new_label();
2634                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2635                                       cmp.c1, cmp.c2, l1);
2636                     free_compare(&cmp);
2637                 }
2638
2639                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2640                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2641
2642                 /* Don't use the normal temporaries, as they may well have
2643                    gone out of scope with the branch above.  While we're
2644                    doing that we might as well pre-truncate to 32-bit.  */
2645                 trap = tcg_temp_new_i32();
2646
2647                 rs1 = GET_FIELD_SP(insn, 14, 18);
2648                 if (IS_IMM) {
2649                     rs2 = GET_FIELD_SP(insn, 0, 6);
2650                     if (rs1 == 0) {
2651                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2652                         /* Signal that the trap value is fully constant.  */
2653                         mask = 0;
2654                     } else {
2655                         TCGv t1 = gen_load_gpr(dc, rs1);
2656                         tcg_gen_trunc_tl_i32(trap, t1);
2657                         tcg_gen_addi_i32(trap, trap, rs2);
2658                     }
2659                 } else {
2660                     TCGv t1, t2;
2661                     rs2 = GET_FIELD_SP(insn, 0, 4);
2662                     t1 = gen_load_gpr(dc, rs1);
2663                     t2 = gen_load_gpr(dc, rs2);
2664                     tcg_gen_add_tl(t1, t1, t2);
2665                     tcg_gen_trunc_tl_i32(trap, t1);
2666                 }
2667                 if (mask != 0) {
2668                     tcg_gen_andi_i32(trap, trap, mask);
2669                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
2670                 }
2671
2672                 gen_helper_raise_exception(cpu_env, trap);
2673                 tcg_temp_free_i32(trap);
2674
2675                 if (cond == 8) {
2676                     /* An unconditional trap ends the TB.  */
2677                     dc->is_br = 1;
2678                     goto jmp_insn;
2679                 } else {
2680                     /* A conditional trap falls through to the next insn.  */
2681                     gen_set_label(l1);
2682                     break;
2683                 }
2684             } else if (xop == 0x28) {
2685                 rs1 = GET_FIELD(insn, 13, 17);
2686                 switch (rs1) {
2687                 case 0: /* rdy */
2688 #ifndef TARGET_SPARC64
2689                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2690                                        manual, rdy on the microSPARC
2691                                        II */
2692                 case 0x0f:          /* stbar in the SPARCv8 manual,
2693                                        rdy on the microSPARC II */
2694                 case 0x10 ... 0x1f: /* implementation-dependent in the
2695                                        SPARCv8 manual, rdy on the
2696                                        microSPARC II */
2697                     /* Read Asr17 */
2698                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2699                         TCGv t = gen_dest_gpr(dc, rd);
2700                         /* Read Asr17 for a Leon3 monoprocessor */
2701                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2702                         gen_store_gpr(dc, rd, t);
2703                         break;
2704                     }
2705 #endif
2706                     gen_store_gpr(dc, rd, cpu_y);
2707                     break;
2708 #ifdef TARGET_SPARC64
2709                 case 0x2: /* V9 rdccr */
2710                     update_psr(dc);
2711                     gen_helper_rdccr(cpu_dst, cpu_env);
2712                     gen_store_gpr(dc, rd, cpu_dst);
2713                     break;
2714                 case 0x3: /* V9 rdasi */
2715                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2716                     gen_store_gpr(dc, rd, cpu_dst);
2717                     break;
2718                 case 0x4: /* V9 rdtick */
2719                     {
2720                         TCGv_ptr r_tickptr;
2721
2722                         r_tickptr = tcg_temp_new_ptr();
2723                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2724                                        offsetof(CPUSPARCState, tick));
2725                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2726                         tcg_temp_free_ptr(r_tickptr);
2727                         gen_store_gpr(dc, rd, cpu_dst);
2728                     }
2729                     break;
2730                 case 0x5: /* V9 rdpc */
2731                     {
2732                         TCGv t = gen_dest_gpr(dc, rd);
2733                         if (unlikely(AM_CHECK(dc))) {
2734                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2735                         } else {
2736                             tcg_gen_movi_tl(t, dc->pc);
2737                         }
2738                         gen_store_gpr(dc, rd, t);
2739                     }
2740                     break;
2741                 case 0x6: /* V9 rdfprs */
2742                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2743                     gen_store_gpr(dc, rd, cpu_dst);
2744                     break;
2745                 case 0xf: /* V9 membar */
2746                     break; /* no effect */
2747                 case 0x13: /* Graphics Status */
2748                     if (gen_trap_ifnofpu(dc)) {
2749                         goto jmp_insn;
2750                     }
2751                     gen_store_gpr(dc, rd, cpu_gsr);
2752                     break;
2753                 case 0x16: /* Softint */
2754                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2755                     gen_store_gpr(dc, rd, cpu_dst);
2756                     break;
2757                 case 0x17: /* Tick compare */
2758                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
2759                     break;
2760                 case 0x18: /* System tick */
2761                     {
2762                         TCGv_ptr r_tickptr;
2763
2764                         r_tickptr = tcg_temp_new_ptr();
2765                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2766                                        offsetof(CPUSPARCState, stick));
2767                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2768                         tcg_temp_free_ptr(r_tickptr);
2769                         gen_store_gpr(dc, rd, cpu_dst);
2770                     }
2771                     break;
2772                 case 0x19: /* System tick compare */
2773                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
2774                     break;
2775                 case 0x10: /* Performance Control */
2776                 case 0x11: /* Performance Instrumentation Counter */
2777                 case 0x12: /* Dispatch Control */
2778                 case 0x14: /* Softint set, WO */
2779                 case 0x15: /* Softint clear, WO */
2780 #endif
2781                 default:
2782                     goto illegal_insn;
2783                 }
2784 #if !defined(CONFIG_USER_ONLY)
2785             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2786 #ifndef TARGET_SPARC64
2787                 if (!supervisor(dc)) {
2788                     goto priv_insn;
2789                 }
2790                 update_psr(dc);
2791                 gen_helper_rdpsr(cpu_dst, cpu_env);
2792 #else
2793                 CHECK_IU_FEATURE(dc, HYPV);
2794                 if (!hypervisor(dc))
2795                     goto priv_insn;
2796                 rs1 = GET_FIELD(insn, 13, 17);
2797                 switch (rs1) {
2798                 case 0: // hpstate
2799                     // gen_op_rdhpstate();
2800                     break;
2801                 case 1: // htstate
2802                     // gen_op_rdhtstate();
2803                     break;
2804                 case 3: // hintp
2805                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2806                     break;
2807                 case 5: // htba
2808                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2809                     break;
2810                 case 6: // hver
2811                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2812                     break;
2813                 case 31: // hstick_cmpr
2814                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2815                     break;
2816                 default:
2817                     goto illegal_insn;
2818                 }
2819 #endif
2820                 gen_store_gpr(dc, rd, cpu_dst);
2821                 break;
2822             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2823                 if (!supervisor(dc)) {
2824                     goto priv_insn;
2825                 }
2826                 cpu_tmp0 = get_temp_tl(dc);
2827 #ifdef TARGET_SPARC64
2828                 rs1 = GET_FIELD(insn, 13, 17);
2829                 switch (rs1) {
2830                 case 0: // tpc
2831                     {
2832                         TCGv_ptr r_tsptr;
2833
2834                         r_tsptr = tcg_temp_new_ptr();
2835                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2836                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2837                                       offsetof(trap_state, tpc));
2838                         tcg_temp_free_ptr(r_tsptr);
2839                     }
2840                     break;
2841                 case 1: // tnpc
2842                     {
2843                         TCGv_ptr r_tsptr;
2844
2845                         r_tsptr = tcg_temp_new_ptr();
2846                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2847                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2848                                       offsetof(trap_state, tnpc));
2849                         tcg_temp_free_ptr(r_tsptr);
2850                     }
2851                     break;
2852                 case 2: // tstate
2853                     {
2854                         TCGv_ptr r_tsptr;
2855
2856                         r_tsptr = tcg_temp_new_ptr();
2857                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2858                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2859                                       offsetof(trap_state, tstate));
2860                         tcg_temp_free_ptr(r_tsptr);
2861                     }
2862                     break;
2863                 case 3: // tt
2864                     {
2865                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
2866
2867                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2868                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
2869                                          offsetof(trap_state, tt));
2870                         tcg_temp_free_ptr(r_tsptr);
2871                     }
2872                     break;
2873                 case 4: // tick
2874                     {
2875                         TCGv_ptr r_tickptr;
2876
2877                         r_tickptr = tcg_temp_new_ptr();
2878                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2879                                        offsetof(CPUSPARCState, tick));
2880                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2881                         tcg_temp_free_ptr(r_tickptr);
2882                     }
2883                     break;
2884                 case 5: // tba
2885                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2886                     break;
2887                 case 6: // pstate
2888                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2889                                      offsetof(CPUSPARCState, pstate));
2890                     break;
2891                 case 7: // tl
2892                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2893                                      offsetof(CPUSPARCState, tl));
2894                     break;
2895                 case 8: // pil
2896                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2897                                      offsetof(CPUSPARCState, psrpil));
2898                     break;
2899                 case 9: // cwp
2900                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
2901                     break;
2902                 case 10: // cansave
2903                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2904                                      offsetof(CPUSPARCState, cansave));
2905                     break;
2906                 case 11: // canrestore
2907                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2908                                      offsetof(CPUSPARCState, canrestore));
2909                     break;
2910                 case 12: // cleanwin
2911                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2912                                      offsetof(CPUSPARCState, cleanwin));
2913                     break;
2914                 case 13: // otherwin
2915                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2916                                      offsetof(CPUSPARCState, otherwin));
2917                     break;
2918                 case 14: // wstate
2919                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2920                                      offsetof(CPUSPARCState, wstate));
2921                     break;
2922                 case 16: // UA2005 gl
2923                     CHECK_IU_FEATURE(dc, GL);
2924                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2925                                      offsetof(CPUSPARCState, gl));
2926                     break;
2927                 case 26: // UA2005 strand status
2928                     CHECK_IU_FEATURE(dc, HYPV);
2929                     if (!hypervisor(dc))
2930                         goto priv_insn;
2931                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2932                     break;
2933                 case 31: // ver
2934                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2935                     break;
2936                 case 15: // fq
2937                 default:
2938                     goto illegal_insn;
2939                 }
2940 #else
2941                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2942 #endif
2943                 gen_store_gpr(dc, rd, cpu_tmp0);
2944                 break;
2945             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2946 #ifdef TARGET_SPARC64
2947                 save_state(dc);
2948                 gen_helper_flushw(cpu_env);
2949 #else
2950                 if (!supervisor(dc))
2951                     goto priv_insn;
2952                 gen_store_gpr(dc, rd, cpu_tbr);
2953 #endif
2954                 break;
2955 #endif
2956             } else if (xop == 0x34) {   /* FPU Operations (FPop1) */
2957                 if (gen_trap_ifnofpu(dc)) {
2958                     goto jmp_insn;
2959                 }
2960                 gen_op_clear_ieee_excp_and_FTT();
2961                 rs1 = GET_FIELD(insn, 13, 17);
2962                 rs2 = GET_FIELD(insn, 27, 31);
2963                 xop = GET_FIELD(insn, 18, 26);
2964                 save_state(dc);
2965                 switch (xop) {
2966                 case 0x1: /* fmovs */
2967                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2968                     gen_store_fpr_F(dc, rd, cpu_src1_32);
2969                     break;
2970                 case 0x5: /* fnegs */
2971                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2972                     break;
2973                 case 0x9: /* fabss */
2974                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2975                     break;
2976                 case 0x29: /* fsqrts */
2977                     CHECK_FPU_FEATURE(dc, FSQRT);
2978                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2979                     break;
2980                 case 0x2a: /* fsqrtd */
2981                     CHECK_FPU_FEATURE(dc, FSQRT);
2982                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2983                     break;
2984                 case 0x2b: /* fsqrtq */
2985                     CHECK_FPU_FEATURE(dc, FLOAT128);
2986                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2987                     break;
2988                 case 0x41: /* fadds */
2989                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2990                     break;
2991                 case 0x42: /* faddd */
2992                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2993                     break;
2994                 case 0x43: /* faddq */
2995                     CHECK_FPU_FEATURE(dc, FLOAT128);
2996                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2997                     break;
2998                 case 0x45: /* fsubs */
2999                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3000                     break;
3001                 case 0x46: /* fsubd */
3002                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3003                     break;
3004                 case 0x47: /* fsubq */
3005                     CHECK_FPU_FEATURE(dc, FLOAT128);
3006                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3007                     break;
3008                 case 0x49: /* fmuls */
3009                     CHECK_FPU_FEATURE(dc, FMUL);
3010                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3011                     break;
3012                 case 0x4a: /* fmuld */
3013                     CHECK_FPU_FEATURE(dc, FMUL);
3014                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3015                     break;
3016                 case 0x4b: /* fmulq */
3017                     CHECK_FPU_FEATURE(dc, FLOAT128);
3018                     CHECK_FPU_FEATURE(dc, FMUL);
3019                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3020                     break;
3021                 case 0x4d: /* fdivs */
3022                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3023                     break;
3024                 case 0x4e: /* fdivd */
3025                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3026                     break;
3027                 case 0x4f: /* fdivq */
3028                     CHECK_FPU_FEATURE(dc, FLOAT128);
3029                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3030                     break;
3031                 case 0x69: /* fsmuld */
3032                     CHECK_FPU_FEATURE(dc, FSMULD);
3033                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3034                     break;
3035                 case 0x6e: /* fdmulq */
3036                     CHECK_FPU_FEATURE(dc, FLOAT128);
3037                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3038                     break;
3039                 case 0xc4: /* fitos */
3040                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3041                     break;
3042                 case 0xc6: /* fdtos */
3043                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3044                     break;
3045                 case 0xc7: /* fqtos */
3046                     CHECK_FPU_FEATURE(dc, FLOAT128);
3047                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3048                     break;
3049                 case 0xc8: /* fitod */
3050                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3051                     break;
3052                 case 0xc9: /* fstod */
3053                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3054                     break;
3055                 case 0xcb: /* fqtod */
3056                     CHECK_FPU_FEATURE(dc, FLOAT128);
3057                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3058                     break;
3059                 case 0xcc: /* fitoq */
3060                     CHECK_FPU_FEATURE(dc, FLOAT128);
3061                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3062                     break;
3063                 case 0xcd: /* fstoq */
3064                     CHECK_FPU_FEATURE(dc, FLOAT128);
3065                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3066                     break;
3067                 case 0xce: /* fdtoq */
3068                     CHECK_FPU_FEATURE(dc, FLOAT128);
3069                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3070                     break;
3071                 case 0xd1: /* fstoi */
3072                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3073                     break;
3074                 case 0xd2: /* fdtoi */
3075                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3076                     break;
3077                 case 0xd3: /* fqtoi */
3078                     CHECK_FPU_FEATURE(dc, FLOAT128);
3079                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3080                     break;
3081 #ifdef TARGET_SPARC64
3082                 case 0x2: /* V9 fmovd */
3083                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3084                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3085                     break;
3086                 case 0x3: /* V9 fmovq */
3087                     CHECK_FPU_FEATURE(dc, FLOAT128);
3088                     gen_move_Q(rd, rs2);
3089                     break;
3090                 case 0x6: /* V9 fnegd */
3091                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3092                     break;
3093                 case 0x7: /* V9 fnegq */
3094                     CHECK_FPU_FEATURE(dc, FLOAT128);
3095                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3096                     break;
3097                 case 0xa: /* V9 fabsd */
3098                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3099                     break;
3100                 case 0xb: /* V9 fabsq */
3101                     CHECK_FPU_FEATURE(dc, FLOAT128);
3102                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3103                     break;
3104                 case 0x81: /* V9 fstox */
3105                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3106                     break;
3107                 case 0x82: /* V9 fdtox */
3108                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3109                     break;
3110                 case 0x83: /* V9 fqtox */
3111                     CHECK_FPU_FEATURE(dc, FLOAT128);
3112                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3113                     break;
3114                 case 0x84: /* V9 fxtos */
3115                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3116                     break;
3117                 case 0x88: /* V9 fxtod */
3118                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3119                     break;
3120                 case 0x8c: /* V9 fxtoq */
3121                     CHECK_FPU_FEATURE(dc, FLOAT128);
3122                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3123                     break;
3124 #endif
3125                 default:
3126                     goto illegal_insn;
3127                 }
3128             } else if (xop == 0x35) {   /* FPU Operations (FPop2) */
3129 #ifdef TARGET_SPARC64
3130                 int cond;
3131 #endif
3132                 if (gen_trap_ifnofpu(dc)) {
3133                     goto jmp_insn;
3134                 }
3135                 gen_op_clear_ieee_excp_and_FTT();
3136                 rs1 = GET_FIELD(insn, 13, 17);
3137                 rs2 = GET_FIELD(insn, 27, 31);
3138                 xop = GET_FIELD(insn, 18, 26);
3139                 save_state(dc);
3140
3141 #ifdef TARGET_SPARC64
3142 #define FMOVR(sz)                                                  \
3143                 do {                                               \
3144                     DisasCompare cmp;                              \
3145                     cond = GET_FIELD_SP(insn, 10, 12);             \
3146                     cpu_src1 = get_src1(dc, insn);                 \
3147                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3148                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3149                     free_compare(&cmp);                            \
3150                 } while (0)
3151
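                /* FMOVR: FP conditional move on the value of an integer
                   register (fmov{s,d,q}r); the condition is evaluated by
                   gen_compare_reg on rs1.  */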
3152                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3153                     FMOVR(s);
3154                     break;
3155                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3156                     FMOVR(d);
3157                     break;
3158                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3159                     CHECK_FPU_FEATURE(dc, FLOAT128);
3160                     FMOVR(q);
3161                     break;
3162                 }
3163 #undef FMOVR
3164 #endif
3165                 switch (xop) {
3166 #ifdef TARGET_SPARC64
3167 #define FMOVCC(fcc, sz)                                                 \
3168                     do {                                                \
3169                         DisasCompare cmp;                               \
3170                         cond = GET_FIELD_SP(insn, 14, 17);              \
3171                         gen_fcompare(&cmp, fcc, cond);                  \
3172                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3173                         free_compare(&cmp);                             \
3174                     } while (0)
3175
3176                     case 0x001: /* V9 fmovscc %fcc0 */
3177                         FMOVCC(0, s);
3178                         break;
3179                     case 0x002: /* V9 fmovdcc %fcc0 */
3180                         FMOVCC(0, d);
3181                         break;
3182                     case 0x003: /* V9 fmovqcc %fcc0 */
3183                         CHECK_FPU_FEATURE(dc, FLOAT128);
3184                         FMOVCC(0, q);
3185                         break;
3186                     case 0x041: /* V9 fmovscc %fcc1 */
3187                         FMOVCC(1, s);
3188                         break;
3189                     case 0x042: /* V9 fmovdcc %fcc1 */
3190                         FMOVCC(1, d);
3191                         break;
3192                     case 0x043: /* V9 fmovqcc %fcc1 */
3193                         CHECK_FPU_FEATURE(dc, FLOAT128);
3194                         FMOVCC(1, q);
3195                         break;
3196                     case 0x081: /* V9 fmovscc %fcc2 */
3197                         FMOVCC(2, s);
3198                         break;
3199                     case 0x082: /* V9 fmovdcc %fcc2 */
3200                         FMOVCC(2, d);
3201                         break;
3202                     case 0x083: /* V9 fmovqcc %fcc2 */
3203                         CHECK_FPU_FEATURE(dc, FLOAT128);
3204                         FMOVCC(2, q);
3205                         break;
3206                     case 0x0c1: /* V9 fmovscc %fcc3 */
3207                         FMOVCC(3, s);
3208                         break;
3209                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3210                         FMOVCC(3, d);
3211                         break;
3212                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3213                         CHECK_FPU_FEATURE(dc, FLOAT128);
3214                         FMOVCC(3, q);
3215                         break;
3216 #undef FMOVCC
3217 #define FMOVCC(xcc, sz)                                                 \
3218                     do {                                                \
3219                         DisasCompare cmp;                               \
3220                         cond = GET_FIELD_SP(insn, 14, 17);              \
3221                         gen_compare(&cmp, xcc, cond, dc);               \
3222                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3223                         free_compare(&cmp);                             \
3224                     } while (0)
3225
3226                     case 0x101: /* V9 fmovscc %icc */
3227                         FMOVCC(0, s);
3228                         break;
3229                     case 0x102: /* V9 fmovdcc %icc */
3230                         FMOVCC(0, d);
3231                         break;
3232                     case 0x103: /* V9 fmovqcc %icc */
3233                         CHECK_FPU_FEATURE(dc, FLOAT128);
3234                         FMOVCC(0, q);
3235                         break;
3236                     case 0x181: /* V9 fmovscc %xcc */
3237                         FMOVCC(1, s);
3238                         break;
3239                     case 0x182: /* V9 fmovdcc %xcc */
3240                         FMOVCC(1, d);
3241                         break;
3242                     case 0x183: /* V9 fmovqcc %xcc */
3243                         CHECK_FPU_FEATURE(dc, FLOAT128);
3244                         FMOVCC(1, q);
3245                         break;
3246 #undef FMOVCC
3247 #endif
3248                     case 0x51: /* fcmps, V9 %fcc */
3249                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3250                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3251                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3252                         break;
3253                     case 0x52: /* fcmpd, V9 %fcc */
3254                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3255                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3256                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3257                         break;
3258                     case 0x53: /* fcmpq, V9 %fcc */
3259                         CHECK_FPU_FEATURE(dc, FLOAT128);
3260                         gen_op_load_fpr_QT0(QFPREG(rs1));
3261                         gen_op_load_fpr_QT1(QFPREG(rs2));
3262                         gen_op_fcmpq(rd & 3);
3263                         break;
3264                     case 0x55: /* fcmpes, V9 %fcc */
3265                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3266                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3267                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3268                         break;
3269                     case 0x56: /* fcmped, V9 %fcc */
3270                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3271                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3272                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3273                         break;
3274                     case 0x57: /* fcmpeq, V9 %fcc */
3275                         CHECK_FPU_FEATURE(dc, FLOAT128);
3276                         gen_op_load_fpr_QT0(QFPREG(rs1));
3277                         gen_op_load_fpr_QT1(QFPREG(rs2));
3278                         gen_op_fcmpeq(rd & 3);
3279                         break;
3280                     default:
3281                         goto illegal_insn;
3282                 }
3283             } else if (xop == 0x2) {
3284                 TCGv dst = gen_dest_gpr(dc, rd);
3285                 rs1 = GET_FIELD(insn, 13, 17);
3286                 if (rs1 == 0) {
3287                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3288                     if (IS_IMM) {       /* immediate */
3289                         simm = GET_FIELDs(insn, 19, 31);
3290                         tcg_gen_movi_tl(dst, simm);
3291                         gen_store_gpr(dc, rd, dst);
3292                     } else {            /* register */
3293                         rs2 = GET_FIELD(insn, 27, 31);
3294                         if (rs2 == 0) {
3295                             tcg_gen_movi_tl(dst, 0);
3296                             gen_store_gpr(dc, rd, dst);
3297                         } else {
3298                             cpu_src2 = gen_load_gpr(dc, rs2);
3299                             gen_store_gpr(dc, rd, cpu_src2);
3300                         }
3301                     }
3302                 } else {
3303                     cpu_src1 = get_src1(dc, insn);
3304                     if (IS_IMM) {       /* immediate */
3305                         simm = GET_FIELDs(insn, 19, 31);
3306                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3307                         gen_store_gpr(dc, rd, dst);
3308                     } else {            /* register */
3309                         rs2 = GET_FIELD(insn, 27, 31);
3310                         if (rs2 == 0) {
3311                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3312                             gen_store_gpr(dc, rd, cpu_src1);
3313                         } else {
3314                             cpu_src2 = gen_load_gpr(dc, rs2);
3315                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3316                             gen_store_gpr(dc, rd, dst);
3317                         }
3318                     }
3319                 }
3320 #ifdef TARGET_SPARC64
3321             } else if (xop == 0x25) { /* sll, V9 sllx */
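                     /* For the three shift ops here, V9 uses instruction bit
                        12 (the "X" bit) to select the 64-bit form
                        (sllx/srlx/srax) with a 6-bit count; with X clear the
                        count is 5 bits and srl/sra first zero- or
                        sign-extend the source from 32 bits.  */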
3322                 cpu_src1 = get_src1(dc, insn);
3323                 if (IS_IMM) {   /* immediate */
3324                     simm = GET_FIELDs(insn, 20, 31);
3325                     if (insn & (1 << 12)) {
3326                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3327                     } else {
3328                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3329                     }
3330                 } else {                /* register */
3331                     rs2 = GET_FIELD(insn, 27, 31);
3332                     cpu_src2 = gen_load_gpr(dc, rs2);
3333                     cpu_tmp0 = get_temp_tl(dc);
3334                     if (insn & (1 << 12)) {
3335                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3336                     } else {
3337                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3338                     }
3339                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3340                 }
3341                 gen_store_gpr(dc, rd, cpu_dst);
3342             } else if (xop == 0x26) { /* srl, V9 srlx */
3343                 cpu_src1 = get_src1(dc, insn);
3344                 if (IS_IMM) {   /* immediate */
3345                     simm = GET_FIELDs(insn, 20, 31);
3346                     if (insn & (1 << 12)) {
3347                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3348                     } else {
3349                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3350                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3351                     }
3352                 } else {                /* register */
3353                     rs2 = GET_FIELD(insn, 27, 31);
3354                     cpu_src2 = gen_load_gpr(dc, rs2);
3355                     cpu_tmp0 = get_temp_tl(dc);
3356                     if (insn & (1 << 12)) {
3357                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3358                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3359                     } else {
3360                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3361                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3362                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3363                     }
3364                 }
3365                 gen_store_gpr(dc, rd, cpu_dst);
3366             } else if (xop == 0x27) { /* sra, V9 srax */
3367                 cpu_src1 = get_src1(dc, insn);
3368                 if (IS_IMM) {   /* immediate */
3369                     simm = GET_FIELDs(insn, 20, 31);
3370                     if (insn & (1 << 12)) {
3371                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3372                     } else {
3373                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3374                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3375                     }
3376                 } else {                /* register */
3377                     rs2 = GET_FIELD(insn, 27, 31);
3378                     cpu_src2 = gen_load_gpr(dc, rs2);
3379                     cpu_tmp0 = get_temp_tl(dc);
3380                     if (insn & (1 << 12)) {
3381                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3382                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3383                     } else {
3384                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3385                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3386                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3387                     }
3388                 }
3389                 gen_store_gpr(dc, rd, cpu_dst);
3390 #endif
3391             } else if (xop < 0x36) {
3392                 if (xop < 0x20) {
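                         /* op3 values 0x00-0x0f are the plain ALU ops and,
                            where a flag-setting form exists, 0x10-0x1f the
                            corresponding cc variants (addcc, andcc, ...), so
                            the switch dispatches on xop & ~0x10 and tests
                            bit 4 to decide whether to update the flags.  */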
3393                     cpu_src1 = get_src1(dc, insn);
3394                     cpu_src2 = get_src2(dc, insn);
3395                     switch (xop & ~0x10) {
3396                     case 0x0: /* add */
3397                         if (xop & 0x10) {
3398                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3399                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3400                             dc->cc_op = CC_OP_ADD;
3401                         } else {
3402                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3403                         }
3404                         break;
3405                     case 0x1: /* and */
3406                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3407                         if (xop & 0x10) {
3408                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3409                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3410                             dc->cc_op = CC_OP_LOGIC;
3411                         }
3412                         break;
3413                     case 0x2: /* or */
3414                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3415                         if (xop & 0x10) {
3416                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3417                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3418                             dc->cc_op = CC_OP_LOGIC;
3419                         }
3420                         break;
3421                     case 0x3: /* xor */
3422                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3423                         if (xop & 0x10) {
3424                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3425                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3426                             dc->cc_op = CC_OP_LOGIC;
3427                         }
3428                         break;
3429                     case 0x4: /* sub */
3430                         if (xop & 0x10) {
3431                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3432                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3433                             dc->cc_op = CC_OP_SUB;
3434                         } else {
3435                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3436                         }
3437                         break;
3438                     case 0x5: /* andn */
3439                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3440                         if (xop & 0x10) {
3441                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3442                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3443                             dc->cc_op = CC_OP_LOGIC;
3444                         }
3445                         break;
3446                     case 0x6: /* orn */
3447                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3448                         if (xop & 0x10) {
3449                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3450                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3451                             dc->cc_op = CC_OP_LOGIC;
3452                         }
3453                         break;
3454                     case 0x7: /* xorn */
3455                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3456                         if (xop & 0x10) {
3457                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3458                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3459                             dc->cc_op = CC_OP_LOGIC;
3460                         }
3461                         break;
3462                     case 0x8: /* addx, V9 addc */
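                             /* addx/addc (and subx/subc below) consume the
                                carry bit of the integer condition codes;
                                gen_op_addx_int extracts it from the lazily
                                tracked flags and, for the cc form, updates
                                them as well.  */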
3463                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3464                                         (xop & 0x10));
3465                         break;
3466 #ifdef TARGET_SPARC64
3467                     case 0x9: /* V9 mulx */
3468                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3469                         break;
3470 #endif
3471                     case 0xa: /* umul */
3472                         CHECK_IU_FEATURE(dc, MUL);
3473                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3474                         if (xop & 0x10) {
3475                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3476                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3477                             dc->cc_op = CC_OP_LOGIC;
3478                         }
3479                         break;
3480                     case 0xb: /* smul */
3481                         CHECK_IU_FEATURE(dc, MUL);
3482                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3483                         if (xop & 0x10) {
3484                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3485                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3486                             dc->cc_op = CC_OP_LOGIC;
3487                         }
3488                         break;
3489                     case 0xc: /* subx, V9 subc */
3490                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3491                                         (xop & 0x10));
3492                         break;
3493 #ifdef TARGET_SPARC64
3494                     case 0xd: /* V9 udivx */
3495                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3496                         break;
3497 #endif
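                         /* The division helpers take cpu_env because a zero
                            divisor must raise the division_by_zero trap at
                            run time.  */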
3498                     case 0xe: /* udiv */
3499                         CHECK_IU_FEATURE(dc, DIV);
3500                         if (xop & 0x10) {
3501                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3502                                                cpu_src2);
3503                             dc->cc_op = CC_OP_DIV;
3504                         } else {
3505                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3506                                             cpu_src2);
3507                         }
3508                         break;
3509                     case 0xf: /* sdiv */
3510                         CHECK_IU_FEATURE(dc, DIV);
3511                         if (xop & 0x10) {
3512                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3513                                                cpu_src2);
3514                             dc->cc_op = CC_OP_DIV;
3515                         } else {
3516                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3517                                             cpu_src2);
3518                         }
3519                         break;
3520                     default:
3521                         goto illegal_insn;
3522                     }
3523                     gen_store_gpr(dc, rd, cpu_dst);
3524                 } else {
3525                     cpu_src1 = get_src1(dc, insn);
3526                     cpu_src2 = get_src2(dc, insn);
3527                     switch (xop) {
3528                     case 0x20: /* taddcc */
3529                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3530                         gen_store_gpr(dc, rd, cpu_dst);
3531                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3532                         dc->cc_op = CC_OP_TADD;
3533                         break;
3534                     case 0x21: /* tsubcc */
3535                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3536                         gen_store_gpr(dc, rd, cpu_dst);
3537                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3538                         dc->cc_op = CC_OP_TSUB;
3539                         break;
3540                     case 0x22: /* taddcctv */
3541                         gen_helper_taddcctv(cpu_dst, cpu_env,
3542                                             cpu_src1, cpu_src2);
3543                         gen_store_gpr(dc, rd, cpu_dst);
3544                         dc->cc_op = CC_OP_TADDTV;
3545                         break;
3546                     case 0x23: /* tsubcctv */
3547                         gen_helper_tsubcctv(cpu_dst, cpu_env,
3548                                             cpu_src1, cpu_src2);
3549                         gen_store_gpr(dc, rd, cpu_dst);
3550                         dc->cc_op = CC_OP_TSUBTV;
3551                         break;
3552                     case 0x24: /* mulscc */
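                             /* mulscc shifts in icc.N ^ icc.V and uses %y, so
                                the lazily evaluated condition codes must be
                                materialised into the PSR first.  */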
3553                         update_psr(dc);
3554                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3555                         gen_store_gpr(dc, rd, cpu_dst);
3556                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3557                         dc->cc_op = CC_OP_ADD;
3558                         break;
3559 #ifndef TARGET_SPARC64
3560                     case 0x25:  /* sll */
3561                         if (IS_IMM) { /* immediate */
3562                             simm = GET_FIELDs(insn, 20, 31);
3563                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3564                         } else { /* register */
3565                             cpu_tmp0 = get_temp_tl(dc);
3566                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3567                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3568                         }
3569                         gen_store_gpr(dc, rd, cpu_dst);
3570                         break;
3571                     case 0x26:  /* srl */
3572                         if (IS_IMM) { /* immediate */
3573                             simm = GET_FIELDs(insn, 20, 31);
3574                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3575                         } else { /* register */
3576                             cpu_tmp0 = get_temp_tl(dc);
3577                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3578                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3579                         }
3580                         gen_store_gpr(dc, rd, cpu_dst);
3581                         break;
3582                     case 0x27:  /* sra */
3583                         if (IS_IMM) { /* immediate */
3584                             simm = GET_FIELDs(insn, 20, 31);
3585                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3586                         } else { /* register */
3587                             cpu_tmp0 = get_temp_tl(dc);
3588                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3589                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3590                         }
3591                         gen_store_gpr(dc, rd, cpu_dst);
3592                         break;
3593 #endif
3594                     case 0x30:
3595                         {
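                                 /* wry/wrasr: architecturally the value
                                    written is r[rs1] xor operand2, hence the
                                    XOR at the top of each sub-case; rd picks
                                    the target ASR, with rd == 0 naming %y.  */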
3596                             cpu_tmp0 = get_temp_tl(dc);
3597                             switch(rd) {
3598                             case 0: /* wry */
3599                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3600                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3601                                 break;
3602 #ifndef TARGET_SPARC64
3603                             case 0x01 ... 0x0f: /* undefined in the
3604                                                    SPARCv8 manual, nop
3605                                                    on the microSPARC
3606                                                    II */
3607                             case 0x10 ... 0x1f: /* implementation-dependent
3608                                                    in the SPARCv8
3609                                                    manual, nop on the
3610                                                    microSPARC II */
3611                                 if ((rd == 0x13) && (dc->def->features &
3612                                                      CPU_FEATURE_POWERDOWN)) {
3613                                     /* LEON3 power-down */
3614                                     save_state(dc);
3615                                     gen_helper_power_down(cpu_env);
3616                                 }
3617                                 break;
3618 #else
3619                             case 0x2: /* V9 wrccr */
3620                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3621                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
3622                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3623                                 dc->cc_op = CC_OP_FLAGS;
3624                                 break;
3625                             case 0x3: /* V9 wrasi */
3626                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3627                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
3628                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_tmp0);
3629                                 break;
3630                             case 0x6: /* V9 wrfprs */
3631                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3632                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
3633                                 save_state(dc);
3634                                 gen_op_next_insn();
3635                                 tcg_gen_exit_tb(0);
3636                                 dc->is_br = 1;
3637                                 break;
3638                             case 0xf: /* V9 sir, nop if user */
3639 #if !defined(CONFIG_USER_ONLY)
3640                                 if (supervisor(dc)) {
3641                                     ; // XXX
3642                                 }
3643 #endif
3644                                 break;
3645                             case 0x13: /* Graphics Status */
3646                                 if (gen_trap_ifnofpu(dc)) {
3647                                     goto jmp_insn;
3648                                 }
3649                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3650                                 break;
3651                             case 0x14: /* Softint set */
3652                                 if (!supervisor(dc))
3653                                     goto illegal_insn;
3654                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3655                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
3656                                 break;
3657                             case 0x15: /* Softint clear */
3658                                 if (!supervisor(dc))
3659                                     goto illegal_insn;
3660                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3661                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
3662                                 break;
3663                             case 0x16: /* Softint write */
3664                                 if (!supervisor(dc))
3665                                     goto illegal_insn;
3666                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3667                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
3668                                 break;
3669                             case 0x17: /* Tick compare */
3670 #if !defined(CONFIG_USER_ONLY)
3671                                 if (!supervisor(dc))
3672                                     goto illegal_insn;
3673 #endif
3674                                 {
3675                                     TCGv_ptr r_tickptr;
3676
3677                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3678                                                    cpu_src2);
3679                                     r_tickptr = tcg_temp_new_ptr();
3680                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3681                                                    offsetof(CPUSPARCState, tick));
3682                                     gen_helper_tick_set_limit(r_tickptr,
3683                                                               cpu_tick_cmpr);
3684                                     tcg_temp_free_ptr(r_tickptr);
3685                                 }
3686                                 break;
3687                             case 0x18: /* System tick */
3688 #if !defined(CONFIG_USER_ONLY)
3689                                 if (!supervisor(dc))
3690                                     goto illegal_insn;
3691 #endif
3692                                 {
3693                                     TCGv_ptr r_tickptr;
3694
3695                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3696                                                    cpu_src2);
3697                                     r_tickptr = tcg_temp_new_ptr();
3698                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3699                                                    offsetof(CPUSPARCState, stick));
3700                                     gen_helper_tick_set_count(r_tickptr,
3701                                                               cpu_tmp0);
3702                                     tcg_temp_free_ptr(r_tickptr);
3703                                 }
3704                                 break;
3705                             case 0x19: /* System tick compare */
3706 #if !defined(CONFIG_USER_ONLY)
3707                                 if (!supervisor(dc))
3708                                     goto illegal_insn;
3709 #endif
3710                                 {
3711                                     TCGv_ptr r_tickptr;
3712
3713                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3714                                                    cpu_src2);
3715                                     r_tickptr = tcg_temp_new_ptr();
3716                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3717                                                    offsetof(CPUSPARCState, stick));
3718                                     gen_helper_tick_set_limit(r_tickptr,
3719                                                               cpu_stick_cmpr);
3720                                     tcg_temp_free_ptr(r_tickptr);
3721                                 }
3722                                 break;
3723
3724                             case 0x10: /* Performance Control */
3725                             case 0x11: /* Performance Instrumentation
3726                                           Counter */
3727                             case 0x12: /* Dispatch Control */
3728 #endif
3729                             default:
3730                                 goto illegal_insn;
3731                             }
3732                         }
3733                         break;
3734 #if !defined(CONFIG_USER_ONLY)
3735                     case 0x31: /* wrpsr, V9 saved, restored */
3736                         {
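                                 /* V8: wrpsr; the new PSR may change CWP, PIL
                                    and the trap enables, so the TB is ended
                                    after the helper.  V9 reuses op3 0x31 for
                                    saved/restored, selected by the rd (fcn)
                                    field.  */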
3737                             if (!supervisor(dc))
3738                                 goto priv_insn;
3739 #ifdef TARGET_SPARC64
3740                             switch (rd) {
3741                             case 0:
3742                                 gen_helper_saved(cpu_env);
3743                                 break;
3744                             case 1:
3745                                 gen_helper_restored(cpu_env);
3746                                 break;
3747                             case 2: /* UA2005 allclean */
3748                             case 3: /* UA2005 otherw */
3749                             case 4: /* UA2005 normalw */
3750                             case 5: /* UA2005 invalw */
3751                                 // XXX
3752                             default:
3753                                 goto illegal_insn;
3754                             }
3755 #else
3756                             cpu_tmp0 = get_temp_tl(dc);
3757                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3758                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
3759                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3760                             dc->cc_op = CC_OP_FLAGS;
3761                             save_state(dc);
3762                             gen_op_next_insn();
3763                             tcg_gen_exit_tb(0);
3764                             dc->is_br = 1;
3765 #endif
3766                         }
3767                         break;
3768                     case 0x32: /* wrwim, V9 wrpr */
3769                         {
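                                 /* V8: wrwim, masked below to the implemented
                                    window bits.  V9 reuses this op3 for wrpr,
                                    with rd selecting the privileged register
                                    (tpc, tnpc, tstate, ...).  */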
3770                             if (!supervisor(dc))
3771                                 goto priv_insn;
3772                             cpu_tmp0 = get_temp_tl(dc);
3773                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3774 #ifdef TARGET_SPARC64
3775                             switch (rd) {
3776                             case 0: // tpc
3777                                 {
3778                                     TCGv_ptr r_tsptr;
3779
3780                                     r_tsptr = tcg_temp_new_ptr();
3781                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3782                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3783                                                   offsetof(trap_state, tpc));
3784                                     tcg_temp_free_ptr(r_tsptr);
3785                                 }
3786                                 break;
3787                             case 1: // tnpc
3788                                 {
3789                                     TCGv_ptr r_tsptr;
3790
3791                                     r_tsptr = tcg_temp_new_ptr();
3792                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3793                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3794                                                   offsetof(trap_state, tnpc));
3795                                     tcg_temp_free_ptr(r_tsptr);
3796                                 }
3797                                 break;
3798                             case 2: // tstate
3799                                 {
3800                                     TCGv_ptr r_tsptr;
3801
3802                                     r_tsptr = tcg_temp_new_ptr();
3803                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3804                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3805                                                   offsetof(trap_state,
3806                                                            tstate));
3807                                     tcg_temp_free_ptr(r_tsptr);
3808                                 }
3809                                 break;
3810                             case 3: // tt
3811                                 {
3812                                     TCGv_ptr r_tsptr;
3813
3814                                     r_tsptr = tcg_temp_new_ptr();
3815                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3816                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
3817                                                     offsetof(trap_state, tt));
3818                                     tcg_temp_free_ptr(r_tsptr);
3819                                 }
3820                                 break;
3821                             case 4: // tick
3822                                 {
3823                                     TCGv_ptr r_tickptr;
3824
3825                                     r_tickptr = tcg_temp_new_ptr();
3826                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3827                                                    offsetof(CPUSPARCState, tick));
3828                                     gen_helper_tick_set_count(r_tickptr,
3829                                                               cpu_tmp0);
3830                                     tcg_temp_free_ptr(r_tickptr);
3831                                 }
3832                                 break;
3833                             case 5: // tba
3834                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3835                                 break;
3836                             case 6: // pstate
3837                                 save_state(dc);
3838                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3839                                 dc->npc = DYNAMIC_PC;
3840                                 break;
3841                             case 7: // tl
3842                                 save_state(dc);
3843                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3844                                                offsetof(CPUSPARCState, tl));
3845                                 dc->npc = DYNAMIC_PC;
3846                                 break;
3847                             case 8: // pil
3848                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
3849                                 break;
3850                             case 9: // cwp
3851                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3852                                 break;
3853                             case 10: // cansave
3854                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3855                                                 offsetof(CPUSPARCState,
3856                                                          cansave));
3857                                 break;
3858                             case 11: // canrestore
3859                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3860                                                 offsetof(CPUSPARCState,
3861                                                          canrestore));
3862                                 break;
3863                             case 12: // cleanwin
3864                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3865                                                 offsetof(CPUSPARCState,
3866                                                          cleanwin));
3867                                 break;
3868                             case 13: // otherwin
3869                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3870                                                 offsetof(CPUSPARCState,
3871                                                          otherwin));
3872                                 break;
3873                             case 14: // wstate
3874                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3875                                                 offsetof(CPUSPARCState,
3876                                                          wstate));
3877                                 break;
3878                             case 16: // UA2005 gl
3879                                 CHECK_IU_FEATURE(dc, GL);
3880                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3881                                                 offsetof(CPUSPARCState, gl));
3882                                 break;
3883                             case 26: // UA2005 strand status
3884                                 CHECK_IU_FEATURE(dc, HYPV);
3885                                 if (!hypervisor(dc))
3886                                     goto priv_insn;
3887                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3888                                 break;
3889                             default:
3890                                 goto illegal_insn;
3891                             }
3892 #else
3893                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
3894                             if (dc->def->nwindows != 32) {
3895                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
3896                                                 (1 << dc->def->nwindows) - 1);
3897                             }
3898 #endif
3899                         }
3900                         break;
3901                     case 0x33: /* wrtbr, UA2005 wrhpr */
3902                         {
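                                 /* V8: wrtbr (privileged).  UA2005 turns this
                                    op3 into the hyperprivileged wrhpr, with
                                    rd selecting hpstate, htstate, hintp, htba
                                    or hstick_cmpr.  */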
3903 #ifndef TARGET_SPARC64
3904                             if (!supervisor(dc))
3905                                 goto priv_insn;
3906                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3907 #else
3908                             CHECK_IU_FEATURE(dc, HYPV);
3909                             if (!hypervisor(dc))
3910                                 goto priv_insn;
3911                             cpu_tmp0 = get_temp_tl(dc);
3912                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3913                             switch (rd) {
3914                             case 0: // hpstate
3915                                 // XXX gen_op_wrhpstate();
3916                                 save_state(dc);
3917                                 gen_op_next_insn();
3918                                 tcg_gen_exit_tb(0);
3919                                 dc->is_br = 1;
3920                                 break;
3921                             case 1: // htstate
3922                                 // XXX gen_op_wrhtstate();
3923                                 break;
3924                             case 3: // hintp
3925                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3926                                 break;
3927                             case 5: // htba
3928                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3929                                 break;
3930                             case 31: // hstick_cmpr
3931                                 {
3932                                     TCGv_ptr r_tickptr;
3933
3934                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3935                                     r_tickptr = tcg_temp_new_ptr();
3936                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3937                                                    offsetof(CPUSPARCState, hstick));
3938                                     gen_helper_tick_set_limit(r_tickptr,
3939                                                               cpu_hstick_cmpr);
3940                                     tcg_temp_free_ptr(r_tickptr);
3941                                 }
3942                                 break;
3943                             case 6: // hver is read-only, so writing it falls through to illegal_insn
3944                             default:
3945                                 goto illegal_insn;
3946                             }
3947 #endif
3948                         }
3949                         break;
3950 #endif
3951 #ifdef TARGET_SPARC64
3952                     case 0x2c: /* V9 movcc */
3953                         {
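                                 /* movcc: instruction bit 18 selects the
                                    integer condition codes (cc 0 = %icc,
                                    cc 2 = %xcc) rather than %fccN; the move
                                    becomes a single movcond that keeps the
                                    old rd value when the condition is
                                    false.  */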
3954                             int cc = GET_FIELD_SP(insn, 11, 12);
3955                             int cond = GET_FIELD_SP(insn, 14, 17);
3956                             DisasCompare cmp;
3957                             TCGv dst;
3958
3959                             if (insn & (1 << 18)) {
3960                                 if (cc == 0) {
3961                                     gen_compare(&cmp, 0, cond, dc);
3962                                 } else if (cc == 2) {
3963                                     gen_compare(&cmp, 1, cond, dc);
3964                                 } else {
3965                                     goto illegal_insn;
3966                                 }
3967                             } else {
3968                                 gen_fcompare(&cmp, cc, cond);
3969                             }
3970
3971                             /* The get_src2 above loaded the normal 13-bit
3972                                immediate field, not the 11-bit field we have
3973                                in movcc.  But it did handle the reg case.  */
3974                             if (IS_IMM) {
3975                                 simm = GET_FIELD_SPs(insn, 0, 10);
3976                                 tcg_gen_movi_tl(cpu_src2, simm);
3977                             }
3978
3979                             dst = gen_load_gpr(dc, rd);
3980                             tcg_gen_movcond_tl(cmp.cond, dst,
3981                                                cmp.c1, cmp.c2,
3982                                                cpu_src2, dst);
3983                             free_compare(&cmp);
3984                             gen_store_gpr(dc, rd, dst);
3985                             break;
3986                         }
3987                     case 0x2d: /* V9 sdivx */
3988                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3989                         gen_store_gpr(dc, rd, cpu_dst);
3990                         break;
3991                     case 0x2e: /* V9 popc */
3992                         gen_helper_popc(cpu_dst, cpu_src2);
3993                         gen_store_gpr(dc, rd, cpu_dst);
3994                         break;
3995                     case 0x2f: /* V9 movr */
3996                         {
3997                             int cond = GET_FIELD_SP(insn, 10, 12);
3998                             DisasCompare cmp;
3999                             TCGv dst;
4000
4001                             gen_compare_reg(&cmp, cond, cpu_src1);
4002
4003                             /* The get_src2 above loaded the normal 13-bit
4004                                immediate field, not the 10-bit field we have
4005                                in movr.  But it did handle the reg case.  */
4006                             if (IS_IMM) {
4007                                 simm = GET_FIELD_SPs(insn, 0, 9);
4008                                 tcg_gen_movi_tl(cpu_src2, simm);
4009                             }
4010
4011                             dst = gen_load_gpr(dc, rd);
4012                             tcg_gen_movcond_tl(cmp.cond, dst,
4013                                                cmp.c1, cmp.c2,
4014                                                cpu_src2, dst);
4015                             free_compare(&cmp);
4016                             gen_store_gpr(dc, rd, dst);
4017                             break;
4018                         }
4019 #endif
4020                     default:
4021                         goto illegal_insn;
4022                     }
4023                 }
4024             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4025 #ifdef TARGET_SPARC64
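                     /* IMPDEP1: on UltraSPARC the 9-bit opf field selects the
                        VIS operation.  VIS works on the FP register file, so
                        a disabled FPU must trap before anything is
                        emitted.  */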
4026                 int opf = GET_FIELD_SP(insn, 5, 13);
4027                 rs1 = GET_FIELD(insn, 13, 17);
4028                 rs2 = GET_FIELD(insn, 27, 31);
4029                 if (gen_trap_ifnofpu(dc)) {
4030                     goto jmp_insn;
4031                 }
4032
4033                 switch (opf) {
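                     /* For the edge instructions, gen_edge's trailing
                        arguments are the element width (8/16/32), whether
                        the condition codes are set (the VIS1 cc forms) and
                        whether the left- or right-edge mask is produced (the
                        'l' forms); the VIS2 'n' variants skip the cc
                        update.  */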
4034                 case 0x000: /* VIS I edge8cc */
4035                     CHECK_FPU_FEATURE(dc, VIS1);
4036                     cpu_src1 = gen_load_gpr(dc, rs1);
4037                     cpu_src2 = gen_load_gpr(dc, rs2);
4038                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4039                     gen_store_gpr(dc, rd, cpu_dst);
4040                     break;
4041                 case 0x001: /* VIS II edge8n */
4042                     CHECK_FPU_FEATURE(dc, VIS2);
4043                     cpu_src1 = gen_load_gpr(dc, rs1);
4044                     cpu_src2 = gen_load_gpr(dc, rs2);
4045                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4046                     gen_store_gpr(dc, rd, cpu_dst);
4047                     break;
4048                 case 0x002: /* VIS I edge8lcc */
4049                     CHECK_FPU_FEATURE(dc, VIS1);
4050                     cpu_src1 = gen_load_gpr(dc, rs1);
4051                     cpu_src2 = gen_load_gpr(dc, rs2);
4052                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4053                     gen_store_gpr(dc, rd, cpu_dst);
4054                     break;
4055                 case 0x003: /* VIS II edge8ln */
4056                     CHECK_FPU_FEATURE(dc, VIS2);
4057                     cpu_src1 = gen_load_gpr(dc, rs1);
4058                     cpu_src2 = gen_load_gpr(dc, rs2);
4059                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4060                     gen_store_gpr(dc, rd, cpu_dst);
4061                     break;
4062                 case 0x004: /* VIS I edge16cc */
4063                     CHECK_FPU_FEATURE(dc, VIS1);
4064                     cpu_src1 = gen_load_gpr(dc, rs1);
4065                     cpu_src2 = gen_load_gpr(dc, rs2);
4066                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4067                     gen_store_gpr(dc, rd, cpu_dst);
4068                     break;
4069                 case 0x005: /* VIS II edge16n */
4070                     CHECK_FPU_FEATURE(dc, VIS2);
4071                     cpu_src1 = gen_load_gpr(dc, rs1);
4072                     cpu_src2 = gen_load_gpr(dc, rs2);
4073                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4074                     gen_store_gpr(dc, rd, cpu_dst);
4075                     break;
4076                 case 0x006: /* VIS I edge16lcc */
4077                     CHECK_FPU_FEATURE(dc, VIS1);
4078                     cpu_src1 = gen_load_gpr(dc, rs1);
4079                     cpu_src2 = gen_load_gpr(dc, rs2);
4080                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4081                     gen_store_gpr(dc, rd, cpu_dst);
4082                     break;
4083                 case 0x007: /* VIS II edge16ln */
4084                     CHECK_FPU_FEATURE(dc, VIS2);
4085                     cpu_src1 = gen_load_gpr(dc, rs1);
4086                     cpu_src2 = gen_load_gpr(dc, rs2);
4087                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4088                     gen_store_gpr(dc, rd, cpu_dst);
4089                     break;
4090                 case 0x008: /* VIS I edge32cc */
4091                     CHECK_FPU_FEATURE(dc, VIS1);
4092                     cpu_src1 = gen_load_gpr(dc, rs1);
4093                     cpu_src2 = gen_load_gpr(dc, rs2);
4094                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4095                     gen_store_gpr(dc, rd, cpu_dst);
4096                     break;
4097                 case 0x009: /* VIS II edge32n */
4098                     CHECK_FPU_FEATURE(dc, VIS2);
4099                     cpu_src1 = gen_load_gpr(dc, rs1);
4100                     cpu_src2 = gen_load_gpr(dc, rs2);
4101                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4102                     gen_store_gpr(dc, rd, cpu_dst);
4103                     break;
4104                 case 0x00a: /* VIS I edge32lcc */
4105                     CHECK_FPU_FEATURE(dc, VIS1);
4106                     cpu_src1 = gen_load_gpr(dc, rs1);
4107                     cpu_src2 = gen_load_gpr(dc, rs2);
4108                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4109                     gen_store_gpr(dc, rd, cpu_dst);
4110                     break;
4111                 case 0x00b: /* VIS II edge32ln */
4112                     CHECK_FPU_FEATURE(dc, VIS2);
4113                     cpu_src1 = gen_load_gpr(dc, rs1);
4114                     cpu_src2 = gen_load_gpr(dc, rs2);
4115                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4116                     gen_store_gpr(dc, rd, cpu_dst);
4117                     break;
4118                 case 0x010: /* VIS I array8 */
4119                     CHECK_FPU_FEATURE(dc, VIS1);
4120                     cpu_src1 = gen_load_gpr(dc, rs1);
4121                     cpu_src2 = gen_load_gpr(dc, rs2);
4122                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4123                     gen_store_gpr(dc, rd, cpu_dst);
4124                     break;
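                     /* array16 and array32 reuse the array8 helper and scale
                        the resulting blocked-byte address by the element
                        size.  */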
4125                 case 0x012: /* VIS I array16 */
4126                     CHECK_FPU_FEATURE(dc, VIS1);
4127                     cpu_src1 = gen_load_gpr(dc, rs1);
4128                     cpu_src2 = gen_load_gpr(dc, rs2);
4129                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4130                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4131                     gen_store_gpr(dc, rd, cpu_dst);
4132                     break;
4133                 case 0x014: /* VIS I array32 */
4134                     CHECK_FPU_FEATURE(dc, VIS1);
4135                     cpu_src1 = gen_load_gpr(dc, rs1);
4136                     cpu_src2 = gen_load_gpr(dc, rs2);
4137                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4138                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4139                     gen_store_gpr(dc, rd, cpu_dst);
4140                     break;
4141                 case 0x018: /* VIS I alignaddr */
4142                     CHECK_FPU_FEATURE(dc, VIS1);
4143                     cpu_src1 = gen_load_gpr(dc, rs1);
4144                     cpu_src2 = gen_load_gpr(dc, rs2);
4145                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4146                     gen_store_gpr(dc, rd, cpu_dst);
4147                     break;
4148                 case 0x01a: /* VIS I alignaddrl */
4149                     CHECK_FPU_FEATURE(dc, VIS1);
4150                     cpu_src1 = gen_load_gpr(dc, rs1);
4151                     cpu_src2 = gen_load_gpr(dc, rs2);
4152                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4153                     gen_store_gpr(dc, rd, cpu_dst);
4154                     break;
4155                 case 0x019: /* VIS II bmask */
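                         /* bmask: the 32-bit sum goes both to rd and to
                            GSR.mask (bits 63:32 of %gsr), where bshuffle
                            picks it up.  */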
4156                     CHECK_FPU_FEATURE(dc, VIS2);
4157                     cpu_src1 = gen_load_gpr(dc, rs1);
4158                     cpu_src2 = gen_load_gpr(dc, rs2);
4159                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4160                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4161                     gen_store_gpr(dc, rd, cpu_dst);
4162                     break;
4163                 case 0x020: /* VIS I fcmple16 */
4164                     CHECK_FPU_FEATURE(dc, VIS1);
4165                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4166                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4167                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4168                     gen_store_gpr(dc, rd, cpu_dst);
4169                     break;
4170                 case 0x022: /* VIS I fcmpne16 */
4171                     CHECK_FPU_FEATURE(dc, VIS1);
4172                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4173                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4174                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4175                     gen_store_gpr(dc, rd, cpu_dst);
4176                     break;
4177                 case 0x024: /* VIS I fcmple32 */
4178                     CHECK_FPU_FEATURE(dc, VIS1);
4179                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4180                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4181                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4182                     gen_store_gpr(dc, rd, cpu_dst);
4183                     break;
4184                 case 0x026: /* VIS I fcmpne32 */
4185                     CHECK_FPU_FEATURE(dc, VIS1);
4186                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4187                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4188                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4189                     gen_store_gpr(dc, rd, cpu_dst);
4190                     break;
4191                 case 0x028: /* VIS I fcmpgt16 */
4192                     CHECK_FPU_FEATURE(dc, VIS1);
4193                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4194                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4195                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4196                     gen_store_gpr(dc, rd, cpu_dst);
4197                     break;
4198                 case 0x02a: /* VIS I fcmpeq16 */
4199                     CHECK_FPU_FEATURE(dc, VIS1);
4200                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4201                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4202                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4203                     gen_store_gpr(dc, rd, cpu_dst);
4204                     break;
4205                 case 0x02c: /* VIS I fcmpgt32 */
4206                     CHECK_FPU_FEATURE(dc, VIS1);
4207                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4208                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4209                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4210                     gen_store_gpr(dc, rd, cpu_dst);
4211                     break;
4212                 case 0x02e: /* VIS I fcmpeq32 */
4213                     CHECK_FPU_FEATURE(dc, VIS1);
4214                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4215                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4216                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4217                     gen_store_gpr(dc, rd, cpu_dst);
4218                     break;
4219                 case 0x031: /* VIS I fmul8x16 */
4220                     CHECK_FPU_FEATURE(dc, VIS1);
4221                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4222                     break;
4223                 case 0x033: /* VIS I fmul8x16au */
4224                     CHECK_FPU_FEATURE(dc, VIS1);
4225                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4226                     break;
4227                 case 0x035: /* VIS I fmul8x16al */
4228                     CHECK_FPU_FEATURE(dc, VIS1);
4229                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4230                     break;
4231                 case 0x036: /* VIS I fmul8sux16 */
4232                     CHECK_FPU_FEATURE(dc, VIS1);
4233                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4234                     break;
4235                 case 0x037: /* VIS I fmul8ulx16 */
4236                     CHECK_FPU_FEATURE(dc, VIS1);
4237                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4238                     break;
4239                 case 0x038: /* VIS I fmuld8sux16 */
4240                     CHECK_FPU_FEATURE(dc, VIS1);
4241                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4242                     break;
4243                 case 0x039: /* VIS I fmuld8ulx16 */
4244                     CHECK_FPU_FEATURE(dc, VIS1);
4245                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4246                     break;
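                     /* fpack16/fpack32/fpackfix scale by GSR.scale and
                        faligndata (below) extracts at GSR.align, hence the
                        gsr fop variants and the explicit cpu_gsr
                        arguments.  */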
4247                 case 0x03a: /* VIS I fpack32 */
4248                     CHECK_FPU_FEATURE(dc, VIS1);
4249                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4250                     break;
4251                 case 0x03b: /* VIS I fpack16 */
4252                     CHECK_FPU_FEATURE(dc, VIS1);
4253                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4254                     cpu_dst_32 = gen_dest_fpr_F(dc);
4255                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4256                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4257                     break;
4258                 case 0x03d: /* VIS I fpackfix */
4259                     CHECK_FPU_FEATURE(dc, VIS1);
4260                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4261                     cpu_dst_32 = gen_dest_fpr_F(dc);
4262                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4263                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4264                     break;
4265                 case 0x03e: /* VIS I pdist */
4266                     CHECK_FPU_FEATURE(dc, VIS1);
4267                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4268                     break;
4269                 case 0x048: /* VIS I faligndata */
4270                     CHECK_FPU_FEATURE(dc, VIS1);
4271                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4272                     break;
4273                 case 0x04b: /* VIS I fpmerge */
4274                     CHECK_FPU_FEATURE(dc, VIS1);
4275                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4276                     break;
4277                 case 0x04c: /* VIS II bshuffle */
4278                     CHECK_FPU_FEATURE(dc, VIS2);
4279                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4280                     break;
4281                 case 0x04d: /* VIS I fexpand */
4282                     CHECK_FPU_FEATURE(dc, VIS1);
4283                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4284                     break;
4285                 case 0x050: /* VIS I fpadd16 */
4286                     CHECK_FPU_FEATURE(dc, VIS1);
4287                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4288                     break;
4289                 case 0x051: /* VIS I fpadd16s */
4290                     CHECK_FPU_FEATURE(dc, VIS1);
4291                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4292                     break;
4293                 case 0x052: /* VIS I fpadd32 */
4294                     CHECK_FPU_FEATURE(dc, VIS1);
4295                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4296                     break;
4297                 case 0x053: /* VIS I fpadd32s */
4298                     CHECK_FPU_FEATURE(dc, VIS1);
4299                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4300                     break;
4301                 case 0x054: /* VIS I fpsub16 */
4302                     CHECK_FPU_FEATURE(dc, VIS1);
4303                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4304                     break;
4305                 case 0x055: /* VIS I fpsub16s */
4306                     CHECK_FPU_FEATURE(dc, VIS1);
4307                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4308                     break;
4309                 case 0x056: /* VIS I fpsub32 */
4310                     CHECK_FPU_FEATURE(dc, VIS1);
4311                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4312                     break;
4313                 case 0x057: /* VIS I fpsub32s */
4314                     CHECK_FPU_FEATURE(dc, VIS1);
4315                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4316                     break;
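                     /* From opf 0x060 upwards come the VIS logical ops: the
                        two-operand boolean functions in a 64-bit and a
                        single-precision ("s") form, mapped directly onto the
                        corresponding tcg ops.  */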
4317                 case 0x060: /* VIS I fzero */
4318                     CHECK_FPU_FEATURE(dc, VIS1);
4319                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4320                     tcg_gen_movi_i64(cpu_dst_64, 0);
4321                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4322                     break;
4323                 case 0x061: /* VIS I fzeros */
4324                     CHECK_FPU_FEATURE(dc, VIS1);
4325                     cpu_dst_32 = gen_dest_fpr_F(dc);
4326                     tcg_gen_movi_i32(cpu_dst_32, 0);
4327                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4328                     break;
4329                 case 0x062: /* VIS I fnor */
4330                     CHECK_FPU_FEATURE(dc, VIS1);
4331                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4332                     break;
4333                 case 0x063: /* VIS I fnors */
4334                     CHECK_FPU_FEATURE(dc, VIS1);
4335                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4336                     break;
4337                 case 0x064: /* VIS I fandnot2 */
4338                     CHECK_FPU_FEATURE(dc, VIS1);
4339                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4340                     break;
4341                 case 0x065: /* VIS I fandnot2s */
4342                     CHECK_FPU_FEATURE(dc, VIS1);
4343                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4344                     break;
4345                 case 0x066: /* VIS I fnot2 */
4346                     CHECK_FPU_FEATURE(dc, VIS1);
4347                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4348                     break;
4349                 case 0x067: /* VIS I fnot2s */
4350                     CHECK_FPU_FEATURE(dc, VIS1);
4351                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4352                     break;
4353                 case 0x068: /* VIS I fandnot1 */
4354                     CHECK_FPU_FEATURE(dc, VIS1);
4355                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4356                     break;
4357                 case 0x069: /* VIS I fandnot1s */
4358                     CHECK_FPU_FEATURE(dc, VIS1);
4359                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4360                     break;
4361                 case 0x06a: /* VIS I fnot1 */
4362                     CHECK_FPU_FEATURE(dc, VIS1);
4363                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4364                     break;
4365                 case 0x06b: /* VIS I fnot1s */
4366                     CHECK_FPU_FEATURE(dc, VIS1);
4367                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4368                     break;
4369                 case 0x06c: /* VIS I fxor */
4370                     CHECK_FPU_FEATURE(dc, VIS1);
4371                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4372                     break;
4373                 case 0x06d: /* VIS I fxors */
4374                     CHECK_FPU_FEATURE(dc, VIS1);
4375                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4376                     break;
4377                 case 0x06e: /* VIS I fnand */
4378                     CHECK_FPU_FEATURE(dc, VIS1);
4379                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4380                     break;
4381                 case 0x06f: /* VIS I fnands */
4382                     CHECK_FPU_FEATURE(dc, VIS1);
4383                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4384                     break;
4385                 case 0x070: /* VIS I fand */
4386                     CHECK_FPU_FEATURE(dc, VIS1);
4387                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4388                     break;
4389                 case 0x071: /* VIS I fands */
4390                     CHECK_FPU_FEATURE(dc, VIS1);
4391                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4392                     break;
4393                 case 0x072: /* VIS I fxnor */
4394                     CHECK_FPU_FEATURE(dc, VIS1);
4395                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4396                     break;
4397                 case 0x073: /* VIS I fxnors */
4398                     CHECK_FPU_FEATURE(dc, VIS1);
4399                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4400                     break;
4401                 case 0x074: /* VIS I fsrc1 */
4402                     CHECK_FPU_FEATURE(dc, VIS1);
4403                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4404                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4405                     break;
4406                 case 0x075: /* VIS I fsrc1s */
4407                     CHECK_FPU_FEATURE(dc, VIS1);
4408                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4409                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4410                     break;
4411                 case 0x076: /* VIS I fornot2 */
4412                     CHECK_FPU_FEATURE(dc, VIS1);
4413                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4414                     break;
4415                 case 0x077: /* VIS I fornot2s */
4416                     CHECK_FPU_FEATURE(dc, VIS1);
4417                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4418                     break;
4419                 case 0x078: /* VIS I fsrc2 */
4420                     CHECK_FPU_FEATURE(dc, VIS1);
4421                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4422                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4423                     break;
4424                 case 0x079: /* VIS I fsrc2s */
4425                     CHECK_FPU_FEATURE(dc, VIS1);
4426                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4427                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4428                     break;
4429                 case 0x07a: /* VIS I fornot1 */
4430                     CHECK_FPU_FEATURE(dc, VIS1);
4431                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4432                     break;
4433                 case 0x07b: /* VIS I fornot1s */
4434                     CHECK_FPU_FEATURE(dc, VIS1);
4435                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4436                     break;
4437                 case 0x07c: /* VIS I for */
4438                     CHECK_FPU_FEATURE(dc, VIS1);
4439                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4440                     break;
4441                 case 0x07d: /* VIS I fors */
4442                     CHECK_FPU_FEATURE(dc, VIS1);
4443                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4444                     break;
4445                 case 0x07e: /* VIS I fone */
4446                     CHECK_FPU_FEATURE(dc, VIS1);
4447                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4448                     tcg_gen_movi_i64(cpu_dst_64, -1);
4449                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4450                     break;
4451                 case 0x07f: /* VIS I fones */
4452                     CHECK_FPU_FEATURE(dc, VIS1);
4453                     cpu_dst_32 = gen_dest_fpr_F(dc);
4454                     tcg_gen_movi_i32(cpu_dst_32, -1);
4455                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4456                     break;
4457                 case 0x080: /* VIS I shutdown */
4458                 case 0x081: /* VIS II siam */
4459                     // XXX: not implemented, treated as illegal
4460                     goto illegal_insn;
4461                 default:
4462                     goto illegal_insn;
4463                 }
4464 #else
4465                 goto ncp_insn;
4466 #endif
4467             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4468 #ifdef TARGET_SPARC64
4469                 goto illegal_insn;
4470 #else
4471                 goto ncp_insn;
4472 #endif
4473 #ifdef TARGET_SPARC64
4474             } else if (xop == 0x39) { /* V9 return */
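                     /* V9 return: the target address is computed from the old
                        register window, then the window is restored and control
                        transfers to the target through npc. */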
4475                 TCGv_i32 r_const;
4476
4477                 save_state(dc);
4478                 cpu_src1 = get_src1(dc, insn);
4479                 cpu_tmp0 = get_temp_tl(dc);
4480                 if (IS_IMM) {   /* immediate */
4481                     simm = GET_FIELDs(insn, 19, 31);
4482                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4483                 } else {                /* register */
4484                     rs2 = GET_FIELD(insn, 27, 31);
4485                     if (rs2) {
4486                         cpu_src2 = gen_load_gpr(dc, rs2);
4487                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4488                     } else {
4489                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4490                     }
4491                 }
4492                 gen_helper_restore(cpu_env);
4493                 gen_mov_pc_npc(dc);
4494                 r_const = tcg_const_i32(3);
4495                 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4496                 tcg_temp_free_i32(r_const);
4497                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4498                 dc->npc = DYNAMIC_PC;
4499                 goto jmp_insn;
4500 #endif
4501             } else {
4502                 cpu_src1 = get_src1(dc, insn);
4503                 cpu_tmp0 = get_temp_tl(dc);
4504                 if (IS_IMM) {   /* immediate */
4505                     simm = GET_FIELDs(insn, 19, 31);
4506                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4507                 } else {                /* register */
4508                     rs2 = GET_FIELD(insn, 27, 31);
4509                     if (rs2) {
4510                         cpu_src2 = gen_load_gpr(dc, rs2);
4511                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4512                     } else {
4513                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4514                     }
4515                 }
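                     /* cpu_tmp0 now holds rs1 + (simm or rs2); the cases below
                        use it as the jump target (jmpl/rett) or as the value
                        written to rd (save/restore). */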
4516                 switch (xop) {
4517                 case 0x38:      /* jmpl */
4518                     {
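                             /* jmpl: rd receives the address of this
                                instruction, and the computed target becomes
                                the new npc (delayed control transfer). */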
4519                         TCGv t;
4520                         TCGv_i32 r_const;
4521
4522                         t = gen_dest_gpr(dc, rd);
4523                         tcg_gen_movi_tl(t, dc->pc);
4524                         gen_store_gpr(dc, rd, t);
4525                         gen_mov_pc_npc(dc);
4526                         r_const = tcg_const_i32(3);
4527                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4528                         tcg_temp_free_i32(r_const);
4529                         gen_address_mask(dc, cpu_tmp0);
4530                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4531                         dc->npc = DYNAMIC_PC;
4532                     }
4533                     goto jmp_insn;
4534 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4535                 case 0x39:      /* rett, V9 return */
4536                     {
4537                         TCGv_i32 r_const;
4538
4539                         if (!supervisor(dc))
4540                             goto priv_insn;
4541                         gen_mov_pc_npc(dc);
4542                         r_const = tcg_const_i32(3);
4543                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4544                         tcg_temp_free_i32(r_const);
4545                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4546                         dc->npc = DYNAMIC_PC;
4547                         gen_helper_rett(cpu_env);
4548                     }
4549                     goto jmp_insn;
4550 #endif
4551                 case 0x3b: /* flush */
4552                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4553                         goto unimp_flush;
4554                     /* nop */
4555                     break;
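                     /* save/restore: the helper moves to the new register
                        window; the precomputed rs1 + rs2/simm is then written
                        to rd in the new window. */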
4556                 case 0x3c:      /* save */
4557                     save_state(dc);
4558                     gen_helper_save(cpu_env);
4559                     gen_store_gpr(dc, rd, cpu_tmp0);
4560                     break;
4561                 case 0x3d:      /* restore */
4562                     save_state(dc);
4563                     gen_helper_restore(cpu_env);
4564                     gen_store_gpr(dc, rd, cpu_tmp0);
4565                     break;
4566 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4567                 case 0x3e:      /* V9 done/retry */
4568                     {
4569                         switch (rd) {
4570                         case 0:
4571                             if (!supervisor(dc))
4572                                 goto priv_insn;
4573                             dc->npc = DYNAMIC_PC;
4574                             dc->pc = DYNAMIC_PC;
4575                             gen_helper_done(cpu_env);
4576                             goto jmp_insn;
4577                         case 1:
4578                             if (!supervisor(dc))
4579                                 goto priv_insn;
4580                             dc->npc = DYNAMIC_PC;
4581                             dc->pc = DYNAMIC_PC;
4582                             gen_helper_retry(cpu_env);
4583                             goto jmp_insn;
4584                         default:
4585                             goto illegal_insn;
4586                         }
4587                     }
4588                     break;
4589 #endif
4590                 default:
4591                     goto illegal_insn;
4592                 }
4593             }
4594             break;
4595         }
4596         break;
4597     case 3:                     /* load/store instructions */
4598         {
4599             unsigned int xop = GET_FIELD(insn, 7, 12);
4600             /* ??? gen_address_mask prevents us from using a source
4601                register directly.  Always generate a temporary.  */
4602             TCGv cpu_addr = get_temp_tl(dc);
4603
4604             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
4605             if (xop == 0x3c || xop == 0x3e) {
4606             /* V9 casa/casxa: no offset */
4607             } else if (IS_IMM) {     /* immediate */
4608                 simm = GET_FIELDs(insn, 19, 31);
4609                 if (simm != 0) {
4610                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
4611                 }
4612             } else {            /* register */
4613                 rs2 = GET_FIELD(insn, 27, 31);
4614                 if (rs2 != 0) {
4615                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
4616                 }
4617             }
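                 /* Integer and alternate-space loads (plus ldstub, swap and the
                    V9 FP/prefetch ASI forms) are decoded here; unless a case
                    jumps to skip_move, the result lands in rd after the switch. */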
4618             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4619                 (xop > 0x17 && xop <= 0x1d ) ||
4620                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4621                 TCGv cpu_val = gen_dest_gpr(dc, rd);
4622
4623                 switch (xop) {
4624                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4625                     gen_address_mask(dc, cpu_addr);
4626                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4627                     break;
4628                 case 0x1:       /* ldub, load unsigned byte */
4629                     gen_address_mask(dc, cpu_addr);
4630                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4631                     break;
4632                 case 0x2:       /* lduh, load unsigned halfword */
4633                     gen_address_mask(dc, cpu_addr);
4634                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4635                     break;
4636                 case 0x3:       /* ldd, load double word */
4637                     if (rd & 1)
4638                         goto illegal_insn;
4639                     else {
4640                         TCGv_i32 r_const;
4641                         TCGv_i64 t64;
4642
4643                         save_state(dc);
4644                         r_const = tcg_const_i32(7);
4645                         /* XXX remove alignment check */
4646                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4647                         tcg_temp_free_i32(r_const);
4648                         gen_address_mask(dc, cpu_addr);
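                             /* The 64-bit value is split: the low word is
                                stored to rd + 1 here, the high word stays in
                                cpu_val and is written to rd after the switch. */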
4649                         t64 = tcg_temp_new_i64();
4650                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4651                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4652                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4653                         gen_store_gpr(dc, rd + 1, cpu_val);
4654                         tcg_gen_shri_i64(t64, t64, 32);
4655                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4656                         tcg_temp_free_i64(t64);
4657                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4658                     }
4659                     break;
4660                 case 0x9:       /* ldsb, load signed byte */
4661                     gen_address_mask(dc, cpu_addr);
4662                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4663                     break;
4664                 case 0xa:       /* ldsh, load signed halfword */
4665                     gen_address_mask(dc, cpu_addr);
4666                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4667                     break;
4668                 case 0xd:       /* ldstub -- XXX: should be atomic */
4669                     {
4670                         TCGv r_const;
4671
4672                         gen_address_mask(dc, cpu_addr);
4673                         tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4674                         r_const = tcg_const_tl(0xff);
4675                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4676                         tcg_temp_free(r_const);
4677                     }
4678                     break;
4679                 case 0x0f:
4680                     /* swap, swap register with memory -- XXX: should also be atomic */
4681                     {
4682                         TCGv t0 = get_temp_tl(dc);
4683                         CHECK_IU_FEATURE(dc, SWAP);
4684                         cpu_src1 = gen_load_gpr(dc, rd);
4685                         gen_address_mask(dc, cpu_addr);
4686                         tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4687                         tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4688                         tcg_gen_mov_tl(cpu_val, t0);
4689                     }
4690                     break;
4691 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4692                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4693 #ifndef TARGET_SPARC64
4694                     if (IS_IMM)
4695                         goto illegal_insn;
4696                     if (!supervisor(dc))
4697                         goto priv_insn;
4698 #endif
4699                     save_state(dc);
4700                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4701                     break;
4702                 case 0x11:      /* lduba, load unsigned byte alternate */
4703 #ifndef TARGET_SPARC64
4704                     if (IS_IMM)
4705                         goto illegal_insn;
4706                     if (!supervisor(dc))
4707                         goto priv_insn;
4708 #endif
4709                     save_state(dc);
4710                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4711                     break;
4712                 case 0x12:      /* lduha, load unsigned halfword alternate */
4713 #ifndef TARGET_SPARC64
4714                     if (IS_IMM)
4715                         goto illegal_insn;
4716                     if (!supervisor(dc))
4717                         goto priv_insn;
4718 #endif
4719                     save_state(dc);
4720                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4721                     break;
4722                 case 0x13:      /* ldda, load double word alternate */
4723 #ifndef TARGET_SPARC64
4724                     if (IS_IMM)
4725                         goto illegal_insn;
4726                     if (!supervisor(dc))
4727                         goto priv_insn;
4728 #endif
4729                     if (rd & 1)
4730                         goto illegal_insn;
4731                     save_state(dc);
4732                     gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4733                     goto skip_move;
4734                 case 0x19:      /* ldsba, load signed byte alternate */
4735 #ifndef TARGET_SPARC64
4736                     if (IS_IMM)
4737                         goto illegal_insn;
4738                     if (!supervisor(dc))
4739                         goto priv_insn;
4740 #endif
4741                     save_state(dc);
4742                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4743                     break;
4744                 case 0x1a:      /* ldsha, load signed halfword alternate */
4745 #ifndef TARGET_SPARC64
4746                     if (IS_IMM)
4747                         goto illegal_insn;
4748                     if (!supervisor(dc))
4749                         goto priv_insn;
4750 #endif
4751                     save_state(dc);
4752                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4753                     break;
4754                 case 0x1d:      /* ldstuba -- XXX: should be atomic */
4755 #ifndef TARGET_SPARC64
4756                     if (IS_IMM)
4757                         goto illegal_insn;
4758                     if (!supervisor(dc))
4759                         goto priv_insn;
4760 #endif
4761                     save_state(dc);
4762                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4763                     break;
4764                 case 0x1f:      /* swapa, swap reg with alt. memory -- XXX:
4765                                    should also be atomic */
4766                     CHECK_IU_FEATURE(dc, SWAP);
4767 #ifndef TARGET_SPARC64
4768                     if (IS_IMM)
4769                         goto illegal_insn;
4770                     if (!supervisor(dc))
4771                         goto priv_insn;
4772 #endif
4773                     save_state(dc);
4774                     cpu_src1 = gen_load_gpr(dc, rd);
4775                     gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4776                     break;
4777
4778 #ifndef TARGET_SPARC64
4779                 case 0x30: /* ldc */
4780                 case 0x31: /* ldcsr */
4781                 case 0x33: /* lddc */
4782                     goto ncp_insn;
4783 #endif
4784 #endif
4785 #ifdef TARGET_SPARC64
4786                 case 0x08: /* V9 ldsw */
4787                     gen_address_mask(dc, cpu_addr);
4788                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4789                     break;
4790                 case 0x0b: /* V9 ldx */
4791                     gen_address_mask(dc, cpu_addr);
4792                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4793                     break;
4794                 case 0x18: /* V9 ldswa */
4795                     save_state(dc);
4796                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4797                     break;
4798                 case 0x1b: /* V9 ldxa */
4799                     save_state(dc);
4800                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4801                     break;
4802                 case 0x2d: /* V9 prefetch, no effect */
4803                     goto skip_move;
4804                 case 0x30: /* V9 ldfa */
4805                     if (gen_trap_ifnofpu(dc)) {
4806                         goto jmp_insn;
4807                     }
4808                     save_state(dc);
4809                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4810                     gen_update_fprs_dirty(rd);
4811                     goto skip_move;
4812                 case 0x33: /* V9 lddfa */
4813                     if (gen_trap_ifnofpu(dc)) {
4814                         goto jmp_insn;
4815                     }
4816                     save_state(dc);
4817                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4818                     gen_update_fprs_dirty(DFPREG(rd));
4819                     goto skip_move;
4820                 case 0x3d: /* V9 prefetcha, no effect */
4821                     goto skip_move;
4822                 case 0x32: /* V9 ldqfa */
4823                     CHECK_FPU_FEATURE(dc, FLOAT128);
4824                     if (gen_trap_ifnofpu(dc)) {
4825                         goto jmp_insn;
4826                     }
4827                     save_state(dc);
4828                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4829                     gen_update_fprs_dirty(QFPREG(rd));
4830                     goto skip_move;
4831 #endif
4832                 default:
4833                     goto illegal_insn;
4834                 }
4835                 gen_store_gpr(dc, rd, cpu_val);
4836 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4837             skip_move: ;
4838 #endif
4839             } else if (xop >= 0x20 && xop < 0x24) {
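                     /* Floating-point loads: ldf, ldfsr/ldxfsr, ldqf and lddf. */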
4840                 TCGv t0;
4841
4842                 if (gen_trap_ifnofpu(dc)) {
4843                     goto jmp_insn;
4844                 }
4845                 save_state(dc);
4846                 switch (xop) {
4847                 case 0x20:      /* ldf, load fpreg */
4848                     gen_address_mask(dc, cpu_addr);
4849                     t0 = get_temp_tl(dc);
4850                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4851                     cpu_dst_32 = gen_dest_fpr_F(dc);
4852                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4853                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4854                     break;
4855                 case 0x21:      /* ldfsr, V9 ldxfsr */
4856 #ifdef TARGET_SPARC64
4857                     gen_address_mask(dc, cpu_addr);
4858                     if (rd == 1) {
4859                         TCGv_i64 t64 = tcg_temp_new_i64();
4860                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4861                         gen_helper_ldxfsr(cpu_env, t64);
4862                         tcg_temp_free_i64(t64);
4863                         break;
4864                     }
4865 #endif
4866                     cpu_dst_32 = get_temp_i32(dc);
4867                     t0 = get_temp_tl(dc);
4868                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4869                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4870                     gen_helper_ldfsr(cpu_env, cpu_dst_32);
4871                     break;
4872                 case 0x22:      /* ldqf, load quad fpreg */
4873                     {
4874                         TCGv_i32 r_const;
4875
4876                         CHECK_FPU_FEATURE(dc, FLOAT128);
4877                         r_const = tcg_const_i32(dc->mem_idx);
4878                         gen_address_mask(dc, cpu_addr);
4879                         gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4880                         tcg_temp_free_i32(r_const);
4881                         gen_op_store_QT0_fpr(QFPREG(rd));
4882                         gen_update_fprs_dirty(QFPREG(rd));
4883                     }
4884                     break;
4885                 case 0x23:      /* lddf, load double fpreg */
4886                     gen_address_mask(dc, cpu_addr);
4887                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4888                     tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4889                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4890                     break;
4891                 default:
4892                     goto illegal_insn;
4893                 }
4894             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4895                        xop == 0xe || xop == 0x1e) {
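                     /* Integer stores, including the alternate-space forms. */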
4896                 TCGv cpu_val = gen_load_gpr(dc, rd);
4897
4898                 switch (xop) {
4899                 case 0x4: /* st, store word */
4900                     gen_address_mask(dc, cpu_addr);
4901                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4902                     break;
4903                 case 0x5: /* stb, store byte */
4904                     gen_address_mask(dc, cpu_addr);
4905                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4906                     break;
4907                 case 0x6: /* sth, store halfword */
4908                     gen_address_mask(dc, cpu_addr);
4909                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4910                     break;
4911                 case 0x7: /* std, store double word */
4912                     if (rd & 1)
4913                         goto illegal_insn;
4914                     else {
4915                         TCGv_i32 r_const;
4916                         TCGv_i64 t64;
4917                         TCGv lo;
4918
4919                         save_state(dc);
4920                         gen_address_mask(dc, cpu_addr);
4921                         r_const = tcg_const_i32(7);
4922                         /* XXX remove alignment check */
4923                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4924                         tcg_temp_free_i32(r_const);
4925                         lo = gen_load_gpr(dc, rd + 1);
4926
4927                         t64 = tcg_temp_new_i64();
4928                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
4929                         tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
4930                         tcg_temp_free_i64(t64);
4931                     }
4932                     break;
4933 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4934                 case 0x14: /* sta, V9 stwa, store word alternate */
4935 #ifndef TARGET_SPARC64
4936                     if (IS_IMM)
4937                         goto illegal_insn;
4938                     if (!supervisor(dc))
4939                         goto priv_insn;
4940 #endif
4941                     save_state(dc);
4942                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4943                     dc->npc = DYNAMIC_PC;
4944                     break;
4945                 case 0x15: /* stba, store byte alternate */
4946 #ifndef TARGET_SPARC64
4947                     if (IS_IMM)
4948                         goto illegal_insn;
4949                     if (!supervisor(dc))
4950                         goto priv_insn;
4951 #endif
4952                     save_state(dc);
4953                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4954                     dc->npc = DYNAMIC_PC;
4955                     break;
4956                 case 0x16: /* stha, store halfword alternate */
4957 #ifndef TARGET_SPARC64
4958                     if (IS_IMM)
4959                         goto illegal_insn;
4960                     if (!supervisor(dc))
4961                         goto priv_insn;
4962 #endif
4963                     save_state(dc);
4964                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4965                     dc->npc = DYNAMIC_PC;
4966                     break;
4967                 case 0x17: /* stda, store double word alternate */
4968 #ifndef TARGET_SPARC64
4969                     if (IS_IMM)
4970                         goto illegal_insn;
4971                     if (!supervisor(dc))
4972                         goto priv_insn;
4973 #endif
4974                     if (rd & 1)
4975                         goto illegal_insn;
4976                     else {
4977                         save_state(dc);
4978                         gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
4979                     }
4980                     break;
4981 #endif
4982 #ifdef TARGET_SPARC64
4983                 case 0x0e: /* V9 stx */
4984                     gen_address_mask(dc, cpu_addr);
4985                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4986                     break;
4987                 case 0x1e: /* V9 stxa */
4988                     save_state(dc);
4989                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
4990                     dc->npc = DYNAMIC_PC;
4991                     break;
4992 #endif
4993                 default:
4994                     goto illegal_insn;
4995                 }
4996             } else if (xop > 0x23 && xop < 0x28) {
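                     /* Floating-point stores: stf, stfsr/stxfsr, stqf/stdfq and stdf. */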
4997                 if (gen_trap_ifnofpu(dc)) {
4998                     goto jmp_insn;
4999                 }
5000                 save_state(dc);
5001                 switch (xop) {
5002                 case 0x24: /* stf, store fpreg */
5003                     {
5004                         TCGv t = get_temp_tl(dc);
5005                         gen_address_mask(dc, cpu_addr);
5006                         cpu_src1_32 = gen_load_fpr_F(dc, rd);
5007                         tcg_gen_ext_i32_tl(t, cpu_src1_32);
5008                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5009                     }
5010                     break;
5011                 case 0x25: /* stfsr, V9 stxfsr */
5012                     {
5013                         TCGv t = get_temp_tl(dc);
5014
5015                         tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
5016 #ifdef TARGET_SPARC64
5017                         gen_address_mask(dc, cpu_addr);
5018                         if (rd == 1) {
5019                             tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
5020                             break;
5021                         }
5022 #endif
5023                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5024                     }
5025                     break;
5026                 case 0x26:
5027 #ifdef TARGET_SPARC64
5028                     /* V9 stqf, store quad fpreg */
5029                     {
5030                         TCGv_i32 r_const;
5031
5032                         CHECK_FPU_FEATURE(dc, FLOAT128);
5033                         gen_op_load_fpr_QT0(QFPREG(rd));
5034                         r_const = tcg_const_i32(dc->mem_idx);
5035                         gen_address_mask(dc, cpu_addr);
5036                         gen_helper_stqf(cpu_env, cpu_addr, r_const);
5037                         tcg_temp_free_i32(r_const);
5038                     }
5039                     break;
5040 #else /* !TARGET_SPARC64 */
5041                     /* stdfq, store floating point queue */
5042 #if defined(CONFIG_USER_ONLY)
5043                     goto illegal_insn;
5044 #else
5045                     if (!supervisor(dc))
5046                         goto priv_insn;
5047                     if (gen_trap_ifnofpu(dc)) {
5048                         goto jmp_insn;
5049                     }
5050                     goto nfq_insn;
5051 #endif
5052 #endif
5053                 case 0x27: /* stdf, store double fpreg */
5054                     gen_address_mask(dc, cpu_addr);
5055                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5056                     tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5057                     break;
5058                 default:
5059                     goto illegal_insn;
5060                 }
5061             } else if (xop > 0x33 && xop < 0x3f) {
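                     /* V9 alternate-space FP stores, casa/casxa, and the V8
                        coprocessor stores. */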
5062                 save_state(dc);
5063                 switch (xop) {
5064 #ifdef TARGET_SPARC64
5065                 case 0x34: /* V9 stfa */
5066                     if (gen_trap_ifnofpu(dc)) {
5067                         goto jmp_insn;
5068                     }
5069                     gen_stf_asi(cpu_addr, insn, 4, rd);
5070                     break;
5071                 case 0x36: /* V9 stqfa */
5072                     {
5073                         TCGv_i32 r_const;
5074
5075                         CHECK_FPU_FEATURE(dc, FLOAT128);
5076                         if (gen_trap_ifnofpu(dc)) {
5077                             goto jmp_insn;
5078                         }
5079                         r_const = tcg_const_i32(7);
5080                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
5081                         tcg_temp_free_i32(r_const);
5082                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5083                     }
5084                     break;
5085                 case 0x37: /* V9 stdfa */
5086                     if (gen_trap_ifnofpu(dc)) {
5087                         goto jmp_insn;
5088                     }
5089                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5090                     break;
5091                 case 0x3e: /* V9 casxa */
5092                     rs2 = GET_FIELD(insn, 27, 31);
5093                     cpu_src2 = gen_load_gpr(dc, rs2);
5094                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5095                     break;
5096 #else
5097                 case 0x34: /* stc */
5098                 case 0x35: /* stcsr */
5099                 case 0x36: /* stdcq */
5100                 case 0x37: /* stdc */
5101                     goto ncp_insn;
5102 #endif
5103 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5104                 case 0x3c: /* V9 or LEON3 casa */
5105 #ifndef TARGET_SPARC64
5106                     CHECK_IU_FEATURE(dc, CASA);
5107                     if (IS_IMM) {
5108                         goto illegal_insn;
5109                     }
5110                     if (!supervisor(dc)) {
5111                         goto priv_insn;
5112                     }
5113 #endif
5114                     rs2 = GET_FIELD(insn, 27, 31);
5115                     cpu_src2 = gen_load_gpr(dc, rs2);
5116                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5117                     break;
5118 #endif
5119                 default:
5120                     goto illegal_insn;
5121                 }
5122             } else {
5123                 goto illegal_insn;
5124             }
5125         }
5126         break;
5127     }
5128     /* default case for non-jump instructions */
5129     if (dc->npc == DYNAMIC_PC) {
5130         dc->pc = DYNAMIC_PC;
5131         gen_op_next_insn();
5132     } else if (dc->npc == JUMP_PC) {
5133         /* we can do a static jump */
5134         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5135         dc->is_br = 1;
5136     } else {
5137         dc->pc = dc->npc;
5138         dc->npc = dc->npc + 4;
5139     }
5140  jmp_insn:
5141     goto egress;
5142  illegal_insn:
5143     {
5144         TCGv_i32 r_const;
5145
5146         save_state(dc);
5147         r_const = tcg_const_i32(TT_ILL_INSN);
5148         gen_helper_raise_exception(cpu_env, r_const);
5149         tcg_temp_free_i32(r_const);
5150         dc->is_br = 1;
5151     }
5152     goto egress;
5153  unimp_flush:
5154     {
5155         TCGv_i32 r_const;
5156
5157         save_state(dc);
5158         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5159         gen_helper_raise_exception(cpu_env, r_const);
5160         tcg_temp_free_i32(r_const);
5161         dc->is_br = 1;
5162     }
5163     goto egress;
5164 #if !defined(CONFIG_USER_ONLY)
5165  priv_insn:
5166     {
5167         TCGv_i32 r_const;
5168
5169         save_state(dc);
5170         r_const = tcg_const_i32(TT_PRIV_INSN);
5171         gen_helper_raise_exception(cpu_env, r_const);
5172         tcg_temp_free_i32(r_const);
5173         dc->is_br = 1;
5174     }
5175     goto egress;
5176 #endif
5177  nfpu_insn:
5178     save_state(dc);
5179     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5180     dc->is_br = 1;
5181     goto egress;
5182 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5183  nfq_insn:
5184     save_state(dc);
5185     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5186     dc->is_br = 1;
5187     goto egress;
5188 #endif
5189 #ifndef TARGET_SPARC64
5190  ncp_insn:
5191     {
5192         TCGv_i32 r_const;
5193
5194         save_state(dc);
5195         r_const = tcg_const_i32(TT_NCP_INSN);
5196         gen_helper_raise_exception(cpu_env, r_const);
5197         tcg_temp_free_i32(r_const);
5198         dc->is_br = 1;
5199     }
5200     goto egress;
5201 #endif
5202  egress:
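         /* Free the per-instruction temporaries handed out by get_temp_i32()
            and get_temp_tl(). */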
5203     if (dc->n_t32 != 0) {
5204         int i;
5205         for (i = dc->n_t32 - 1; i >= 0; --i) {
5206             tcg_temp_free_i32(dc->t32[i]);
5207         }
5208         dc->n_t32 = 0;
5209     }
5210     if (dc->n_ttl != 0) {
5211         int i;
5212         for (i = dc->n_ttl - 1; i >= 0; --i) {
5213             tcg_temp_free(dc->ttl[i]);
5214         }
5215         dc->n_ttl = 0;
5216     }
5217 }
5218
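     /* Translate a block starting at tb->pc.  When spc is true we are
        re-translating for state restoration: per-instruction pc/npc values are
        recorded in the gen_opc arrays for restore_state_to_opc() instead of
        finalizing tb->size and tb->icount. */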
5219 static inline void gen_intermediate_code_internal(SPARCCPU *cpu,
5220                                                   TranslationBlock *tb,
5221                                                   bool spc)
5222 {
5223     CPUState *cs = CPU(cpu);
5224     CPUSPARCState *env = &cpu->env;
5225     target_ulong pc_start, last_pc;
5226     DisasContext dc1, *dc = &dc1;
5227     CPUBreakpoint *bp;
5228     int j, lj = -1;
5229     int num_insns;
5230     int max_insns;
5231     unsigned int insn;
5232
5233     memset(dc, 0, sizeof(DisasContext));
5234     dc->tb = tb;
5235     pc_start = tb->pc;
5236     dc->pc = pc_start;
5237     last_pc = dc->pc;
5238     dc->npc = (target_ulong) tb->cs_base;
5239     dc->cc_op = CC_OP_DYNAMIC;
5240     dc->mem_idx = cpu_mmu_index(env);
5241     dc->def = env->def;
5242     dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5243     dc->address_mask_32bit = tb_am_enabled(tb->flags);
5244     dc->singlestep = (cs->singlestep_enabled || singlestep);
5245
5246     num_insns = 0;
5247     max_insns = tb->cflags & CF_COUNT_MASK;
5248     if (max_insns == 0)
5249         max_insns = CF_COUNT_MASK;
5250     gen_tb_start(tb);
5251     do {
5252         if (unlikely(!QTAILQ_EMPTY(&cs->breakpoints))) {
5253             QTAILQ_FOREACH(bp, &cs->breakpoints, entry) {
5254                 if (bp->pc == dc->pc) {
5255                     if (dc->pc != pc_start)
5256                         save_state(dc);
5257                     gen_helper_debug(cpu_env);
5258                     tcg_gen_exit_tb(0);
5259                     dc->is_br = 1;
5260                     goto exit_gen_loop;
5261                 }
5262             }
5263         }
5264         if (spc) {
5265             qemu_log("Search PC...\n");
5266             j = tcg_op_buf_count();
5267             if (lj < j) {
5268                 lj++;
5269                 while (lj < j)
5270                     tcg_ctx.gen_opc_instr_start[lj++] = 0;
5271                 tcg_ctx.gen_opc_pc[lj] = dc->pc;
5272                 gen_opc_npc[lj] = dc->npc;
5273                 tcg_ctx.gen_opc_instr_start[lj] = 1;
5274                 tcg_ctx.gen_opc_icount[lj] = num_insns;
5275             }
5276         }
5277         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5278             gen_io_start();
5279         last_pc = dc->pc;
5280         insn = cpu_ldl_code(env, dc->pc);
5281
5282         disas_sparc_insn(dc, insn);
5283         num_insns++;
5284
5285         if (dc->is_br)
5286             break;
5287         /* if the next PC is not sequential, we stop translation now */
5288         if (dc->pc != (last_pc + 4))
5289             break;
5290         /* if we reach a page boundary, we stop generation so that the
5291            PC of a TT_TFAULT exception is always in the right page */
5292         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5293             break;
5294                 /* in single-step mode, we generate only one instruction and
5295                    then raise an exception */
5296         if (dc->singlestep) {
5297             break;
5298         }
5299     } while (!tcg_op_buf_full() &&
5300              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5301              num_insns < max_insns);
5302
5303  exit_gen_loop:
5304     if (tb->cflags & CF_LAST_IO) {
5305         gen_io_end();
5306     }
5307     if (!dc->is_br) {
5308         if (dc->pc != DYNAMIC_PC &&
5309             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5310             /* static PC and NPC: we can use direct chaining */
5311             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5312         } else {
5313             if (dc->pc != DYNAMIC_PC) {
5314                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5315             }
5316             save_npc(dc);
5317             tcg_gen_exit_tb(0);
5318         }
5319     }
5320     gen_tb_end(tb, num_insns);
5321
5322     if (spc) {
5323         j = tcg_op_buf_count();
5324         lj++;
5325         while (lj <= j)
5326             tcg_ctx.gen_opc_instr_start[lj++] = 0;
5327 #if 0
5328         log_page_dump();
5329 #endif
5330         gen_opc_jump_pc[0] = dc->jump_pc[0];
5331         gen_opc_jump_pc[1] = dc->jump_pc[1];
5332     } else {
5333         tb->size = last_pc + 4 - pc_start;
5334         tb->icount = num_insns;
5335     }
5336 #ifdef DEBUG_DISAS
5337     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5338         qemu_log("--------------\n");
5339         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5340         log_target_disas(env, pc_start, last_pc + 4 - pc_start, 0);
5341         qemu_log("\n");
5342     }
5343 #endif
5344 }
5345
5346 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5347 {
5348     gen_intermediate_code_internal(sparc_env_get_cpu(env), tb, false);
5349 }
5350
5351 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5352 {
5353     gen_intermediate_code_internal(sparc_env_get_cpu(env), tb, true);
5354 }
5355
5356 void gen_intermediate_code_init(CPUSPARCState *env)
5357 {
5358     unsigned int i;
5359     static int inited;
5360     static const char * const gregnames[8] = {
5361         NULL, // g0 not used
5362         "g1",
5363         "g2",
5364         "g3",
5365         "g4",
5366         "g5",
5367         "g6",
5368         "g7",
5369     };
5370     static const char * const fregnames[32] = {
5371         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5372         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5373         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5374         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5375     };
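         /* The FP register file is modeled as TARGET_DPREGS 64-bit globals,
            one per even/odd single-precision pair, hence the even-only names
            above. */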
5376
5377     /* init various static tables */
5378     if (!inited) {
5379         inited = 1;
5380
5381         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5382         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5383                                              offsetof(CPUSPARCState, regwptr),
5384                                              "regwptr");
5385 #ifdef TARGET_SPARC64
5386         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5387                                          "xcc");
5388         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5389                                          "asi");
5390         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5391                                           "fprs");
5392         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5393                                      "gsr");
5394         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5395                                            offsetof(CPUSPARCState, tick_cmpr),
5396                                            "tick_cmpr");
5397         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5398                                             offsetof(CPUSPARCState, stick_cmpr),
5399                                             "stick_cmpr");
5400         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5401                                              offsetof(CPUSPARCState, hstick_cmpr),
5402                                              "hstick_cmpr");
5403         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5404                                        "hintp");
5405         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5406                                       "htba");
5407         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5408                                       "hver");
5409         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5410                                      offsetof(CPUSPARCState, ssr), "ssr");
5411         cpu_ver = tcg_global_mem_new(TCG_AREG0,
5412                                      offsetof(CPUSPARCState, version), "ver");
5413         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5414                                              offsetof(CPUSPARCState, softint),
5415                                              "softint");
5416 #else
5417         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5418                                      "wim");
5419 #endif
5420         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5421                                       "cond");
5422         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5423                                         "cc_src");
5424         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5425                                          offsetof(CPUSPARCState, cc_src2),
5426                                          "cc_src2");
5427         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5428                                         "cc_dst");
5429         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5430                                            "cc_op");
5431         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5432                                          "psr");
5433         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5434                                      "fsr");
5435         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5436                                     "pc");
5437         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5438                                      "npc");
5439         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5440 #ifndef CONFIG_USER_ONLY
5441         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5442                                      "tbr");
5443 #endif
5444         for (i = 1; i < 8; i++) {
5445             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5446                                               offsetof(CPUSPARCState, gregs[i]),
5447                                               gregnames[i]);
5448         }
5449         for (i = 0; i < TARGET_DPREGS; i++) {
5450             cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5451                                                 offsetof(CPUSPARCState, fpr[i]),
5452                                                 fregnames[i]);
5453         }
5454     }
5455 }
5456
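     /* The recorded npc may be a real address or one of the DYNAMIC_PC (1)
        and JUMP_PC (2) markers used by the translator. */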
5457 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5458 {
5459     target_ulong npc;
5460     env->pc = tcg_ctx.gen_opc_pc[pc_pos];
5461     npc = gen_opc_npc[pc_pos];
5462     if (npc == 1) {
5463         /* dynamic NPC: already stored */
5464     } else if (npc == 2) {
5465         /* jump PC: use 'cond' and the jump targets of the translation */
5466         if (env->cond) {
5467             env->npc = gen_opc_jump_pc[0];
5468         } else {
5469             env->npc = gen_opc_jump_pc[1];
5470         }
5471     } else {
5472         env->npc = npc;
5473     }
5474 }