]> Git Repo - qemu.git/blob - target-sparc/translate.c
target-sparc: Fall through from not-taken trap
[qemu.git] / target-sparc / translate.c
1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <[email protected]>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31
32 #define GEN_HELPER 1
33 #include "helper.h"
34
35 #define DEBUG_DISAS
36
37 #define DYNAMIC_PC  1 /* dynamic pc value */
38 #define JUMP_PC     2 /* dynamic pc value which takes only two values
39                          according to jump_pc[T2] */
40
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* local register indexes (only used inside old micro ops) */
62 static TCGv cpu_tmp0;
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
70
71 #include "gen-icount.h"
72
73 typedef struct DisasContext {
74     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
75     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77     int is_br;
78     int mem_idx;
79     int fpu_enabled;
80     int address_mask_32bit;
81     int singlestep;
82     uint32_t cc_op;  /* current CC operation */
83     struct TranslationBlock *tb;
84     sparc_def_t *def;
85     TCGv_i32 t32[3];
86     int n_t32;
87 } DisasContext;
88
89 typedef struct {
90     TCGCond cond;
91     bool is_bool;
92     bool g1, g2;
93     TCGv c1, c2;
94 } DisasCompare;
95
96 // This function uses non-native bit order
97 #define GET_FIELD(X, FROM, TO)                                  \
98     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
99
100 // This function uses the order in the manuals, i.e. bit 0 is 2^0
101 #define GET_FIELD_SP(X, FROM, TO)               \
102     GET_FIELD(X, 31 - (TO), 31 - (FROM))
103
104 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
105 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
106
107 #ifdef TARGET_SPARC64
108 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
109 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
110 #else
111 #define DFPREG(r) (r & 0x1e)
112 #define QFPREG(r) (r & 0x1c)
113 #endif
114
115 #define UA2005_HTRAP_MASK 0xff
116 #define V8_TRAP_MASK 0x7f
117
/* Sign-extend the low LEN bits of X (1 <= LEN <= 32) to a full int.
   The classic "(x << n) >> n" form left-shifts negative values, which
   is undefined behavior in C99; do the extension in unsigned
   arithmetic instead: mask to the field, then flip and subtract the
   sign bit.  */
static int sign_extend(int x, int len)
{
    unsigned int val = (unsigned int)x;
    unsigned int sign = 1u << (len - 1);

    if (len < 32) {
        /* Discard any bits above the field, as the shifts used to.  */
        val &= (1u << len) - 1;
    }
    return (int)((val ^ sign) - sign);
}
123
124 #define IS_IMM (insn & (1<<13))
125
/* Mark the FP register file dirty in FPRS after a write to register RD:
   bit 0 covers f0..f31, bit 1 covers f32..f63 (sparc64 only; no-op on
   32-bit targets, which have no FPRS register).  */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
132
133 /* floating point registers moves */
/* Return a 32-bit view of single-precision FP register SRC.  Singles
   are stored two to a 64-bit cpu_fpr[] element: odd-numbered regs in
   the low half, even-numbered in the high half.
   On a 32-bit host an i64 global is physically a register pair, so the
   matching half can be handed back directly.  On a 64-bit host the odd
   half aliases the i64 global itself; the even (high) half must be
   shifted out into a temporary, which is remembered in dc->t32[] so it
   can be freed at the end of the instruction.  */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        /* Low half: reinterpret the i64 global as an i32 in place.  */
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        /* Local (branch-safe) temp, released later via dc->t32[].  */
        TCGv_i32 ret = tcg_temp_local_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}
160
/* Store the 32-bit value V into single-precision FP register DST,
   writing only the matching half of the containing 64-bit cpu_fpr[]
   element (odd regs = low half, even regs = high half), and mark the
   register file dirty in FPRS.  */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* Reinterpret V as i64 and deposit it into the proper 32-bit lane.  */
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}
176
/* Return a scratch i32 for building a single-precision result in
   (the shared temporary cpu_tmp32).  */
static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}
181
182 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
183 {
184     src = DFPREG(src);
185     return cpu_fpr[src / 2];
186 }
187
/* Store the 64-bit value V into double-precision FP register DST
   (even/odd encoding folded by DFPREG) and mark FPRS dirty.  */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}
194
/* Return a scratch i64 for building a double-precision result in
   (the shared temporary cpu_tmp64).  */
static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}
199
/* Copy quad FP register SRC (two consecutive 64-bit halves) into the
   qt0 staging slot in CPUSPARCState, for use by quad helpers.  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
207
/* Copy quad FP register SRC into the qt1 staging slot in
   CPUSPARCState (second operand of two-operand quad helpers).  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
215
/* Copy the qt0 staging slot back into quad FP register DST
   (the inverse of gen_op_load_fpr_QT0).  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
223
224 #ifdef TARGET_SPARC64
/* Register-to-register move of a quad FP value (two i64 halves) from
   RS to RD, marking FPRS dirty for the destination.  */
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
234 #endif
235
236 /* moves */
237 #ifdef CONFIG_USER_ONLY
238 #define supervisor(dc) 0
239 #ifdef TARGET_SPARC64
240 #define hypervisor(dc) 0
241 #endif
242 #else
243 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
244 #ifdef TARGET_SPARC64
245 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
246 #else
247 #endif
248 #endif
249
250 #ifdef TARGET_SPARC64
251 #ifndef TARGET_ABI32
252 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
253 #else
254 #define AM_CHECK(dc) (1)
255 #endif
256 #endif
257
258 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
259 {
260 #ifdef TARGET_SPARC64
261     if (AM_CHECK(dc))
262         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
263 #endif
264 }
265
266 static inline void gen_movl_reg_TN(int reg, TCGv tn)
267 {
268     if (reg == 0)
269         tcg_gen_movi_tl(tn, 0);
270     else if (reg < 8)
271         tcg_gen_mov_tl(tn, cpu_gregs[reg]);
272     else {
273         tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
274     }
275 }
276
277 static inline void gen_movl_TN_reg(int reg, TCGv tn)
278 {
279     if (reg == 0)
280         return;
281     else if (reg < 8)
282         tcg_gen_mov_tl(cpu_gregs[reg], tn);
283     else {
284         tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
285     }
286 }
287
/* Emit a jump to (PC, NPC).  When both targets share a page with the
   current TB and we are not single-stepping, chain directly to the
   next TB with goto_tb/exit_tb(tb|tb_num); otherwise store pc/npc and
   exit to the main loop to re-resolve the destination.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep)  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
309
310 // XXX suboptimal
311 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
312 {
313     tcg_gen_extu_i32_tl(reg, src);
314     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
315     tcg_gen_andi_tl(reg, reg, 0x1);
316 }
317
318 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
319 {
320     tcg_gen_extu_i32_tl(reg, src);
321     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
322     tcg_gen_andi_tl(reg, reg, 0x1);
323 }
324
325 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
326 {
327     tcg_gen_extu_i32_tl(reg, src);
328     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
329     tcg_gen_andi_tl(reg, reg, 0x1);
330 }
331
332 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
333 {
334     tcg_gen_extu_i32_tl(reg, src);
335     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
336     tcg_gen_andi_tl(reg, reg, 0x1);
337 }
338
/* dst = src1 + immediate SRC2, recording both operands and the result
   in the cc_* globals for lazy condition-code evaluation.  */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
346
/* dst = src1 + src2, recording operands and result in the cc_*
   globals for lazy condition-code evaluation.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
354
/* Return a fresh i32 holding the 32-bit carry-out of the most recent
   add, recovered as (cc_dst < cc_src) unsigned.  On 64-bit targets the
   cc globals are first truncated to 32 bits.  Caller frees.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
380
/* Return a fresh i32 holding the 32-bit borrow of the most recent
   subtract, recovered as (cc_src < cc_src2) unsigned.  On 64-bit
   targets the cc globals are first truncated.  Caller frees.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
406
/* Emit dst = src1 + src2 + icc.C (ADDX/ADDXcc).  The carry-in is
   recovered as cheaply as the current dc->cc_op allows: known zero
   after logic/div ops, recomputed inline after add/sub families
   (or via the host's add2 carry on 32-bit hosts), otherwise through
   the compute_C_icc helper.  When UPDATE_CC, operands and result are
   left in the cc_* globals for lazy CC_OP_ADDX evaluation.  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit carry to target width before adding it in.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
482
/* dst = src1 - immediate SRC2, updating the lazy condition-code
   state.  A zero immediate degenerates to a move, so the cheaper
   CC_OP_LOGIC evaluation can be used instead of CC_OP_SUB.  */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
498
/* dst = src1 - src2, recording operands and result in the cc_*
   globals for lazy condition-code evaluation.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
506
/* Emit dst = src1 - src2 - icc.C (SUBX/SUBXcc), mirroring
   gen_op_addx_int: the borrow-in is known zero after logic/div ops,
   recomputed inline after add/sub families (host sub2 borrow on
   32-bit hosts), otherwise fetched via compute_C_icc.  When
   UPDATE_CC, leave state for lazy CC_OP_SUBX evaluation.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit borrow to target width before subtracting it.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
582
/* MULScc: one step of the V8 multiply-step algorithm.  Zeroes the
   addend when Y bit 0 is clear, rotates the low bit of src1 into the
   top of %y, shifts (N ^ V) into the top of src1, and adds - leaving
   operands in the cc_* globals for flag computation.  The emission
   order below matters: %y must be updated from the original cc_src
   before cc_src itself is shifted.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero;

    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
626
/* 32x32->64 multiply for UMUL/SMUL: truncate both operands to 32
   bits, extend them (sign- or zero- per SIGN_EXT) to 64, multiply,
   write the high 32 bits of the product to %y and the
   target-width-truncated product to DST.  */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* %y receives the upper 32 bits of the product.  */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
663
/* UMUL: unsigned 32x32->64 multiply (high half to %y).  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
669
/* SMUL: signed 32x32->64 multiply (high half to %y).  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
675
/* Integer condition evaluators for Bicc/Tcc/MOVcc.  Each computes its
   predicate from the PSR image in SRC (icc or xcc) and leaves a 0/1
   value in DST; the short comment above each gives the predicate in
   terms of the N/Z/V/C flags.  cpu_tmp0 is used as scratch.  */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
794
/*
  Floating-point condition evaluators for FBfcc/FMOVcc.  Each computes
  its predicate from the FCC field of the FSR image in SRC and leaves
  a 0/1 value in DST; FCC_OFFSET selects which of the four FCC fields
  to read.  cpu_tmp0 is used as scratch.

  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* reg = low bit (FCC0) of the selected FCC field */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* reg = high bit (FCC1) of the selected FCC field */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
944
945 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
946                                target_ulong pc2, TCGv r_cond)
947 {
948     int l1;
949
950     l1 = gen_new_label();
951
952     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
953
954     gen_goto_tb(dc, 0, pc1, pc1 + 4);
955
956     gen_set_label(l1);
957     gen_goto_tb(dc, 1, pc2, pc2 + 4);
958 }
959
960 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
961                                 target_ulong pc2, TCGv r_cond)
962 {
963     int l1;
964
965     l1 = gen_new_label();
966
967     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
968
969     gen_goto_tb(dc, 0, pc2, pc1);
970
971     gen_set_label(l1);
972     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
973 }
974
/* Resolve a pending JUMP_PC into cpu_npc branch-free:
   npc = cpu_cond ? jump_pc[0] : jump_pc[1], via movcond.  */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
987
/* call this function before using the condition register as it may
   have been set for a jump: resolves a pending JUMP_PC into cpu_npc
   so cpu_cond can safely be reused afterwards.  */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
997
/* Materialise dc->npc into the cpu_npc global: resolve a pending
   JUMP_PC, store a static npc, and leave an already-dynamic npc
   untouched (cpu_npc is current in that case).  */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1007
/* Synchronise the translator's view with the CPU state: write back
   pc/npc and make the lazily-tracked condition codes concrete in PSR
   (forcing CC_OP_FLAGS).  */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
    save_npc(dc);
}
1018
1019 static inline void gen_mov_pc_npc(DisasContext *dc)
1020 {
1021     if (dc->npc == JUMP_PC) {
1022         gen_generic_branch(dc);
1023         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1024         dc->pc = DYNAMIC_PC;
1025     } else if (dc->npc == DYNAMIC_PC) {
1026         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1027         dc->pc = DYNAMIC_PC;
1028     } else {
1029         dc->pc = dc->npc;
1030     }
1031 }
1032
/* Advance to the next instruction: pc = npc; npc += 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1038
/* Release the temporaries held by CMP.  Operands flagged g1/g2 are
   TCG globals owned elsewhere and must not be freed.  */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1048
/* Decode an integer condition-code test (COND field, 0x0..0xf) against
   the condition codes selected by CC into CMP.  The result is always
   materialized as a boolean in a fresh temporary: cmp->c1 != 0 means
   "condition holds".  */
static void gen_compare(DisasCompare *cmp, unsigned int cc, unsigned int cond,
                        DisasContext *dc)
{
    TCGv_i32 r_src;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

#ifdef TARGET_SPARC64
    /* CC selects the 64-bit (xcc) vs 32-bit (icc/psr) condition codes.  */
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    /* Flags may be tracked lazily; force them into PSR form before use.  */
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    /* Encodings: 0x0 = never, 0x8 = always; the others test N/Z/V/C
       combinations (cond and cond^8 are complementary pairs).  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1129
/* Decode a floating-point condition test (COND field) on fcc[CC] into
   CMP, materialized as a boolean in a fresh temporary.  */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Bit offset of the selected fcc field relative to fcc0's position
       in the FSR (fcc1..fcc3 live at bit 32 and above on V9); the
       gen_op_eval_fb* helpers apply this offset when extracting.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* FBfcc encodings: 0x0 = never, 0x8 = always; the rest test
       E/L/G/U (unordered) combinations of the fcc field.  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1209
1210 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1211                      DisasContext *dc)
1212 {
1213     DisasCompare cmp;
1214     gen_compare(&cmp, cc, cond, dc);
1215
1216     /* The interface is to return a boolean in r_dst.  */
1217     if (cmp.is_bool) {
1218         tcg_gen_mov_tl(r_dst, cmp.c1);
1219     } else {
1220         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1221     }
1222
1223     free_compare(&cmp);
1224 }
1225
1226 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1227 {
1228     DisasCompare cmp;
1229     gen_fcompare(&cmp, cc, cond);
1230
1231     /* The interface is to return a boolean in r_dst.  */
1232     if (cmp.is_bool) {
1233         tcg_gen_mov_tl(r_dst, cmp.c1);
1234     } else {
1235         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1236     }
1237
1238     free_compare(&cmp);
1239 }
1240
1241 #ifdef TARGET_SPARC64
/* Map a register-branch condition field (1..7) to the INVERSE of the
   TCG comparison against zero; the user re-inverts via
   tcg_invert_cond().  Entries 0 and 4 are reserved encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1253
1254 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1255 {
1256     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1257     cmp->is_bool = false;
1258     cmp->g1 = true;
1259     cmp->g2 = false;
1260     cmp->c1 = r_src;
1261     cmp->c2 = tcg_const_tl(0);
1262 }
1263
/* Reduce a register-vs-zero condition to a 0/1 value in R_DST.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
1274 #endif
1275
/* Translate a Bicc/BPcc branch.  OFFSET is the sign-extended, scaled
   displacement from dc->pc; INSN supplies the cond and annul fields;
   CC selects icc vs xcc on sparc64.  Updates the dc->pc/dc->npc
   tracking (and cpu_pc/cpu_npc where values are dynamic).  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect: wrap the target.  */
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken (bn); with the annul bit set the
           delay slot is skipped as well */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken (ba) */
        if (a) {
            /* annulled: jump straight to the target, skipping the
               delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* keep cpu_pc consistent in case npc was only known
               dynamically */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* conditional branch: evaluate the condition into cpu_cond */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            /* annulling branch: delay slot executes only when taken;
               gen_branch_a ends the TB */
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            /* record both possible npc values for later resolution */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
1324
/* Translate an FBfcc/FBPfcc branch; same structure as do_branch but
   the condition comes from the FSR's fcc[CC] field.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect: wrap the target.  */
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken; annul bit also skips the delay slot */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: skip the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* keep cpu_pc consistent in case npc was dynamic */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* conditional branch: evaluate fcc condition into cpu_cond */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            /* record both possible npc values for later resolution */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
1373
1374 #ifdef TARGET_SPARC64
/* Translate a SPARC64 branch-on-register (BPr): the condition is a
   comparison of R_REG against zero.  These branches have no "never"
   or "always" encodings, so the conditional path is taken always.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect: wrap the target.  */
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        /* annulling branch; gen_branch_a ends the TB */
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        /* record both possible npc values for later resolution */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1401
1402 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1403 {
1404     switch (fccno) {
1405     case 0:
1406         gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1407         break;
1408     case 1:
1409         gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1410         break;
1411     case 2:
1412         gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1413         break;
1414     case 3:
1415         gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1416         break;
1417     }
1418 }
1419
1420 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1421 {
1422     switch (fccno) {
1423     case 0:
1424         gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1425         break;
1426     case 1:
1427         gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1428         break;
1429     case 2:
1430         gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1431         break;
1432     case 3:
1433         gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1434         break;
1435     }
1436 }
1437
1438 static inline void gen_op_fcmpq(int fccno)
1439 {
1440     switch (fccno) {
1441     case 0:
1442         gen_helper_fcmpq(cpu_env);
1443         break;
1444     case 1:
1445         gen_helper_fcmpq_fcc1(cpu_env);
1446         break;
1447     case 2:
1448         gen_helper_fcmpq_fcc2(cpu_env);
1449         break;
1450     case 3:
1451         gen_helper_fcmpq_fcc3(cpu_env);
1452         break;
1453     }
1454 }
1455
1456 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1457 {
1458     switch (fccno) {
1459     case 0:
1460         gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1461         break;
1462     case 1:
1463         gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1464         break;
1465     case 2:
1466         gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1467         break;
1468     case 3:
1469         gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1470         break;
1471     }
1472 }
1473
1474 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1475 {
1476     switch (fccno) {
1477     case 0:
1478         gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1479         break;
1480     case 1:
1481         gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1482         break;
1483     case 2:
1484         gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1485         break;
1486     case 3:
1487         gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1488         break;
1489     }
1490 }
1491
1492 static inline void gen_op_fcmpeq(int fccno)
1493 {
1494     switch (fccno) {
1495     case 0:
1496         gen_helper_fcmpeq(cpu_env);
1497         break;
1498     case 1:
1499         gen_helper_fcmpeq_fcc1(cpu_env);
1500         break;
1501     case 2:
1502         gen_helper_fcmpeq_fcc2(cpu_env);
1503         break;
1504     case 3:
1505         gen_helper_fcmpeq_fcc3(cpu_env);
1506         break;
1507     }
1508 }
1509
1510 #else
1511
/* Pre-V9 targets have a single fcc field, so FCCNO is ignored.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}
1516
/* Pre-V9: single fcc field, FCCNO ignored.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}
1521
/* Pre-V9: single fcc field, FCCNO ignored; operands in QT0/QT1.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}
1526
/* Pre-V9 signaling compare: single fcc field, FCCNO ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}
1531
/* Pre-V9 signaling compare: single fcc field, FCCNO ignored.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}
1536
/* Pre-V9 signaling compare: single fcc field, FCCNO ignored.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
1541 #endif
1542
1543 static inline void gen_op_fpexception_im(int fsr_flags)
1544 {
1545     TCGv_i32 r_const;
1546
1547     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1548     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1549     r_const = tcg_const_i32(TT_FP_EXCP);
1550     gen_helper_raise_exception(cpu_env, r_const);
1551     tcg_temp_free_i32(r_const);
1552 }
1553
/* If the FPU is disabled, raise TT_NFPU_INSN and end the TB.
   Returns 1 when the trap was emitted (caller must abandon the
   insn), 0 when FP access is allowed.  User-only builds never trap
   here.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        /* Expose pc/npc/flags before raising the exception.  */
        save_state(dc);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1570
/* Clear FSR.ftt and the current IEEE exception (cexc) bits.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1575
1576 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1577                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1578 {
1579     TCGv_i32 dst, src;
1580
1581     src = gen_load_fpr_F(dc, rs);
1582     dst = gen_dest_fpr_F();
1583
1584     gen(dst, cpu_env, src);
1585
1586     gen_store_fpr_F(dc, rd, dst);
1587 }
1588
1589 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1590                                  void (*gen)(TCGv_i32, TCGv_i32))
1591 {
1592     TCGv_i32 dst, src;
1593
1594     src = gen_load_fpr_F(dc, rs);
1595     dst = gen_dest_fpr_F();
1596
1597     gen(dst, src);
1598
1599     gen_store_fpr_F(dc, rd, dst);
1600 }
1601
1602 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1603                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1604 {
1605     TCGv_i32 dst, src1, src2;
1606
1607     src1 = gen_load_fpr_F(dc, rs1);
1608     src2 = gen_load_fpr_F(dc, rs2);
1609     dst = gen_dest_fpr_F();
1610
1611     gen(dst, cpu_env, src1, src2);
1612
1613     gen_store_fpr_F(dc, rd, dst);
1614 }
1615
1616 #ifdef TARGET_SPARC64
1617 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1618                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1619 {
1620     TCGv_i32 dst, src1, src2;
1621
1622     src1 = gen_load_fpr_F(dc, rs1);
1623     src2 = gen_load_fpr_F(dc, rs2);
1624     dst = gen_dest_fpr_F();
1625
1626     gen(dst, src1, src2);
1627
1628     gen_store_fpr_F(dc, rd, dst);
1629 }
1630 #endif
1631
1632 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1633                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1634 {
1635     TCGv_i64 dst, src;
1636
1637     src = gen_load_fpr_D(dc, rs);
1638     dst = gen_dest_fpr_D();
1639
1640     gen(dst, cpu_env, src);
1641
1642     gen_store_fpr_D(dc, rd, dst);
1643 }
1644
1645 #ifdef TARGET_SPARC64
1646 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1647                                  void (*gen)(TCGv_i64, TCGv_i64))
1648 {
1649     TCGv_i64 dst, src;
1650
1651     src = gen_load_fpr_D(dc, rs);
1652     dst = gen_dest_fpr_D();
1653
1654     gen(dst, src);
1655
1656     gen_store_fpr_D(dc, rd, dst);
1657 }
1658 #endif
1659
1660 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1661                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1662 {
1663     TCGv_i64 dst, src1, src2;
1664
1665     src1 = gen_load_fpr_D(dc, rs1);
1666     src2 = gen_load_fpr_D(dc, rs2);
1667     dst = gen_dest_fpr_D();
1668
1669     gen(dst, cpu_env, src1, src2);
1670
1671     gen_store_fpr_D(dc, rd, dst);
1672 }
1673
1674 #ifdef TARGET_SPARC64
1675 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1676                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1677 {
1678     TCGv_i64 dst, src1, src2;
1679
1680     src1 = gen_load_fpr_D(dc, rs1);
1681     src2 = gen_load_fpr_D(dc, rs2);
1682     dst = gen_dest_fpr_D();
1683
1684     gen(dst, src1, src2);
1685
1686     gen_store_fpr_D(dc, rd, dst);
1687 }
1688
1689 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1690                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1691 {
1692     TCGv_i64 dst, src1, src2;
1693
1694     src1 = gen_load_fpr_D(dc, rs1);
1695     src2 = gen_load_fpr_D(dc, rs2);
1696     dst = gen_dest_fpr_D();
1697
1698     gen(dst, cpu_gsr, src1, src2);
1699
1700     gen_store_fpr_D(dc, rd, dst);
1701 }
1702
1703 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1704                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1705 {
1706     TCGv_i64 dst, src0, src1, src2;
1707
1708     src1 = gen_load_fpr_D(dc, rs1);
1709     src2 = gen_load_fpr_D(dc, rs2);
1710     src0 = gen_load_fpr_D(dc, rd);
1711     dst = gen_dest_fpr_D();
1712
1713     gen(dst, src0, src1, src2);
1714
1715     gen_store_fpr_D(dc, rd, dst);
1716 }
1717 #endif
1718
/* Quad-precision unary op: operand staged through QT1, result read
   back from QT0; marks the destination quad register dirty.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1729
1730 #ifdef TARGET_SPARC64
/* Quad-precision unary op (no-exception variant): operand in QT1,
   result in QT0.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1741 #endif
1742
/* Quad-precision binary op: operands staged through QT0/QT1, result
   read back from QT0.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1754
1755 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1756                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1757 {
1758     TCGv_i64 dst;
1759     TCGv_i32 src1, src2;
1760
1761     src1 = gen_load_fpr_F(dc, rs1);
1762     src2 = gen_load_fpr_F(dc, rs2);
1763     dst = gen_dest_fpr_D();
1764
1765     gen(dst, cpu_env, src1, src2);
1766
1767     gen_store_fpr_D(dc, rd, dst);
1768 }
1769
1770 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1771                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1772 {
1773     TCGv_i64 src1, src2;
1774
1775     src1 = gen_load_fpr_D(dc, rs1);
1776     src2 = gen_load_fpr_D(dc, rs2);
1777
1778     gen(cpu_env, src1, src2);
1779
1780     gen_op_store_QT0_fpr(QFPREG(rd));
1781     gen_update_fprs_dirty(QFPREG(rd));
1782 }
1783
1784 #ifdef TARGET_SPARC64
1785 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1786                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1787 {
1788     TCGv_i64 dst;
1789     TCGv_i32 src;
1790
1791     src = gen_load_fpr_F(dc, rs);
1792     dst = gen_dest_fpr_D();
1793
1794     gen(dst, cpu_env, src);
1795
1796     gen_store_fpr_D(dc, rd, dst);
1797 }
1798 #endif
1799
1800 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1801                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1802 {
1803     TCGv_i64 dst;
1804     TCGv_i32 src;
1805
1806     src = gen_load_fpr_F(dc, rs);
1807     dst = gen_dest_fpr_D();
1808
1809     gen(dst, cpu_env, src);
1810
1811     gen_store_fpr_D(dc, rd, dst);
1812 }
1813
1814 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1815                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1816 {
1817     TCGv_i32 dst;
1818     TCGv_i64 src;
1819
1820     src = gen_load_fpr_D(dc, rs);
1821     dst = gen_dest_fpr_F();
1822
1823     gen(dst, cpu_env, src);
1824
1825     gen_store_fpr_F(dc, rd, dst);
1826 }
1827
/* Quad-precision source (staged through QT1) to a single-precision
   result.  */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1840
/* Quad-precision source (staged through QT1) to a double-precision
   result.  */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1853
/* Single-precision source to a quad result in QT0.  */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1866
/* Double-precision source to a quad result in QT0.  */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1879
1880 /* asi moves */
1881 #ifdef TARGET_SPARC64
1882 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1883 {
1884     int asi;
1885     TCGv_i32 r_asi;
1886
1887     if (IS_IMM) {
1888         r_asi = tcg_temp_new_i32();
1889         tcg_gen_mov_i32(r_asi, cpu_asi);
1890     } else {
1891         asi = GET_FIELD(insn, 19, 26);
1892         r_asi = tcg_const_i32(asi);
1893     }
1894     return r_asi;
1895 }
1896
/* Load SIZE bytes from ADDR in the ASI space selected by INSN into
   DST, sign-extending when SIGN is set.  */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1910
/* Store SIZE bytes of SRC to ADDR in the ASI space selected by INSN.  */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1921
/* FP load from an alternate space; the helper writes directly into
   FP register RD.  */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1934
/* FP store to an alternate space; the helper reads directly from FP
   register RD.  */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1947
/* SWAPA: exchange DST with the 32-bit word at ADDR in the given ASI
   space.  Emitted as a load (old value into cpu_tmp64) followed by a
   store of DST; DST then receives the old memory value.  */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1962
/* LDDA: 128-bit (register-pair) load; the helper writes the register
   pair starting at RD itself.  */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
1973
/* STDA: concatenate the register pair (HI = r[rd], low word from
   r[rd + 1] via cpu_tmp0) into cpu_tmp64 and store 8 bytes.  */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1986
/* CASA: 32-bit compare-and-swap via helper.  r[RD] is the compare
   value, VAL2 the swap value; DST receives the helper's result
   (NOTE(review): presumably the old memory value, per CASA
   semantics — helper is defined elsewhere).  */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
2000
/* CASXA: 64-bit compare-and-swap; the compare value (r[RD]) is staged
   through the cpu_tmp64 global.  */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, cpu_env, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
2011
2012 #elif !defined(CONFIG_USER_ONLY)
2013
2014 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2015                               int sign)
2016 {
2017     TCGv_i32 r_asi, r_size, r_sign;
2018
2019     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2020     r_size = tcg_const_i32(size);
2021     r_sign = tcg_const_i32(sign);
2022     gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2023     tcg_temp_free(r_sign);
2024     tcg_temp_free(r_size);
2025     tcg_temp_free(r_asi);
2026     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2027 }
2028
2029 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2030 {
2031     TCGv_i32 r_asi, r_size;
2032
2033     tcg_gen_extu_tl_i64(cpu_tmp64, src);
2034     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2035     r_size = tcg_const_i32(size);
2036     gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2037     tcg_temp_free(r_size);
2038     tcg_temp_free(r_asi);
2039 }
2040
2041 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2042 {
2043     TCGv_i32 r_asi, r_size, r_sign;
2044     TCGv_i64 r_val;
2045
2046     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2047     r_size = tcg_const_i32(4);
2048     r_sign = tcg_const_i32(0);
2049     gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2050     tcg_temp_free(r_sign);
2051     r_val = tcg_temp_new_i64();
2052     tcg_gen_extu_tl_i64(r_val, dst);
2053     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2054     tcg_temp_free_i64(r_val);
2055     tcg_temp_free(r_size);
2056     tcg_temp_free(r_asi);
2057     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2058 }
2059
2060 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2061 {
2062     TCGv_i32 r_asi, r_size, r_sign;
2063
2064     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2065     r_size = tcg_const_i32(8);
2066     r_sign = tcg_const_i32(0);
2067     gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2068     tcg_temp_free(r_sign);
2069     tcg_temp_free(r_size);
2070     tcg_temp_free(r_asi);
2071     tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
2072     gen_movl_TN_reg(rd + 1, cpu_tmp0);
2073     tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2074     tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2075     gen_movl_TN_reg(rd, hi);
2076 }
2077
2078 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2079 {
2080     TCGv_i32 r_asi, r_size;
2081
2082     gen_movl_reg_TN(rd + 1, cpu_tmp0);
2083     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2084     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2085     r_size = tcg_const_i32(8);
2086     gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2087     tcg_temp_free(r_size);
2088     tcg_temp_free(r_asi);
2089 }
2090 #endif
2091
2092 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2093 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2094 {
2095     TCGv_i64 r_val;
2096     TCGv_i32 r_asi, r_size;
2097
2098     gen_ld_asi(dst, addr, insn, 1, 0);
2099
2100     r_val = tcg_const_i64(0xffULL);
2101     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2102     r_size = tcg_const_i32(1);
2103     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2104     tcg_temp_free_i32(r_size);
2105     tcg_temp_free_i32(r_asi);
2106     tcg_temp_free_i64(r_val);
2107 }
2108 #endif
2109
2110 static inline TCGv get_src1(unsigned int insn, TCGv def)
2111 {
2112     TCGv r_rs1 = def;
2113     unsigned int rs1;
2114
2115     rs1 = GET_FIELD(insn, 13, 17);
2116     if (rs1 == 0) {
2117         tcg_gen_movi_tl(def, 0);
2118     } else if (rs1 < 8) {
2119         r_rs1 = cpu_gregs[rs1];
2120     } else {
2121         tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
2122     }
2123     return r_rs1;
2124 }
2125
2126 static inline TCGv get_src2(unsigned int insn, TCGv def)
2127 {
2128     TCGv r_rs2 = def;
2129
2130     if (IS_IMM) { /* immediate */
2131         target_long simm = GET_FIELDs(insn, 19, 31);
2132         tcg_gen_movi_tl(def, simm);
2133     } else { /* register */
2134         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2135         if (rs2 == 0) {
2136             tcg_gen_movi_tl(def, 0);
2137         } else if (rs2 < 8) {
2138             r_rs2 = cpu_gregs[rs2];
2139         } else {
2140             tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
2141         }
2142     }
2143     return r_rs2;
2144 }
2145
2146 #ifdef TARGET_SPARC64
2147 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2148 {
2149     TCGv_i32 c32, zero, dst, s1, s2;
2150
2151     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2152        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2153        the later.  */
2154     c32 = tcg_temp_new_i32();
2155     if (cmp->is_bool) {
2156         tcg_gen_trunc_i64_i32(c32, cmp->c1);
2157     } else {
2158         TCGv_i64 c64 = tcg_temp_new_i64();
2159         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2160         tcg_gen_trunc_i64_i32(c32, c64);
2161         tcg_temp_free_i64(c64);
2162     }
2163
2164     s1 = gen_load_fpr_F(dc, rs);
2165     s2 = gen_load_fpr_F(dc, rd);
2166     dst = gen_dest_fpr_F();
2167     zero = tcg_const_i32(0);
2168
2169     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2170
2171     tcg_temp_free_i32(c32);
2172     tcg_temp_free_i32(zero);
2173     gen_store_fpr_F(dc, rd, dst);
2174 }
2175
2176 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2177 {
2178     TCGv_i64 dst = gen_dest_fpr_D();
2179     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2180                         gen_load_fpr_D(dc, rs),
2181                         gen_load_fpr_D(dc, rd));
2182     gen_store_fpr_D(dc, rd, dst);
2183 }
2184
2185 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2186 {
2187     int qd = QFPREG(rd);
2188     int qs = QFPREG(rs);
2189
2190     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2191                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2192     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2193                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2194
2195     gen_update_fprs_dirty(qd);
2196 }
2197
2198 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2199 {
2200     TCGv_i32 r_tl = tcg_temp_new_i32();
2201
2202     /* load env->tl into r_tl */
2203     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2204
2205     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2206     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2207
2208     /* calculate offset to current trap state from env->ts, reuse r_tl */
2209     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2210     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2211
2212     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2213     {
2214         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2215         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2216         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2217         tcg_temp_free_ptr(r_tl_tmp);
2218     }
2219
2220     tcg_temp_free_i32(r_tl);
2221 }
2222
2223 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2224                      int width, bool cc, bool left)
2225 {
2226     TCGv lo1, lo2, t1, t2;
2227     uint64_t amask, tabl, tabr;
2228     int shift, imask, omask;
2229
2230     if (cc) {
2231         tcg_gen_mov_tl(cpu_cc_src, s1);
2232         tcg_gen_mov_tl(cpu_cc_src2, s2);
2233         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2234         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2235         dc->cc_op = CC_OP_SUB;
2236     }
2237
2238     /* Theory of operation: there are two tables, left and right (not to
2239        be confused with the left and right versions of the opcode).  These
2240        are indexed by the low 3 bits of the inputs.  To make things "easy",
2241        these tables are loaded into two constants, TABL and TABR below.
2242        The operation index = (input & imask) << shift calculates the index
2243        into the constant, while val = (table >> index) & omask calculates
2244        the value we're looking for.  */
2245     switch (width) {
2246     case 8:
2247         imask = 0x7;
2248         shift = 3;
2249         omask = 0xff;
2250         if (left) {
2251             tabl = 0x80c0e0f0f8fcfeffULL;
2252             tabr = 0xff7f3f1f0f070301ULL;
2253         } else {
2254             tabl = 0x0103070f1f3f7fffULL;
2255             tabr = 0xfffefcf8f0e0c080ULL;
2256         }
2257         break;
2258     case 16:
2259         imask = 0x6;
2260         shift = 1;
2261         omask = 0xf;
2262         if (left) {
2263             tabl = 0x8cef;
2264             tabr = 0xf731;
2265         } else {
2266             tabl = 0x137f;
2267             tabr = 0xfec8;
2268         }
2269         break;
2270     case 32:
2271         imask = 0x4;
2272         shift = 0;
2273         omask = 0x3;
2274         if (left) {
2275             tabl = (2 << 2) | 3;
2276             tabr = (3 << 2) | 1;
2277         } else {
2278             tabl = (1 << 2) | 3;
2279             tabr = (3 << 2) | 2;
2280         }
2281         break;
2282     default:
2283         abort();
2284     }
2285
2286     lo1 = tcg_temp_new();
2287     lo2 = tcg_temp_new();
2288     tcg_gen_andi_tl(lo1, s1, imask);
2289     tcg_gen_andi_tl(lo2, s2, imask);
2290     tcg_gen_shli_tl(lo1, lo1, shift);
2291     tcg_gen_shli_tl(lo2, lo2, shift);
2292
2293     t1 = tcg_const_tl(tabl);
2294     t2 = tcg_const_tl(tabr);
2295     tcg_gen_shr_tl(lo1, t1, lo1);
2296     tcg_gen_shr_tl(lo2, t2, lo2);
2297     tcg_gen_andi_tl(dst, lo1, omask);
2298     tcg_gen_andi_tl(lo2, lo2, omask);
2299
2300     amask = -8;
2301     if (AM_CHECK(dc)) {
2302         amask &= 0xffffffffULL;
2303     }
2304     tcg_gen_andi_tl(s1, s1, amask);
2305     tcg_gen_andi_tl(s2, s2, amask);
2306
2307     /* We want to compute
2308         dst = (s1 == s2 ? lo1 : lo1 & lo2).
2309        We've already done dst = lo1, so this reduces to
2310         dst &= (s1 == s2 ? -1 : lo2)
2311        Which we perform by
2312         lo2 |= -(s1 == s2)
2313         dst &= lo2
2314     */
2315     tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2316     tcg_gen_neg_tl(t1, t1);
2317     tcg_gen_or_tl(lo2, lo2, t1);
2318     tcg_gen_and_tl(dst, dst, lo2);
2319
2320     tcg_temp_free(lo1);
2321     tcg_temp_free(lo2);
2322     tcg_temp_free(t1);
2323     tcg_temp_free(t2);
2324 }
2325
2326 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2327 {
2328     TCGv tmp = tcg_temp_new();
2329
2330     tcg_gen_add_tl(tmp, s1, s2);
2331     tcg_gen_andi_tl(dst, tmp, -8);
2332     if (left) {
2333         tcg_gen_neg_tl(tmp, tmp);
2334     }
2335     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2336
2337     tcg_temp_free(tmp);
2338 }
2339
2340 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2341 {
2342     TCGv t1, t2, shift;
2343
2344     t1 = tcg_temp_new();
2345     t2 = tcg_temp_new();
2346     shift = tcg_temp_new();
2347
2348     tcg_gen_andi_tl(shift, gsr, 7);
2349     tcg_gen_shli_tl(shift, shift, 3);
2350     tcg_gen_shl_tl(t1, s1, shift);
2351
2352     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2353        shift of (up to 63) followed by a constant shift of 1.  */
2354     tcg_gen_xori_tl(shift, shift, 63);
2355     tcg_gen_shr_tl(t2, s2, shift);
2356     tcg_gen_shri_tl(t2, t2, 1);
2357
2358     tcg_gen_or_tl(dst, t1, t2);
2359
2360     tcg_temp_free(t1);
2361     tcg_temp_free(t2);
2362     tcg_temp_free(shift);
2363 }
2364 #endif
2365
/* Bail out to the enclosing function's "illegal_insn" label when the
   modelled CPU lacks the named integer-unit feature.  NOTE: these
   cannot be wrapped in do { } while (0) because they must be able to
   goto labels in the surrounding decoder function.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Likewise for FPU features, jumping to "nfpu_insn" instead.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2372
2373 /* before an instruction, dc->pc must be static */
2374 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2375 {
2376     unsigned int opc, rs1, rs2, rd;
2377     TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2378     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2379     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2380     target_long simm;
2381
2382     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2383         tcg_gen_debug_insn_start(dc->pc);
2384     }
2385
2386     opc = GET_FIELD(insn, 0, 1);
2387
2388     rd = GET_FIELD(insn, 2, 6);
2389
2390     cpu_tmp1 = cpu_src1 = tcg_temp_new();
2391     cpu_tmp2 = cpu_src2 = tcg_temp_new();
2392
2393     switch (opc) {
2394     case 0:                     /* branches/sethi */
2395         {
2396             unsigned int xop = GET_FIELD(insn, 7, 9);
2397             int32_t target;
2398             switch (xop) {
2399 #ifdef TARGET_SPARC64
2400             case 0x1:           /* V9 BPcc */
2401                 {
2402                     int cc;
2403
2404                     target = GET_FIELD_SP(insn, 0, 18);
2405                     target = sign_extend(target, 19);
2406                     target <<= 2;
2407                     cc = GET_FIELD_SP(insn, 20, 21);
2408                     if (cc == 0)
2409                         do_branch(dc, target, insn, 0);
2410                     else if (cc == 2)
2411                         do_branch(dc, target, insn, 1);
2412                     else
2413                         goto illegal_insn;
2414                     goto jmp_insn;
2415                 }
2416             case 0x3:           /* V9 BPr */
2417                 {
2418                     target = GET_FIELD_SP(insn, 0, 13) |
2419                         (GET_FIELD_SP(insn, 20, 21) << 14);
2420                     target = sign_extend(target, 16);
2421                     target <<= 2;
2422                     cpu_src1 = get_src1(insn, cpu_src1);
2423                     do_branch_reg(dc, target, insn, cpu_src1);
2424                     goto jmp_insn;
2425                 }
2426             case 0x5:           /* V9 FBPcc */
2427                 {
2428                     int cc = GET_FIELD_SP(insn, 20, 21);
2429                     if (gen_trap_ifnofpu(dc)) {
2430                         goto jmp_insn;
2431                     }
2432                     target = GET_FIELD_SP(insn, 0, 18);
2433                     target = sign_extend(target, 19);
2434                     target <<= 2;
2435                     do_fbranch(dc, target, insn, cc);
2436                     goto jmp_insn;
2437                 }
2438 #else
2439             case 0x7:           /* CBN+x */
2440                 {
2441                     goto ncp_insn;
2442                 }
2443 #endif
2444             case 0x2:           /* BN+x */
2445                 {
2446                     target = GET_FIELD(insn, 10, 31);
2447                     target = sign_extend(target, 22);
2448                     target <<= 2;
2449                     do_branch(dc, target, insn, 0);
2450                     goto jmp_insn;
2451                 }
2452             case 0x6:           /* FBN+x */
2453                 {
2454                     if (gen_trap_ifnofpu(dc)) {
2455                         goto jmp_insn;
2456                     }
2457                     target = GET_FIELD(insn, 10, 31);
2458                     target = sign_extend(target, 22);
2459                     target <<= 2;
2460                     do_fbranch(dc, target, insn, 0);
2461                     goto jmp_insn;
2462                 }
2463             case 0x4:           /* SETHI */
2464                 if (rd) { // nop
2465                     uint32_t value = GET_FIELD(insn, 10, 31);
2466                     TCGv r_const;
2467
2468                     r_const = tcg_const_tl(value << 10);
2469                     gen_movl_TN_reg(rd, r_const);
2470                     tcg_temp_free(r_const);
2471                 }
2472                 break;
2473             case 0x0:           /* UNIMPL */
2474             default:
2475                 goto illegal_insn;
2476             }
2477             break;
2478         }
2479         break;
2480     case 1:                     /*CALL*/
2481         {
2482             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2483             TCGv r_const;
2484
2485             r_const = tcg_const_tl(dc->pc);
2486             gen_movl_TN_reg(15, r_const);
2487             tcg_temp_free(r_const);
2488             target += dc->pc;
2489             gen_mov_pc_npc(dc);
2490 #ifdef TARGET_SPARC64
2491             if (unlikely(AM_CHECK(dc))) {
2492                 target &= 0xffffffffULL;
2493             }
2494 #endif
2495             dc->npc = target;
2496         }
2497         goto jmp_insn;
2498     case 2:                     /* FPU & Logical Operations */
2499         {
2500             unsigned int xop = GET_FIELD(insn, 7, 12);
2501             if (xop == 0x3a) {  /* generate trap */
2502                 int cond = GET_FIELD(insn, 3, 6);
2503                 TCGv_i32 trap;
2504                 int l1 = -1, mask;
2505
2506                 if (cond == 0) {
2507                     /* Trap never.  */
2508                     break;
2509                 }
2510
2511                 save_state(dc);
2512
2513                 if (cond != 8) {
2514                     /* Conditional trap.  */
2515                     DisasCompare cmp;
2516 #ifdef TARGET_SPARC64
2517                     /* V9 icc/xcc */
2518                     int cc = GET_FIELD_SP(insn, 11, 12);
2519                     if (cc == 0) {
2520                         gen_compare(&cmp, 0, cond, dc);
2521                     } else if (cc == 2) {
2522                         gen_compare(&cmp, 1, cond, dc);
2523                     } else {
2524                         goto illegal_insn;
2525                     }
2526 #else
2527                     gen_compare(&cmp, 0, cond, dc);
2528 #endif
2529                     l1 = gen_new_label();
2530                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2531                                       cmp.c1, cmp.c2, l1);
2532                     free_compare(&cmp);
2533                 }
2534
2535                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2536                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2537
2538                 /* Don't use the normal temporaries, as they may well have
2539                    gone out of scope with the branch above.  While we're
2540                    doing that we might as well pre-truncate to 32-bit.  */
2541                 trap = tcg_temp_new_i32();
2542
2543                 rs1 = GET_FIELD_SP(insn, 14, 18);
2544                 if (IS_IMM) {
2545                     rs2 = GET_FIELD_SP(insn, 0, 6);
2546                     if (rs1 == 0) {
2547                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2548                         /* Signal that the trap value is fully constant.  */
2549                         mask = 0;
2550                     } else {
2551                         TCGv t1 = tcg_temp_new();
2552                         gen_movl_reg_TN(rs1, t1);
2553                         tcg_gen_trunc_tl_i32(trap, t1);
2554                         tcg_temp_free(t1);
2555                         tcg_gen_addi_i32(trap, trap, rs2);
2556                     }
2557                 } else {
2558                     TCGv t1 = tcg_temp_new();
2559                     TCGv t2 = tcg_temp_new();
2560                     rs2 = GET_FIELD_SP(insn, 0, 4);
2561                     gen_movl_reg_TN(rs1, t1);
2562                     gen_movl_reg_TN(rs2, t2);
2563                     tcg_gen_add_tl(t1, t1, t2);
2564                     tcg_gen_trunc_tl_i32(trap, t1);
2565                     tcg_temp_free(t1);
2566                     tcg_temp_free(t2);
2567                 }
2568                 if (mask != 0) {
2569                     tcg_gen_andi_i32(trap, trap, mask);
2570                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
2571                 }
2572
2573                 gen_helper_raise_exception(cpu_env, trap);
2574                 tcg_temp_free_i32(trap);
2575
2576                 if (cond == 8) {
2577                     /* An unconditional trap ends the TB.  */
2578                     dc->is_br = 1;
2579                     goto jmp_insn;
2580                 } else {
2581                     /* A conditional trap falls through to the next insn.  */
2582                     gen_set_label(l1);
2583                     break;
2584                 }
2585             } else if (xop == 0x28) {
2586                 rs1 = GET_FIELD(insn, 13, 17);
2587                 switch(rs1) {
2588                 case 0: /* rdy */
2589 #ifndef TARGET_SPARC64
2590                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2591                                        manual, rdy on the microSPARC
2592                                        II */
2593                 case 0x0f:          /* stbar in the SPARCv8 manual,
2594                                        rdy on the microSPARC II */
2595                 case 0x10 ... 0x1f: /* implementation-dependent in the
2596                                        SPARCv8 manual, rdy on the
2597                                        microSPARC II */
2598                     /* Read Asr17 */
2599                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2600                         TCGv r_const;
2601
2602                         /* Read Asr17 for a Leon3 monoprocessor */
2603                         r_const = tcg_const_tl((1 << 8)
2604                                                | (dc->def->nwindows - 1));
2605                         gen_movl_TN_reg(rd, r_const);
2606                         tcg_temp_free(r_const);
2607                         break;
2608                     }
2609 #endif
2610                     gen_movl_TN_reg(rd, cpu_y);
2611                     break;
2612 #ifdef TARGET_SPARC64
2613                 case 0x2: /* V9 rdccr */
2614                     gen_helper_compute_psr(cpu_env);
2615                     gen_helper_rdccr(cpu_dst, cpu_env);
2616                     gen_movl_TN_reg(rd, cpu_dst);
2617                     break;
2618                 case 0x3: /* V9 rdasi */
2619                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2620                     gen_movl_TN_reg(rd, cpu_dst);
2621                     break;
2622                 case 0x4: /* V9 rdtick */
2623                     {
2624                         TCGv_ptr r_tickptr;
2625
2626                         r_tickptr = tcg_temp_new_ptr();
2627                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2628                                        offsetof(CPUSPARCState, tick));
2629                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2630                         tcg_temp_free_ptr(r_tickptr);
2631                         gen_movl_TN_reg(rd, cpu_dst);
2632                     }
2633                     break;
2634                 case 0x5: /* V9 rdpc */
2635                     {
2636                         TCGv r_const;
2637
2638                         if (unlikely(AM_CHECK(dc))) {
2639                             r_const = tcg_const_tl(dc->pc & 0xffffffffULL);
2640                         } else {
2641                            r_const = tcg_const_tl(dc->pc);
2642                         }
2643                         gen_movl_TN_reg(rd, r_const);
2644                         tcg_temp_free(r_const);
2645                     }
2646                     break;
2647                 case 0x6: /* V9 rdfprs */
2648                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2649                     gen_movl_TN_reg(rd, cpu_dst);
2650                     break;
2651                 case 0xf: /* V9 membar */
2652                     break; /* no effect */
2653                 case 0x13: /* Graphics Status */
2654                     if (gen_trap_ifnofpu(dc)) {
2655                         goto jmp_insn;
2656                     }
2657                     gen_movl_TN_reg(rd, cpu_gsr);
2658                     break;
2659                 case 0x16: /* Softint */
2660                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2661                     gen_movl_TN_reg(rd, cpu_dst);
2662                     break;
2663                 case 0x17: /* Tick compare */
2664                     gen_movl_TN_reg(rd, cpu_tick_cmpr);
2665                     break;
2666                 case 0x18: /* System tick */
2667                     {
2668                         TCGv_ptr r_tickptr;
2669
2670                         r_tickptr = tcg_temp_new_ptr();
2671                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2672                                        offsetof(CPUSPARCState, stick));
2673                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2674                         tcg_temp_free_ptr(r_tickptr);
2675                         gen_movl_TN_reg(rd, cpu_dst);
2676                     }
2677                     break;
2678                 case 0x19: /* System tick compare */
2679                     gen_movl_TN_reg(rd, cpu_stick_cmpr);
2680                     break;
2681                 case 0x10: /* Performance Control */
2682                 case 0x11: /* Performance Instrumentation Counter */
2683                 case 0x12: /* Dispatch Control */
2684                 case 0x14: /* Softint set, WO */
2685                 case 0x15: /* Softint clear, WO */
2686 #endif
2687                 default:
2688                     goto illegal_insn;
2689                 }
2690 #if !defined(CONFIG_USER_ONLY)
2691             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2692 #ifndef TARGET_SPARC64
2693                 if (!supervisor(dc))
2694                     goto priv_insn;
2695                 gen_helper_compute_psr(cpu_env);
2696                 dc->cc_op = CC_OP_FLAGS;
2697                 gen_helper_rdpsr(cpu_dst, cpu_env);
2698 #else
2699                 CHECK_IU_FEATURE(dc, HYPV);
2700                 if (!hypervisor(dc))
2701                     goto priv_insn;
2702                 rs1 = GET_FIELD(insn, 13, 17);
2703                 switch (rs1) {
2704                 case 0: // hpstate
2705                     // gen_op_rdhpstate();
2706                     break;
2707                 case 1: // htstate
2708                     // gen_op_rdhtstate();
2709                     break;
2710                 case 3: // hintp
2711                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2712                     break;
2713                 case 5: // htba
2714                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2715                     break;
2716                 case 6: // hver
2717                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2718                     break;
2719                 case 31: // hstick_cmpr
2720                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2721                     break;
2722                 default:
2723                     goto illegal_insn;
2724                 }
2725 #endif
2726                 gen_movl_TN_reg(rd, cpu_dst);
2727                 break;
2728             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2729                 if (!supervisor(dc))
2730                     goto priv_insn;
2731 #ifdef TARGET_SPARC64
2732                 rs1 = GET_FIELD(insn, 13, 17);
2733                 switch (rs1) {
2734                 case 0: // tpc
2735                     {
2736                         TCGv_ptr r_tsptr;
2737
2738                         r_tsptr = tcg_temp_new_ptr();
2739                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2740                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2741                                       offsetof(trap_state, tpc));
2742                         tcg_temp_free_ptr(r_tsptr);
2743                     }
2744                     break;
2745                 case 1: // tnpc
2746                     {
2747                         TCGv_ptr r_tsptr;
2748
2749                         r_tsptr = tcg_temp_new_ptr();
2750                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2751                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2752                                       offsetof(trap_state, tnpc));
2753                         tcg_temp_free_ptr(r_tsptr);
2754                     }
2755                     break;
2756                 case 2: // tstate
2757                     {
2758                         TCGv_ptr r_tsptr;
2759
2760                         r_tsptr = tcg_temp_new_ptr();
2761                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2762                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2763                                       offsetof(trap_state, tstate));
2764                         tcg_temp_free_ptr(r_tsptr);
2765                     }
2766                     break;
2767                 case 3: // tt
2768                     {
2769                         TCGv_ptr r_tsptr;
2770
2771                         r_tsptr = tcg_temp_new_ptr();
2772                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2773                         tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2774                                        offsetof(trap_state, tt));
2775                         tcg_temp_free_ptr(r_tsptr);
2776                         tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2777                     }
2778                     break;
2779                 case 4: // tick
2780                     {
2781                         TCGv_ptr r_tickptr;
2782
2783                         r_tickptr = tcg_temp_new_ptr();
2784                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2785                                        offsetof(CPUSPARCState, tick));
2786                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2787                         gen_movl_TN_reg(rd, cpu_tmp0);
2788                         tcg_temp_free_ptr(r_tickptr);
2789                     }
2790                     break;
2791                 case 5: // tba
2792                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2793                     break;
2794                 case 6: // pstate
2795                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2796                                    offsetof(CPUSPARCState, pstate));
2797                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2798                     break;
2799                 case 7: // tl
2800                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2801                                    offsetof(CPUSPARCState, tl));
2802                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2803                     break;
2804                 case 8: // pil
2805                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2806                                    offsetof(CPUSPARCState, psrpil));
2807                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2808                     break;
2809                 case 9: // cwp
2810                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
2811                     break;
2812                 case 10: // cansave
2813                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2814                                    offsetof(CPUSPARCState, cansave));
2815                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2816                     break;
2817                 case 11: // canrestore
2818                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2819                                    offsetof(CPUSPARCState, canrestore));
2820                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2821                     break;
2822                 case 12: // cleanwin
2823                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2824                                    offsetof(CPUSPARCState, cleanwin));
2825                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2826                     break;
2827                 case 13: // otherwin
2828                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2829                                    offsetof(CPUSPARCState, otherwin));
2830                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2831                     break;
2832                 case 14: // wstate
2833                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2834                                    offsetof(CPUSPARCState, wstate));
2835                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2836                     break;
2837                 case 16: // UA2005 gl
2838                     CHECK_IU_FEATURE(dc, GL);
2839                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2840                                    offsetof(CPUSPARCState, gl));
2841                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2842                     break;
2843                 case 26: // UA2005 strand status
2844                     CHECK_IU_FEATURE(dc, HYPV);
2845                     if (!hypervisor(dc))
2846                         goto priv_insn;
2847                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2848                     break;
2849                 case 31: // ver
2850                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2851                     break;
2852                 case 15: // fq
2853                 default:
2854                     goto illegal_insn;
2855                 }
2856 #else
2857                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2858 #endif
2859                 gen_movl_TN_reg(rd, cpu_tmp0);
2860                 break;
2861             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2862 #ifdef TARGET_SPARC64
2863                 save_state(dc);
2864                 gen_helper_flushw(cpu_env);
2865 #else
2866                 if (!supervisor(dc))
2867                     goto priv_insn;
2868                 gen_movl_TN_reg(rd, cpu_tbr);
2869 #endif
2870                 break;
2871 #endif
2872             } else if (xop == 0x34) {   /* FPU Operations */
2873                 if (gen_trap_ifnofpu(dc)) {
2874                     goto jmp_insn;
2875                 }
2876                 gen_op_clear_ieee_excp_and_FTT();
2877                 rs1 = GET_FIELD(insn, 13, 17);
2878                 rs2 = GET_FIELD(insn, 27, 31);
2879                 xop = GET_FIELD(insn, 18, 26);
2880                 save_state(dc);
2881                 switch (xop) {
2882                 case 0x1: /* fmovs */
2883                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2884                     gen_store_fpr_F(dc, rd, cpu_src1_32);
2885                     break;
2886                 case 0x5: /* fnegs */
2887                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2888                     break;
2889                 case 0x9: /* fabss */
2890                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2891                     break;
2892                 case 0x29: /* fsqrts */
2893                     CHECK_FPU_FEATURE(dc, FSQRT);
2894                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2895                     break;
2896                 case 0x2a: /* fsqrtd */
2897                     CHECK_FPU_FEATURE(dc, FSQRT);
2898                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2899                     break;
2900                 case 0x2b: /* fsqrtq */
2901                     CHECK_FPU_FEATURE(dc, FLOAT128);
2902                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2903                     break;
2904                 case 0x41: /* fadds */
2905                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2906                     break;
2907                 case 0x42: /* faddd */
2908                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2909                     break;
2910                 case 0x43: /* faddq */
2911                     CHECK_FPU_FEATURE(dc, FLOAT128);
2912                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2913                     break;
2914                 case 0x45: /* fsubs */
2915                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2916                     break;
2917                 case 0x46: /* fsubd */
2918                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2919                     break;
2920                 case 0x47: /* fsubq */
2921                     CHECK_FPU_FEATURE(dc, FLOAT128);
2922                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2923                     break;
2924                 case 0x49: /* fmuls */
2925                     CHECK_FPU_FEATURE(dc, FMUL);
2926                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2927                     break;
2928                 case 0x4a: /* fmuld */
2929                     CHECK_FPU_FEATURE(dc, FMUL);
2930                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2931                     break;
2932                 case 0x4b: /* fmulq */
2933                     CHECK_FPU_FEATURE(dc, FLOAT128);
2934                     CHECK_FPU_FEATURE(dc, FMUL);
2935                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2936                     break;
2937                 case 0x4d: /* fdivs */
2938                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2939                     break;
2940                 case 0x4e: /* fdivd */
2941                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2942                     break;
2943                 case 0x4f: /* fdivq */
2944                     CHECK_FPU_FEATURE(dc, FLOAT128);
2945                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2946                     break;
2947                 case 0x69: /* fsmuld */
2948                     CHECK_FPU_FEATURE(dc, FSMULD);
2949                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2950                     break;
2951                 case 0x6e: /* fdmulq */
2952                     CHECK_FPU_FEATURE(dc, FLOAT128);
2953                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2954                     break;
2955                 case 0xc4: /* fitos */
2956                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2957                     break;
2958                 case 0xc6: /* fdtos */
2959                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2960                     break;
2961                 case 0xc7: /* fqtos */
2962                     CHECK_FPU_FEATURE(dc, FLOAT128);
2963                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2964                     break;
2965                 case 0xc8: /* fitod */
2966                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2967                     break;
2968                 case 0xc9: /* fstod */
2969                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2970                     break;
2971                 case 0xcb: /* fqtod */
2972                     CHECK_FPU_FEATURE(dc, FLOAT128);
2973                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2974                     break;
2975                 case 0xcc: /* fitoq */
2976                     CHECK_FPU_FEATURE(dc, FLOAT128);
2977                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2978                     break;
2979                 case 0xcd: /* fstoq */
2980                     CHECK_FPU_FEATURE(dc, FLOAT128);
2981                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2982                     break;
2983                 case 0xce: /* fdtoq */
2984                     CHECK_FPU_FEATURE(dc, FLOAT128);
2985                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2986                     break;
2987                 case 0xd1: /* fstoi */
2988                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2989                     break;
2990                 case 0xd2: /* fdtoi */
2991                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2992                     break;
2993                 case 0xd3: /* fqtoi */
2994                     CHECK_FPU_FEATURE(dc, FLOAT128);
2995                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2996                     break;
2997 #ifdef TARGET_SPARC64
2998                 case 0x2: /* V9 fmovd */
2999                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3000                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3001                     break;
3002                 case 0x3: /* V9 fmovq */
3003                     CHECK_FPU_FEATURE(dc, FLOAT128);
3004                     gen_move_Q(rd, rs2);
3005                     break;
3006                 case 0x6: /* V9 fnegd */
3007                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3008                     break;
3009                 case 0x7: /* V9 fnegq */
3010                     CHECK_FPU_FEATURE(dc, FLOAT128);
3011                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3012                     break;
3013                 case 0xa: /* V9 fabsd */
3014                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3015                     break;
3016                 case 0xb: /* V9 fabsq */
3017                     CHECK_FPU_FEATURE(dc, FLOAT128);
3018                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3019                     break;
3020                 case 0x81: /* V9 fstox */
3021                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3022                     break;
3023                 case 0x82: /* V9 fdtox */
3024                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3025                     break;
3026                 case 0x83: /* V9 fqtox */
3027                     CHECK_FPU_FEATURE(dc, FLOAT128);
3028                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3029                     break;
3030                 case 0x84: /* V9 fxtos */
3031                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3032                     break;
3033                 case 0x88: /* V9 fxtod */
3034                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3035                     break;
3036                 case 0x8c: /* V9 fxtoq */
3037                     CHECK_FPU_FEATURE(dc, FLOAT128);
3038                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3039                     break;
3040 #endif
3041                 default:
3042                     goto illegal_insn;
3043                 }
3044             } else if (xop == 0x35) {   /* FPU Operations */
3045 #ifdef TARGET_SPARC64
3046                 int cond;
3047 #endif
3048                 if (gen_trap_ifnofpu(dc)) {
3049                     goto jmp_insn;
3050                 }
3051                 gen_op_clear_ieee_excp_and_FTT();
3052                 rs1 = GET_FIELD(insn, 13, 17);
3053                 rs2 = GET_FIELD(insn, 27, 31);
3054                 xop = GET_FIELD(insn, 18, 26);
3055                 save_state(dc);
3056
3057 #ifdef TARGET_SPARC64
3058 #define FMOVR(sz)                                                  \
3059                 do {                                               \
3060                     DisasCompare cmp;                              \
3061                     cond = GET_FIELD_SP(insn, 14, 17);             \
3062                     cpu_src1 = get_src1(insn, cpu_src1);           \
3063                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3064                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3065                     free_compare(&cmp);                            \
3066                 } while (0)
3067
3068                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3069                     FMOVR(s);
3070                     break;
3071                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3072                     FMOVR(d);
3073                     break;
3074                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3075                     CHECK_FPU_FEATURE(dc, FLOAT128);
3076                     FMOVR(q);
3077                     break;
3078                 }
3079 #undef FMOVR
3080 #endif
3081                 switch (xop) {
3082 #ifdef TARGET_SPARC64
3083 #define FMOVCC(fcc, sz)                                                 \
3084                     do {                                                \
3085                         DisasCompare cmp;                               \
3086                         cond = GET_FIELD_SP(insn, 14, 17);              \
3087                         gen_fcompare(&cmp, fcc, cond);                  \
3088                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3089                         free_compare(&cmp);                             \
3090                     } while (0)
3091
3092                     case 0x001: /* V9 fmovscc %fcc0 */
3093                         FMOVCC(0, s);
3094                         break;
3095                     case 0x002: /* V9 fmovdcc %fcc0 */
3096                         FMOVCC(0, d);
3097                         break;
3098                     case 0x003: /* V9 fmovqcc %fcc0 */
3099                         CHECK_FPU_FEATURE(dc, FLOAT128);
3100                         FMOVCC(0, q);
3101                         break;
3102                     case 0x041: /* V9 fmovscc %fcc1 */
3103                         FMOVCC(1, s);
3104                         break;
3105                     case 0x042: /* V9 fmovdcc %fcc1 */
3106                         FMOVCC(1, d);
3107                         break;
3108                     case 0x043: /* V9 fmovqcc %fcc1 */
3109                         CHECK_FPU_FEATURE(dc, FLOAT128);
3110                         FMOVCC(1, q);
3111                         break;
3112                     case 0x081: /* V9 fmovscc %fcc2 */
3113                         FMOVCC(2, s);
3114                         break;
3115                     case 0x082: /* V9 fmovdcc %fcc2 */
3116                         FMOVCC(2, d);
3117                         break;
3118                     case 0x083: /* V9 fmovqcc %fcc2 */
3119                         CHECK_FPU_FEATURE(dc, FLOAT128);
3120                         FMOVCC(2, q);
3121                         break;
3122                     case 0x0c1: /* V9 fmovscc %fcc3 */
3123                         FMOVCC(3, s);
3124                         break;
3125                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3126                         FMOVCC(3, d);
3127                         break;
3128                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3129                         CHECK_FPU_FEATURE(dc, FLOAT128);
3130                         FMOVCC(3, q);
3131                         break;
3132 #undef FMOVCC
3133 #define FMOVCC(xcc, sz)                                                 \
3134                     do {                                                \
3135                         DisasCompare cmp;                               \
3136                         cond = GET_FIELD_SP(insn, 14, 17);              \
3137                         gen_compare(&cmp, xcc, cond, dc);               \
3138                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3139                         free_compare(&cmp);                             \
3140                     } while (0)
3141
3142                     case 0x101: /* V9 fmovscc %icc */
3143                         FMOVCC(0, s);
3144                         break;
3145                     case 0x102: /* V9 fmovdcc %icc */
3146                         FMOVCC(0, d);
3147                         break;
3148                     case 0x103: /* V9 fmovqcc %icc */
3149                         CHECK_FPU_FEATURE(dc, FLOAT128);
3150                         FMOVCC(0, q);
3151                         break;
3152                     case 0x181: /* V9 fmovscc %xcc */
3153                         FMOVCC(1, s);
3154                         break;
3155                     case 0x182: /* V9 fmovdcc %xcc */
3156                         FMOVCC(1, d);
3157                         break;
3158                     case 0x183: /* V9 fmovqcc %xcc */
3159                         CHECK_FPU_FEATURE(dc, FLOAT128);
3160                         FMOVCC(1, q);
3161                         break;
3162 #undef FMOVCC
3163 #endif
3164                     case 0x51: /* fcmps, V9 %fcc */
3165                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3166                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3167                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3168                         break;
3169                     case 0x52: /* fcmpd, V9 %fcc */
3170                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3171                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3172                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3173                         break;
3174                     case 0x53: /* fcmpq, V9 %fcc */
3175                         CHECK_FPU_FEATURE(dc, FLOAT128);
3176                         gen_op_load_fpr_QT0(QFPREG(rs1));
3177                         gen_op_load_fpr_QT1(QFPREG(rs2));
3178                         gen_op_fcmpq(rd & 3);
3179                         break;
3180                     case 0x55: /* fcmpes, V9 %fcc */
3181                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3182                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3183                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3184                         break;
3185                     case 0x56: /* fcmped, V9 %fcc */
3186                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3187                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3188                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3189                         break;
3190                     case 0x57: /* fcmpeq, V9 %fcc */
3191                         CHECK_FPU_FEATURE(dc, FLOAT128);
3192                         gen_op_load_fpr_QT0(QFPREG(rs1));
3193                         gen_op_load_fpr_QT1(QFPREG(rs2));
3194                         gen_op_fcmpeq(rd & 3);
3195                         break;
3196                     default:
3197                         goto illegal_insn;
3198                 }
3199             } else if (xop == 0x2) {
3200                 // clr/mov shortcut
3201
3202                 rs1 = GET_FIELD(insn, 13, 17);
3203                 if (rs1 == 0) {
3204                     // or %g0, x, y -> mov T0, x; mov y, T0
3205                     if (IS_IMM) {       /* immediate */
3206                         TCGv r_const;
3207
3208                         simm = GET_FIELDs(insn, 19, 31);
3209                         r_const = tcg_const_tl(simm);
3210                         gen_movl_TN_reg(rd, r_const);
3211                         tcg_temp_free(r_const);
3212                     } else {            /* register */
3213                         rs2 = GET_FIELD(insn, 27, 31);
3214                         gen_movl_reg_TN(rs2, cpu_dst);
3215                         gen_movl_TN_reg(rd, cpu_dst);
3216                     }
3217                 } else {
3218                     cpu_src1 = get_src1(insn, cpu_src1);
3219                     if (IS_IMM) {       /* immediate */
3220                         simm = GET_FIELDs(insn, 19, 31);
3221                         tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3222                         gen_movl_TN_reg(rd, cpu_dst);
3223                     } else {            /* register */
3224                         // or x, %g0, y -> mov T1, x; mov y, T1
3225                         rs2 = GET_FIELD(insn, 27, 31);
3226                         if (rs2 != 0) {
3227                             gen_movl_reg_TN(rs2, cpu_src2);
3228                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3229                             gen_movl_TN_reg(rd, cpu_dst);
3230                         } else
3231                             gen_movl_TN_reg(rd, cpu_src1);
3232                     }
3233                 }
3234 #ifdef TARGET_SPARC64
3235             } else if (xop == 0x25) { /* sll, V9 sllx */
3236                 cpu_src1 = get_src1(insn, cpu_src1);
3237                 if (IS_IMM) {   /* immediate */
3238                     simm = GET_FIELDs(insn, 20, 31);
3239                     if (insn & (1 << 12)) {
3240                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3241                     } else {
3242                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3243                     }
3244                 } else {                /* register */
3245                     rs2 = GET_FIELD(insn, 27, 31);
3246                     gen_movl_reg_TN(rs2, cpu_src2);
3247                     if (insn & (1 << 12)) {
3248                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3249                     } else {
3250                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3251                     }
3252                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3253                 }
3254                 gen_movl_TN_reg(rd, cpu_dst);
3255             } else if (xop == 0x26) { /* srl, V9 srlx */
3256                 cpu_src1 = get_src1(insn, cpu_src1);
3257                 if (IS_IMM) {   /* immediate */
3258                     simm = GET_FIELDs(insn, 20, 31);
3259                     if (insn & (1 << 12)) {
3260                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3261                     } else {
3262                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3263                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3264                     }
3265                 } else {                /* register */
3266                     rs2 = GET_FIELD(insn, 27, 31);
3267                     gen_movl_reg_TN(rs2, cpu_src2);
3268                     if (insn & (1 << 12)) {
3269                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3270                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3271                     } else {
3272                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3273                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3274                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3275                     }
3276                 }
3277                 gen_movl_TN_reg(rd, cpu_dst);
3278             } else if (xop == 0x27) { /* sra, V9 srax */
3279                 cpu_src1 = get_src1(insn, cpu_src1);
3280                 if (IS_IMM) {   /* immediate */
3281                     simm = GET_FIELDs(insn, 20, 31);
3282                     if (insn & (1 << 12)) {
3283                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3284                     } else {
3285                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3286                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3287                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3288                     }
3289                 } else {                /* register */
3290                     rs2 = GET_FIELD(insn, 27, 31);
3291                     gen_movl_reg_TN(rs2, cpu_src2);
3292                     if (insn & (1 << 12)) {
3293                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3294                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3295                     } else {
3296                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3297                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3298                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3299                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3300                     }
3301                 }
3302                 gen_movl_TN_reg(rd, cpu_dst);
3303 #endif
3304             } else if (xop < 0x36) {
3305                 if (xop < 0x20) {
3306                     cpu_src1 = get_src1(insn, cpu_src1);
3307                     cpu_src2 = get_src2(insn, cpu_src2);
3308                     switch (xop & ~0x10) {
3309                     case 0x0: /* add */
3310                         if (IS_IMM) {
3311                             simm = GET_FIELDs(insn, 19, 31);
3312                             if (xop & 0x10) {
3313                                 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3314                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3315                                 dc->cc_op = CC_OP_ADD;
3316                             } else {
3317                                 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3318                             }
3319                         } else {
3320                             if (xop & 0x10) {
3321                                 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3322                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3323                                 dc->cc_op = CC_OP_ADD;
3324                             } else {
3325                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3326                             }
3327                         }
3328                         break;
3329                     case 0x1: /* and */
3330                         if (IS_IMM) {
3331                             simm = GET_FIELDs(insn, 19, 31);
3332                             tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3333                         } else {
3334                             tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3335                         }
3336                         if (xop & 0x10) {
3337                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3338                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3339                             dc->cc_op = CC_OP_LOGIC;
3340                         }
3341                         break;
3342                     case 0x2: /* or */
3343                         if (IS_IMM) {
3344                             simm = GET_FIELDs(insn, 19, 31);
3345                             tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3346                         } else {
3347                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3348                         }
3349                         if (xop & 0x10) {
3350                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3351                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3352                             dc->cc_op = CC_OP_LOGIC;
3353                         }
3354                         break;
3355                     case 0x3: /* xor */
3356                         if (IS_IMM) {
3357                             simm = GET_FIELDs(insn, 19, 31);
3358                             tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3359                         } else {
3360                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3361                         }
3362                         if (xop & 0x10) {
3363                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3364                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3365                             dc->cc_op = CC_OP_LOGIC;
3366                         }
3367                         break;
3368                     case 0x4: /* sub */
3369                         if (IS_IMM) {
3370                             simm = GET_FIELDs(insn, 19, 31);
3371                             if (xop & 0x10) {
3372                                 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3373                             } else {
3374                                 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3375                             }
3376                         } else {
3377                             if (xop & 0x10) {
3378                                 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3379                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3380                                 dc->cc_op = CC_OP_SUB;
3381                             } else {
3382                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3383                             }
3384                         }
3385                         break;
3386                     case 0x5: /* andn */
3387                         if (IS_IMM) {
3388                             simm = GET_FIELDs(insn, 19, 31);
3389                             tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3390                         } else {
3391                             tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3392                         }
3393                         if (xop & 0x10) {
3394                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3395                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3396                             dc->cc_op = CC_OP_LOGIC;
3397                         }
3398                         break;
3399                     case 0x6: /* orn */
3400                         if (IS_IMM) {
3401                             simm = GET_FIELDs(insn, 19, 31);
3402                             tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3403                         } else {
3404                             tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3405                         }
3406                         if (xop & 0x10) {
3407                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3408                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3409                             dc->cc_op = CC_OP_LOGIC;
3410                         }
3411                         break;
3412                     case 0x7: /* xorn */
3413                         if (IS_IMM) {
3414                             simm = GET_FIELDs(insn, 19, 31);
3415                             tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3416                         } else {
3417                             tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3418                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3419                         }
3420                         if (xop & 0x10) {
3421                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3422                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3423                             dc->cc_op = CC_OP_LOGIC;
3424                         }
3425                         break;
3426                     case 0x8: /* addx, V9 addc */
3427                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3428                                         (xop & 0x10));
3429                         break;
3430 #ifdef TARGET_SPARC64
3431                     case 0x9: /* V9 mulx */
3432                         if (IS_IMM) {
3433                             simm = GET_FIELDs(insn, 19, 31);
3434                             tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3435                         } else {
3436                             tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3437                         }
3438                         break;
3439 #endif
3440                     case 0xa: /* umul */
3441                         CHECK_IU_FEATURE(dc, MUL);
3442                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3443                         if (xop & 0x10) {
3444                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3445                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3446                             dc->cc_op = CC_OP_LOGIC;
3447                         }
3448                         break;
3449                     case 0xb: /* smul */
3450                         CHECK_IU_FEATURE(dc, MUL);
3451                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3452                         if (xop & 0x10) {
3453                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3454                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3455                             dc->cc_op = CC_OP_LOGIC;
3456                         }
3457                         break;
3458                     case 0xc: /* subx, V9 subc */
3459                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3460                                         (xop & 0x10));
3461                         break;
3462 #ifdef TARGET_SPARC64
3463                     case 0xd: /* V9 udivx */
3464                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3465                         break;
3466 #endif
3467                     case 0xe: /* udiv */
3468                         CHECK_IU_FEATURE(dc, DIV);
3469                         if (xop & 0x10) {
3470                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3471                                                cpu_src2);
3472                             dc->cc_op = CC_OP_DIV;
3473                         } else {
3474                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3475                                             cpu_src2);
3476                         }
3477                         break;
3478                     case 0xf: /* sdiv */
3479                         CHECK_IU_FEATURE(dc, DIV);
3480                         if (xop & 0x10) {
3481                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3482                                                cpu_src2);
3483                             dc->cc_op = CC_OP_DIV;
3484                         } else {
3485                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3486                                             cpu_src2);
3487                         }
3488                         break;
3489                     default:
3490                         goto illegal_insn;
3491                     }
3492                     gen_movl_TN_reg(rd, cpu_dst);
3493                 } else {
3494                     cpu_src1 = get_src1(insn, cpu_src1);
3495                     cpu_src2 = get_src2(insn, cpu_src2);
3496                     switch (xop) {
3497                     case 0x20: /* taddcc */
3498                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3499                         gen_movl_TN_reg(rd, cpu_dst);
3500                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3501                         dc->cc_op = CC_OP_TADD;
3502                         break;
3503                     case 0x21: /* tsubcc */
3504                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3505                         gen_movl_TN_reg(rd, cpu_dst);
3506                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3507                         dc->cc_op = CC_OP_TSUB;
3508                         break;
3509                     case 0x22: /* taddcctv */
3510                         gen_helper_taddcctv(cpu_dst, cpu_env,
3511                                             cpu_src1, cpu_src2);
3512                         gen_movl_TN_reg(rd, cpu_dst);
3513                         dc->cc_op = CC_OP_TADDTV;
3514                         break;
3515                     case 0x23: /* tsubcctv */
3516                         gen_helper_tsubcctv(cpu_dst, cpu_env,
3517                                             cpu_src1, cpu_src2);
3518                         gen_movl_TN_reg(rd, cpu_dst);
3519                         dc->cc_op = CC_OP_TSUBTV;
3520                         break;
3521                     case 0x24: /* mulscc */
3522                         gen_helper_compute_psr(cpu_env);
3523                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3524                         gen_movl_TN_reg(rd, cpu_dst);
3525                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3526                         dc->cc_op = CC_OP_ADD;
3527                         break;
#ifndef TARGET_SPARC64
                    /* 32-bit-only shift forms; the 64-bit shifts are
                       handled elsewhere.  */
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            /* Shift count taken modulo 32.  */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
                            /* WRASR: rd selects the ancillary state
                               register; the value written is always
                               rs1 ^ rs2 (or rs1 ^ simm).  */
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                /* Y is architecturally 32 bits wide.  */
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_dst);
                                /* CCR write makes the lazy flags concrete.  */
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                /* ASI is an 8-bit register.  */
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                /* FPRS affects later translation decisions,
                                   so end the TB and resume at npc.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc)) {
                                    goto jmp_insn;
                                }
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            /* Privileged on all variants.  */
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            /* On V9 this opcode is SAVED/RESTORED,
                               selected by the rd field.  */
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            /* End the TB so subsequent code is translated
                               with the new PSR in effect.  */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* Value written is rs1 ^ rs2 (or rs1 ^ simm),
                               computed once into cpu_tmp0.  */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 WRPR: rd selects the privileged register.  */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    /* tt is a 32-bit field.  */
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                {
                                    /* Local temp: survives the helper call
                                       (helpers can clobber plain temps).  */
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc);
                                    gen_helper_wrpstate(cpu_env, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    /* PSTATE write changes the translation
                                       context; treat npc as dynamic.  */
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 7: // tl
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                                   offsetof(CPUSPARCState, tl));
                                    /* Changing TL also invalidates the
                                       cached npc.  */
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* SPARCv8 WRWIM: mask off bits beyond the
                               implemented window count.  */
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* SPARCv8: write the trap base register.  */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 WRHPR: hypervisor privilege needed;
                               rd selects the hyperprivileged register.  */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                /* State change not modelled yet; still end
                                   the TB conservatively.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;

                            if (insn & (1 << 18)) {
                                /* Integer condition codes: cc==0 selects
                                   icc, cc==2 selects xcc; 1 and 3 are
                                   reserved encodings.  */
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                /* Floating-point condition codes.  */
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd is both a source (the not-taken value)
                               and the destination of the movcond.  */
                            gen_movl_reg_TN(rd, cpu_dst);
                            tcg_gen_movcond_tl(cmp.cond, cpu_dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, cpu_dst);
                            free_compare(&cmp);
                            gen_movl_TN_reg(rd, cpu_dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* Helper handles division traps.  */
                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
3966                     case 0x2e: /* V9 popc */
3967                         {
3968                             cpu_src2 = get_src2(insn, cpu_src2);
3969                             gen_helper_popc(cpu_dst, cpu_src2);
3970                             gen_movl_TN_reg(rd, cpu_dst);
3971                         }
                    case 0x2f: /* V9 movr */
                        {
                            /* Conditional move on the contents of rs1
                               (register-compare conditions).  */
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;

                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd is both the not-taken value and the
                               destination.  */
                            gen_movl_reg_TN(rd, cpu_dst);
                            tcg_gen_movcond_tl(cmp.cond, cpu_dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, cpu_dst);
                            free_compare(&cmp);
                            gen_movl_TN_reg(rd, cpu_dst);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                /* VIS sub-opcode and register fields.  */
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                /* All VIS ops trap if the FPU is disabled.  */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                switch (opf) {
                /* Edge ops: the three trailing gen_edge arguments select
                   element width (8/16/32), whether condition codes are
                   produced (cc variants) and the little-endian flag
                   ('l' variants).  */
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    /* array16/array32 reuse the array8 helper and scale
                       the result by the element size (<<1, <<2).  */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    /* Last argument selects the little-endian variant.  */
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
4131                 case 0x019: /* VIS II bmask */
4132                     CHECK_FPU_FEATURE(dc, VIS2);
4133                     cpu_src1 = get_src1(insn, cpu_src1);
4134                     /* BMASK adds r[rs1] + r[rs2]; the second operand must come
4134                        from the rs2 field, not a second read of rs1 via get_src1.  */
4134                     gen_movl_reg_TN(rs2, cpu_src2);
4135                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4136                     /* Sum goes both to r[rd] and to GSR.mask (bits 63:32).  */
4136                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4137                     gen_movl_TN_reg(rd, cpu_dst);
4138                     break;
4139                 case 0x020: /* VIS I fcmple16 */
4140                     CHECK_FPU_FEATURE(dc, VIS1);
4141                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4142                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4143                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4144                     gen_movl_TN_reg(rd, cpu_dst);
4145                     break;
4146                 case 0x022: /* VIS I fcmpne16 */
4147                     CHECK_FPU_FEATURE(dc, VIS1);
4148                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4149                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4150                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4151                     gen_movl_TN_reg(rd, cpu_dst);
4152                     break;
4153                 case 0x024: /* VIS I fcmple32 */
4154                     CHECK_FPU_FEATURE(dc, VIS1);
4155                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4156                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4157                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4158                     gen_movl_TN_reg(rd, cpu_dst);
4159                     break;
4160                 case 0x026: /* VIS I fcmpne32 */
4161                     CHECK_FPU_FEATURE(dc, VIS1);
4162                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4163                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4164                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4165                     gen_movl_TN_reg(rd, cpu_dst);
4166                     break;
4167                 case 0x028: /* VIS I fcmpgt16 */
4168                     CHECK_FPU_FEATURE(dc, VIS1);
4169                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4170                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4171                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4172                     gen_movl_TN_reg(rd, cpu_dst);
4173                     break;
4174                 case 0x02a: /* VIS I fcmpeq16 */
4175                     CHECK_FPU_FEATURE(dc, VIS1);
4176                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4177                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4178                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4179                     gen_movl_TN_reg(rd, cpu_dst);
4180                     break;
4181                 case 0x02c: /* VIS I fcmpgt32 */
4182                     CHECK_FPU_FEATURE(dc, VIS1);
4183                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4184                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4185                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4186                     gen_movl_TN_reg(rd, cpu_dst);
4187                     break;
4188                 case 0x02e: /* VIS I fcmpeq32 */
4189                     CHECK_FPU_FEATURE(dc, VIS1);
4190                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4191                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4192                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4193                     gen_movl_TN_reg(rd, cpu_dst);
4194                     break;
4195                 case 0x031: /* VIS I fmul8x16 */
4196                     CHECK_FPU_FEATURE(dc, VIS1);
4197                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4198                     break;
4199                 case 0x033: /* VIS I fmul8x16au */
4200                     CHECK_FPU_FEATURE(dc, VIS1);
4201                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4202                     break;
4203                 case 0x035: /* VIS I fmul8x16al */
4204                     CHECK_FPU_FEATURE(dc, VIS1);
4205                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4206                     break;
4207                 case 0x036: /* VIS I fmul8sux16 */
4208                     CHECK_FPU_FEATURE(dc, VIS1);
4209                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4210                     break;
4211                 case 0x037: /* VIS I fmul8ulx16 */
4212                     CHECK_FPU_FEATURE(dc, VIS1);
4213                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4214                     break;
4215                 case 0x038: /* VIS I fmuld8sux16 */
4216                     CHECK_FPU_FEATURE(dc, VIS1);
4217                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4218                     break;
4219                 case 0x039: /* VIS I fmuld8ulx16 */
4220                     CHECK_FPU_FEATURE(dc, VIS1);
4221                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4222                     break;
4223                 case 0x03a: /* VIS I fpack32 */
4224                     CHECK_FPU_FEATURE(dc, VIS1);
4225                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4226                     break;
4227                 case 0x03b: /* VIS I fpack16 */
4228                     CHECK_FPU_FEATURE(dc, VIS1);
4229                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4230                     cpu_dst_32 = gen_dest_fpr_F();
4231                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4232                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4233                     break;
4234                 case 0x03d: /* VIS I fpackfix */
4235                     CHECK_FPU_FEATURE(dc, VIS1);
4236                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4237                     cpu_dst_32 = gen_dest_fpr_F();
4238                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4239                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4240                     break;
4241                 case 0x03e: /* VIS I pdist */
4242                     CHECK_FPU_FEATURE(dc, VIS1);
4243                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4244                     break;
4245                 case 0x048: /* VIS I faligndata */
4246                     CHECK_FPU_FEATURE(dc, VIS1);
4247                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4248                     break;
4249                 case 0x04b: /* VIS I fpmerge */
4250                     CHECK_FPU_FEATURE(dc, VIS1);
4251                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4252                     break;
4253                 case 0x04c: /* VIS II bshuffle */
4254                     CHECK_FPU_FEATURE(dc, VIS2);
4255                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4256                     break;
4257                 case 0x04d: /* VIS I fexpand */
4258                     CHECK_FPU_FEATURE(dc, VIS1);
4259                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4260                     break;
4261                 case 0x050: /* VIS I fpadd16 */
4262                     CHECK_FPU_FEATURE(dc, VIS1);
4263                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4264                     break;
4265                 case 0x051: /* VIS I fpadd16s */
4266                     CHECK_FPU_FEATURE(dc, VIS1);
4267                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4268                     break;
4269                 case 0x052: /* VIS I fpadd32 */
4270                     CHECK_FPU_FEATURE(dc, VIS1);
4271                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4272                     break;
4273                 case 0x053: /* VIS I fpadd32s */
4274                     CHECK_FPU_FEATURE(dc, VIS1);
4275                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4276                     break;
4277                 case 0x054: /* VIS I fpsub16 */
4278                     CHECK_FPU_FEATURE(dc, VIS1);
4279                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4280                     break;
4281                 case 0x055: /* VIS I fpsub16s */
4282                     CHECK_FPU_FEATURE(dc, VIS1);
4283                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4284                     break;
4285                 case 0x056: /* VIS I fpsub32 */
4286                     CHECK_FPU_FEATURE(dc, VIS1);
4287                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4288                     break;
4289                 case 0x057: /* VIS I fpsub32s */
4290                     CHECK_FPU_FEATURE(dc, VIS1);
4291                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4292                     break;
4293                 case 0x060: /* VIS I fzero */
4294                     CHECK_FPU_FEATURE(dc, VIS1);
4295                     cpu_dst_64 = gen_dest_fpr_D();
4296                     tcg_gen_movi_i64(cpu_dst_64, 0);
4297                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4298                     break;
4299                 case 0x061: /* VIS I fzeros */
4300                     CHECK_FPU_FEATURE(dc, VIS1);
4301                     cpu_dst_32 = gen_dest_fpr_F();
4302                     tcg_gen_movi_i32(cpu_dst_32, 0);
4303                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4304                     break;
4305                 case 0x062: /* VIS I fnor */
4306                     CHECK_FPU_FEATURE(dc, VIS1);
4307                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4308                     break;
4309                 case 0x063: /* VIS I fnors */
4310                     CHECK_FPU_FEATURE(dc, VIS1);
4311                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4312                     break;
4313                 case 0x064: /* VIS I fandnot2 */
4314                     CHECK_FPU_FEATURE(dc, VIS1);
4315                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4316                     break;
4317                 case 0x065: /* VIS I fandnot2s */
4318                     CHECK_FPU_FEATURE(dc, VIS1);
4319                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4320                     break;
4321                 case 0x066: /* VIS I fnot2 */
4322                     CHECK_FPU_FEATURE(dc, VIS1);
4323                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4324                     break;
4325                 case 0x067: /* VIS I fnot2s */
4326                     CHECK_FPU_FEATURE(dc, VIS1);
4327                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4328                     break;
4329                 case 0x068: /* VIS I fandnot1 */
4330                     CHECK_FPU_FEATURE(dc, VIS1);
4331                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4332                     break;
4333                 case 0x069: /* VIS I fandnot1s */
4334                     CHECK_FPU_FEATURE(dc, VIS1);
4335                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4336                     break;
4337                 case 0x06a: /* VIS I fnot1 */
4338                     CHECK_FPU_FEATURE(dc, VIS1);
4339                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4340                     break;
4341                 case 0x06b: /* VIS I fnot1s */
4342                     CHECK_FPU_FEATURE(dc, VIS1);
4343                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4344                     break;
4345                 case 0x06c: /* VIS I fxor */
4346                     CHECK_FPU_FEATURE(dc, VIS1);
4347                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4348                     break;
4349                 case 0x06d: /* VIS I fxors */
4350                     CHECK_FPU_FEATURE(dc, VIS1);
4351                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4352                     break;
4353                 case 0x06e: /* VIS I fnand */
4354                     CHECK_FPU_FEATURE(dc, VIS1);
4355                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4356                     break;
4357                 case 0x06f: /* VIS I fnands */
4358                     CHECK_FPU_FEATURE(dc, VIS1);
4359                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4360                     break;
4361                 case 0x070: /* VIS I fand */
4362                     CHECK_FPU_FEATURE(dc, VIS1);
4363                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4364                     break;
4365                 case 0x071: /* VIS I fands */
4366                     CHECK_FPU_FEATURE(dc, VIS1);
4367                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4368                     break;
4369                 case 0x072: /* VIS I fxnor */
4370                     CHECK_FPU_FEATURE(dc, VIS1);
4371                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4372                     break;
4373                 case 0x073: /* VIS I fxnors */
4374                     CHECK_FPU_FEATURE(dc, VIS1);
4375                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4376                     break;
4377                 case 0x074: /* VIS I fsrc1 */
4378                     CHECK_FPU_FEATURE(dc, VIS1);
4379                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4380                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4381                     break;
4382                 case 0x075: /* VIS I fsrc1s */
4383                     CHECK_FPU_FEATURE(dc, VIS1);
4384                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4385                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4386                     break;
4387                 case 0x076: /* VIS I fornot2 */
4388                     CHECK_FPU_FEATURE(dc, VIS1);
4389                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4390                     break;
4391                 case 0x077: /* VIS I fornot2s */
4392                     CHECK_FPU_FEATURE(dc, VIS1);
4393                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4394                     break;
4395                 case 0x078: /* VIS I fsrc2 */
4396                     CHECK_FPU_FEATURE(dc, VIS1);
4397                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4398                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4399                     break;
4400                 case 0x079: /* VIS I fsrc2s */
4401                     CHECK_FPU_FEATURE(dc, VIS1);
4402                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4403                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4404                     break;
4405                 case 0x07a: /* VIS I fornot1 */
4406                     CHECK_FPU_FEATURE(dc, VIS1);
4407                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4408                     break;
4409                 case 0x07b: /* VIS I fornot1s */
4410                     CHECK_FPU_FEATURE(dc, VIS1);
4411                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4412                     break;
4413                 case 0x07c: /* VIS I for */
4414                     CHECK_FPU_FEATURE(dc, VIS1);
4415                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4416                     break;
4417                 case 0x07d: /* VIS I fors */
4418                     CHECK_FPU_FEATURE(dc, VIS1);
4419                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4420                     break;
4421                 case 0x07e: /* VIS I fone */
4422                     CHECK_FPU_FEATURE(dc, VIS1);
4423                     cpu_dst_64 = gen_dest_fpr_D();
4424                     tcg_gen_movi_i64(cpu_dst_64, -1);
4425                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4426                     break;
4427                 case 0x07f: /* VIS I fones */
4428                     CHECK_FPU_FEATURE(dc, VIS1);
4429                     cpu_dst_32 = gen_dest_fpr_F();
4430                     tcg_gen_movi_i32(cpu_dst_32, -1);
4431                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4432                     break;
4433                 case 0x080: /* VIS I shutdown */
4434                 case 0x081: /* VIS II siam */
4435                     // XXX
4436                     goto illegal_insn;
4437                 default:
4438                     goto illegal_insn;
4439                 }
4440 #else
4441                 goto ncp_insn;
4442 #endif
4443             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4444 #ifdef TARGET_SPARC64
4445                 goto illegal_insn;
4446 #else
4447                 goto ncp_insn;
4448 #endif
4449 #ifdef TARGET_SPARC64
4450             } else if (xop == 0x39) { /* V9 return */
4451                 TCGv_i32 r_const;
4452
4453                 save_state(dc);
4454                 cpu_src1 = get_src1(insn, cpu_src1);
4455                 if (IS_IMM) {   /* immediate */
4456                     simm = GET_FIELDs(insn, 19, 31);
4457                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4458                 } else {                /* register */
4459                     rs2 = GET_FIELD(insn, 27, 31);
4460                     if (rs2) {
4461                         gen_movl_reg_TN(rs2, cpu_src2);
4462                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4463                     } else
4464                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4465                 }
4466                 gen_helper_restore(cpu_env);
4467                 gen_mov_pc_npc(dc);
4468                 r_const = tcg_const_i32(3);
4469                 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4470                 tcg_temp_free_i32(r_const);
4471                 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4472                 dc->npc = DYNAMIC_PC;
4473                 goto jmp_insn;
4474 #endif
4475             } else {
4476                 cpu_src1 = get_src1(insn, cpu_src1);
4477                 if (IS_IMM) {   /* immediate */
4478                     simm = GET_FIELDs(insn, 19, 31);
4479                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4480                 } else {                /* register */
4481                     rs2 = GET_FIELD(insn, 27, 31);
4482                     if (rs2) {
4483                         gen_movl_reg_TN(rs2, cpu_src2);
4484                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4485                     } else
4486                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4487                 }
4488                 switch (xop) {
4489                 case 0x38:      /* jmpl */
4490                     {
4491                         TCGv r_pc;
4492                         TCGv_i32 r_const;
4493
4494                         r_pc = tcg_const_tl(dc->pc);
4495                         gen_movl_TN_reg(rd, r_pc);
4496                         tcg_temp_free(r_pc);
4497                         gen_mov_pc_npc(dc);
4498                         r_const = tcg_const_i32(3);
4499                         gen_helper_check_align(cpu_env, cpu_dst, r_const);
4500                         tcg_temp_free_i32(r_const);
4501                         gen_address_mask(dc, cpu_dst);
4502                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4503                         dc->npc = DYNAMIC_PC;
4504                     }
4505                     goto jmp_insn;
4506 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4507                 case 0x39:      /* rett, V9 return */
4508                     {
4509                         TCGv_i32 r_const;
4510
4511                         if (!supervisor(dc))
4512                             goto priv_insn;
4513                         gen_mov_pc_npc(dc);
4514                         r_const = tcg_const_i32(3);
4515                         gen_helper_check_align(cpu_env, cpu_dst, r_const);
4516                         tcg_temp_free_i32(r_const);
4517                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4518                         dc->npc = DYNAMIC_PC;
4519                         gen_helper_rett(cpu_env);
4520                     }
4521                     goto jmp_insn;
4522 #endif
4523                 case 0x3b: /* flush */
4524                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4525                         goto unimp_flush;
4526                     /* nop */
4527                     break;
4528                 case 0x3c:      /* save */
4529                     save_state(dc);
4530                     gen_helper_save(cpu_env);
4531                     gen_movl_TN_reg(rd, cpu_dst);
4532                     break;
4533                 case 0x3d:      /* restore */
4534                     save_state(dc);
4535                     gen_helper_restore(cpu_env);
4536                     gen_movl_TN_reg(rd, cpu_dst);
4537                     break;
4538 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4539                 case 0x3e:      /* V9 done/retry */
4540                     {
4541                         switch (rd) {
4542                         case 0:
4543                             if (!supervisor(dc))
4544                                 goto priv_insn;
4545                             dc->npc = DYNAMIC_PC;
4546                             dc->pc = DYNAMIC_PC;
4547                             gen_helper_done(cpu_env);
4548                             goto jmp_insn;
4549                         case 1:
4550                             if (!supervisor(dc))
4551                                 goto priv_insn;
4552                             dc->npc = DYNAMIC_PC;
4553                             dc->pc = DYNAMIC_PC;
4554                             gen_helper_retry(cpu_env);
4555                             goto jmp_insn;
4556                         default:
4557                             goto illegal_insn;
4558                         }
4559                     }
4560                     break;
4561 #endif
4562                 default:
4563                     goto illegal_insn;
4564                 }
4565             }
4566             break;
4567         }
4568         break;
4569     case 3:                     /* load/store instructions */
4570         {
4571             unsigned int xop = GET_FIELD(insn, 7, 12);
4572
4573             /* flush pending conditional evaluations before exposing
4574                cpu state */
4575             if (dc->cc_op != CC_OP_FLAGS) {
4576                 dc->cc_op = CC_OP_FLAGS;
4577                 gen_helper_compute_psr(cpu_env);
4578             }
4579             cpu_src1 = get_src1(insn, cpu_src1);
4580             if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4581                 rs2 = GET_FIELD(insn, 27, 31);
4582                 gen_movl_reg_TN(rs2, cpu_src2);
4583                 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4584             } else if (IS_IMM) {     /* immediate */
4585                 simm = GET_FIELDs(insn, 19, 31);
4586                 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4587             } else {            /* register */
4588                 rs2 = GET_FIELD(insn, 27, 31);
4589                 if (rs2 != 0) {
4590                     gen_movl_reg_TN(rs2, cpu_src2);
4591                     tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4592                 } else
4593                     tcg_gen_mov_tl(cpu_addr, cpu_src1);
4594             }
4595             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4596                 (xop > 0x17 && xop <= 0x1d ) ||
4597                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4598                 switch (xop) {
4599                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4600                     gen_address_mask(dc, cpu_addr);
4601                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4602                     break;
4603                 case 0x1:       /* ldub, load unsigned byte */
4604                     gen_address_mask(dc, cpu_addr);
4605                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4606                     break;
4607                 case 0x2:       /* lduh, load unsigned halfword */
4608                     gen_address_mask(dc, cpu_addr);
4609                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4610                     break;
4611                 case 0x3:       /* ldd, load double word */
4612                     if (rd & 1)
4613                         goto illegal_insn;
4614                     else {
4615                         TCGv_i32 r_const;
4616
4617                         save_state(dc);
4618                         r_const = tcg_const_i32(7);
4619                         /* XXX remove alignment check */
4620                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4621                         tcg_temp_free_i32(r_const);
4622                         gen_address_mask(dc, cpu_addr);
4623                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4624                         tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4625                         tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4626                         gen_movl_TN_reg(rd + 1, cpu_tmp0);
4627                         tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4628                         tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4629                         tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4630                     }
4631                     break;
4632                 case 0x9:       /* ldsb, load signed byte */
4633                     gen_address_mask(dc, cpu_addr);
4634                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4635                     break;
4636                 case 0xa:       /* ldsh, load signed halfword */
4637                     gen_address_mask(dc, cpu_addr);
4638                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4639                     break;
4640                 case 0xd:       /* ldstub -- XXX: should be atomically */
4641                     {
4642                         TCGv r_const;
4643
4644                         gen_address_mask(dc, cpu_addr);
4645                         /* LDSTUB fetches an unsigned byte, right-justified and
4645                            zero-filled in r[rd]; ld8s would sign-extend values
4645                            >= 0x80.  Use the unsigned load.  */
4645                         tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4646                         /* Then rewrite the memory byte to all ones.  */
4646                         r_const = tcg_const_tl(0xff);
4647                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4648                         tcg_temp_free(r_const);
4649                     }
4650                     break;
4651                 case 0x0f:      /* swap, swap register with memory. Also
4652                                    atomically */
4653                     CHECK_IU_FEATURE(dc, SWAP);
4654                     gen_movl_reg_TN(rd, cpu_val);
4655                     gen_address_mask(dc, cpu_addr);
4656                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4657                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4658                     tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4659                     break;
4660 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4661                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4662 #ifndef TARGET_SPARC64
4663                     if (IS_IMM)
4664                         goto illegal_insn;
4665                     if (!supervisor(dc))
4666                         goto priv_insn;
4667 #endif
4668                     save_state(dc);
4669                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4670                     break;
4671                 case 0x11:      /* lduba, load unsigned byte alternate */
4672 #ifndef TARGET_SPARC64
4673                     if (IS_IMM)
4674                         goto illegal_insn;
4675                     if (!supervisor(dc))
4676                         goto priv_insn;
4677 #endif
4678                     save_state(dc);
4679                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4680                     break;
4681                 case 0x12:      /* lduha, load unsigned halfword alternate */
4682 #ifndef TARGET_SPARC64
4683                     if (IS_IMM)
4684                         goto illegal_insn;
4685                     if (!supervisor(dc))
4686                         goto priv_insn;
4687 #endif
4688                     save_state(dc);
4689                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4690                     break;
4691                 case 0x13:      /* ldda, load double word alternate */
4692 #ifndef TARGET_SPARC64
4693                     if (IS_IMM)
4694                         goto illegal_insn;
4695                     if (!supervisor(dc))
4696                         goto priv_insn;
4697 #endif
4698                     if (rd & 1)
4699                         goto illegal_insn;
4700                     save_state(dc);
4701                     gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4702                     goto skip_move;
4703                 case 0x19:      /* ldsba, load signed byte alternate */
4704 #ifndef TARGET_SPARC64
4705                     if (IS_IMM)
4706                         goto illegal_insn;
4707                     if (!supervisor(dc))
4708                         goto priv_insn;
4709 #endif
4710                     save_state(dc);
4711                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4712                     break;
4713                 case 0x1a:      /* ldsha, load signed halfword alternate */
4714 #ifndef TARGET_SPARC64
4715                     if (IS_IMM)
4716                         goto illegal_insn;
4717                     if (!supervisor(dc))
4718                         goto priv_insn;
4719 #endif
4720                     save_state(dc);
4721                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4722                     break;
4723                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
4724 #ifndef TARGET_SPARC64
4725                     if (IS_IMM)
4726                         goto illegal_insn;
4727                     if (!supervisor(dc))
4728                         goto priv_insn;
4729 #endif
4730                     save_state(dc);
4731                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4732                     break;
4733                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
4734                                    atomically */
4735                     CHECK_IU_FEATURE(dc, SWAP);
4736 #ifndef TARGET_SPARC64
4737                     if (IS_IMM)
4738                         goto illegal_insn;
4739                     if (!supervisor(dc))
4740                         goto priv_insn;
4741 #endif
4742                     save_state(dc);
4743                     gen_movl_reg_TN(rd, cpu_val);
4744                     gen_swap_asi(cpu_val, cpu_addr, insn);
4745                     break;
4746
4747 #ifndef TARGET_SPARC64
4748                 case 0x30: /* ldc */
4749                 case 0x31: /* ldcsr */
4750                 case 0x33: /* lddc */
4751                     goto ncp_insn;
4752 #endif
4753 #endif
4754 #ifdef TARGET_SPARC64
4755                 case 0x08: /* V9 ldsw */
4756                     gen_address_mask(dc, cpu_addr);
4757                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4758                     break;
4759                 case 0x0b: /* V9 ldx */
4760                     gen_address_mask(dc, cpu_addr);
4761                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4762                     break;
4763                 case 0x18: /* V9 ldswa */
4764                     save_state(dc);
4765                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4766                     break;
4767                 case 0x1b: /* V9 ldxa */
4768                     save_state(dc);
4769                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4770                     break;
4771                 case 0x2d: /* V9 prefetch, no effect */
4772                     goto skip_move;
4773                 case 0x30: /* V9 ldfa */
4774                     if (gen_trap_ifnofpu(dc)) {
4775                         goto jmp_insn;
4776                     }
4777                     save_state(dc);
4778                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4779                     gen_update_fprs_dirty(rd);
4780                     goto skip_move;
4781                 case 0x33: /* V9 lddfa */
4782                     if (gen_trap_ifnofpu(dc)) {
4783                         goto jmp_insn;
4784                     }
4785                     save_state(dc);
4786                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4787                     gen_update_fprs_dirty(DFPREG(rd));
4788                     goto skip_move;
4789                 case 0x3d: /* V9 prefetcha, no effect */
4790                     goto skip_move;
4791                 case 0x32: /* V9 ldqfa */
4792                     CHECK_FPU_FEATURE(dc, FLOAT128);
4793                     if (gen_trap_ifnofpu(dc)) {
4794                         goto jmp_insn;
4795                     }
4796                     save_state(dc);
4797                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4798                     gen_update_fprs_dirty(QFPREG(rd));
4799                     goto skip_move;
4800 #endif
4801                 default:
4802                     goto illegal_insn;
4803                 }
4804                 gen_movl_TN_reg(rd, cpu_val);
4805 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4806             skip_move: ;
4807 #endif
4808             } else if (xop >= 0x20 && xop < 0x24) {
4809                 if (gen_trap_ifnofpu(dc)) {
4810                     goto jmp_insn;
4811                 }
4812                 save_state(dc);
4813                 switch (xop) {
4814                 case 0x20:      /* ldf, load fpreg */
4815                     gen_address_mask(dc, cpu_addr);
4816                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4817                     cpu_dst_32 = gen_dest_fpr_F();
4818                     tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4819                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4820                     break;
4821                 case 0x21:      /* ldfsr, V9 ldxfsr */
4822 #ifdef TARGET_SPARC64
4823                     gen_address_mask(dc, cpu_addr);
4824                     if (rd == 1) {
4825                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4826                         gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4827                     } else {
4828                         tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4829                         tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4830                         gen_helper_ldfsr(cpu_env, cpu_tmp32);
4831                     }
4832 #else
4833                     {
4834                         tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4835                         gen_helper_ldfsr(cpu_env, cpu_tmp32);
4836                     }
4837 #endif
4838                     break;
4839                 case 0x22:      /* ldqf, load quad fpreg */
4840                     {
4841                         TCGv_i32 r_const;
4842
4843                         CHECK_FPU_FEATURE(dc, FLOAT128);
4844                         r_const = tcg_const_i32(dc->mem_idx);
4845                         gen_address_mask(dc, cpu_addr);
4846                         gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4847                         tcg_temp_free_i32(r_const);
4848                         gen_op_store_QT0_fpr(QFPREG(rd));
4849                         gen_update_fprs_dirty(QFPREG(rd));
4850                     }
4851                     break;
4852                 case 0x23:      /* lddf, load double fpreg */
4853                     gen_address_mask(dc, cpu_addr);
4854                     cpu_dst_64 = gen_dest_fpr_D();
4855                     tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4856                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4857                     break;
4858                 default:
4859                     goto illegal_insn;
4860                 }
4861             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4862                        xop == 0xe || xop == 0x1e) {
4863                 gen_movl_reg_TN(rd, cpu_val);
4864                 switch (xop) {
4865                 case 0x4: /* st, store word */
4866                     gen_address_mask(dc, cpu_addr);
4867                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4868                     break;
4869                 case 0x5: /* stb, store byte */
4870                     gen_address_mask(dc, cpu_addr);
4871                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4872                     break;
4873                 case 0x6: /* sth, store halfword */
4874                     gen_address_mask(dc, cpu_addr);
4875                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4876                     break;
4877                 case 0x7: /* std, store double word */
4878                     if (rd & 1)
4879                         goto illegal_insn;
4880                     else {
4881                         TCGv_i32 r_const;
4882
4883                         save_state(dc);
4884                         gen_address_mask(dc, cpu_addr);
4885                         r_const = tcg_const_i32(7);
4886                         /* XXX remove alignment check */
4887                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4888                         tcg_temp_free_i32(r_const);
4889                         gen_movl_reg_TN(rd + 1, cpu_tmp0);
4890                         tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4891                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4892                     }
4893                     break;
4894 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4895                 case 0x14: /* sta, V9 stwa, store word alternate */
4896 #ifndef TARGET_SPARC64
4897                     if (IS_IMM)
4898                         goto illegal_insn;
4899                     if (!supervisor(dc))
4900                         goto priv_insn;
4901 #endif
4902                     save_state(dc);
4903                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4904                     dc->npc = DYNAMIC_PC;
4905                     break;
4906                 case 0x15: /* stba, store byte alternate */
4907 #ifndef TARGET_SPARC64
4908                     if (IS_IMM)
4909                         goto illegal_insn;
4910                     if (!supervisor(dc))
4911                         goto priv_insn;
4912 #endif
4913                     save_state(dc);
4914                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4915                     dc->npc = DYNAMIC_PC;
4916                     break;
4917                 case 0x16: /* stha, store halfword alternate */
4918 #ifndef TARGET_SPARC64
4919                     if (IS_IMM)
4920                         goto illegal_insn;
4921                     if (!supervisor(dc))
4922                         goto priv_insn;
4923 #endif
4924                     save_state(dc);
4925                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4926                     dc->npc = DYNAMIC_PC;
4927                     break;
4928                 case 0x17: /* stda, store double word alternate */
4929 #ifndef TARGET_SPARC64
4930                     if (IS_IMM)
4931                         goto illegal_insn;
4932                     if (!supervisor(dc))
4933                         goto priv_insn;
4934 #endif
4935                     if (rd & 1)
4936                         goto illegal_insn;
4937                     else {
4938                         save_state(dc);
4939                         gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4940                     }
4941                     break;
4942 #endif
4943 #ifdef TARGET_SPARC64
4944                 case 0x0e: /* V9 stx */
4945                     gen_address_mask(dc, cpu_addr);
4946                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4947                     break;
4948                 case 0x1e: /* V9 stxa */
4949                     save_state(dc);
4950                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
4951                     dc->npc = DYNAMIC_PC;
4952                     break;
4953 #endif
4954                 default:
4955                     goto illegal_insn;
4956                 }
4957             } else if (xop > 0x23 && xop < 0x28) {
4958                 if (gen_trap_ifnofpu(dc)) {
4959                     goto jmp_insn;
4960                 }
4961                 save_state(dc);
4962                 switch (xop) {
4963                 case 0x24: /* stf, store fpreg */
4964                     gen_address_mask(dc, cpu_addr);
4965                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
4966                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
4967                     tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4968                     break;
4969                 case 0x25: /* stfsr, V9 stxfsr */
4970 #ifdef TARGET_SPARC64
4971                     gen_address_mask(dc, cpu_addr);
4972                     tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
4973                     if (rd == 1)
4974                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4975                     else
4976                         tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4977 #else
4978                     tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
4979                     tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4980 #endif
4981                     break;
4982                 case 0x26:
4983 #ifdef TARGET_SPARC64
4984                     /* V9 stqf, store quad fpreg */
4985                     {
4986                         TCGv_i32 r_const;
4987
4988                         CHECK_FPU_FEATURE(dc, FLOAT128);
4989                         gen_op_load_fpr_QT0(QFPREG(rd));
4990                         r_const = tcg_const_i32(dc->mem_idx);
4991                         gen_address_mask(dc, cpu_addr);
4992                         gen_helper_stqf(cpu_env, cpu_addr, r_const);
4993                         tcg_temp_free_i32(r_const);
4994                     }
4995                     break;
4996 #else /* !TARGET_SPARC64 */
4997                     /* stdfq, store floating point queue */
4998 #if defined(CONFIG_USER_ONLY)
4999                     goto illegal_insn;
5000 #else
5001                     if (!supervisor(dc))
5002                         goto priv_insn;
5003                     if (gen_trap_ifnofpu(dc)) {
5004                         goto jmp_insn;
5005                     }
5006                     goto nfq_insn;
5007 #endif
5008 #endif
5009                 case 0x27: /* stdf, store double fpreg */
5010                     gen_address_mask(dc, cpu_addr);
5011                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5012                     tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5013                     break;
5014                 default:
5015                     goto illegal_insn;
5016                 }
5017             } else if (xop > 0x33 && xop < 0x3f) {
5018                 save_state(dc);
5019                 switch (xop) {
5020 #ifdef TARGET_SPARC64
5021                 case 0x34: /* V9 stfa */
5022                     if (gen_trap_ifnofpu(dc)) {
5023                         goto jmp_insn;
5024                     }
5025                     gen_stf_asi(cpu_addr, insn, 4, rd);
5026                     break;
5027                 case 0x36: /* V9 stqfa */
5028                     {
5029                         TCGv_i32 r_const;
5030
5031                         CHECK_FPU_FEATURE(dc, FLOAT128);
5032                         if (gen_trap_ifnofpu(dc)) {
5033                             goto jmp_insn;
5034                         }
5035                         r_const = tcg_const_i32(7);
5036                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
5037                         tcg_temp_free_i32(r_const);
5038                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5039                     }
5040                     break;
5041                 case 0x37: /* V9 stdfa */
5042                     if (gen_trap_ifnofpu(dc)) {
5043                         goto jmp_insn;
5044                     }
5045                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5046                     break;
5047                 case 0x3c: /* V9 casa */
5048                     gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5049                     gen_movl_TN_reg(rd, cpu_val);
5050                     break;
5051                 case 0x3e: /* V9 casxa */
5052                     gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5053                     gen_movl_TN_reg(rd, cpu_val);
5054                     break;
5055 #else
5056                 case 0x34: /* stc */
5057                 case 0x35: /* stcsr */
5058                 case 0x36: /* stdcq */
5059                 case 0x37: /* stdc */
5060                     goto ncp_insn;
5061 #endif
5062                 default:
5063                     goto illegal_insn;
5064                 }
5065             } else
5066                 goto illegal_insn;
5067         }
5068         break;
5069     }
5070     /* default case for non jump instructions */
5071     if (dc->npc == DYNAMIC_PC) {
5072         dc->pc = DYNAMIC_PC;
5073         gen_op_next_insn();
5074     } else if (dc->npc == JUMP_PC) {
5075         /* we can do a static jump */
5076         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5077         dc->is_br = 1;
5078     } else {
5079         dc->pc = dc->npc;
5080         dc->npc = dc->npc + 4;
5081     }
5082  jmp_insn:
5083     goto egress;
5084  illegal_insn:
5085     {
5086         TCGv_i32 r_const;
5087
5088         save_state(dc);
5089         r_const = tcg_const_i32(TT_ILL_INSN);
5090         gen_helper_raise_exception(cpu_env, r_const);
5091         tcg_temp_free_i32(r_const);
5092         dc->is_br = 1;
5093     }
5094     goto egress;
5095  unimp_flush:
5096     {
5097         TCGv_i32 r_const;
5098
5099         save_state(dc);
5100         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5101         gen_helper_raise_exception(cpu_env, r_const);
5102         tcg_temp_free_i32(r_const);
5103         dc->is_br = 1;
5104     }
5105     goto egress;
5106 #if !defined(CONFIG_USER_ONLY)
5107  priv_insn:
5108     {
5109         TCGv_i32 r_const;
5110
5111         save_state(dc);
5112         r_const = tcg_const_i32(TT_PRIV_INSN);
5113         gen_helper_raise_exception(cpu_env, r_const);
5114         tcg_temp_free_i32(r_const);
5115         dc->is_br = 1;
5116     }
5117     goto egress;
5118 #endif
5119  nfpu_insn:
5120     save_state(dc);
5121     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5122     dc->is_br = 1;
5123     goto egress;
5124 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5125  nfq_insn:
5126     save_state(dc);
5127     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5128     dc->is_br = 1;
5129     goto egress;
5130 #endif
5131 #ifndef TARGET_SPARC64
5132  ncp_insn:
5133     {
5134         TCGv r_const;
5135
5136         save_state(dc);
5137         r_const = tcg_const_i32(TT_NCP_INSN);
5138         gen_helper_raise_exception(cpu_env, r_const);
5139         tcg_temp_free(r_const);
5140         dc->is_br = 1;
5141     }
5142     goto egress;
5143 #endif
5144  egress:
5145     tcg_temp_free(cpu_tmp1);
5146     tcg_temp_free(cpu_tmp2);
5147     if (dc->n_t32 != 0) {
5148         int i;
5149         for (i = dc->n_t32 - 1; i >= 0; --i) {
5150             tcg_temp_free_i32(dc->t32[i]);
5151         }
5152         dc->n_t32 = 0;
5153     }
5154 }
5155
/* Translate guest code starting at tb->pc into a stream of TCG ops.
   If 'spc' is non-zero we are in "search PC" mode (re-translation for
   exception restart): per-instruction pc/npc/icount records are written
   into the gen_opc_* arrays so restore_state_to_opc() can map an op
   index back to precise guest CPU state.  */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    unsigned int insn;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* The npc of the first insn is carried in cs_base (SPARC has
       delayed branches, so npc is not always pc + 4).  */
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* If a breakpoint is set on the current insn, emit a debug
           exception and terminate the TB here.  */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc);
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            /* Record pc/npc/icount for each op index; pad the gap
               since the previous insn with zeroed instr_start flags.  */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        insn = cpu_ldl_code(env, dc->pc);

        /* Per-instruction TCG temporaries used by disas_sparc_insn();
           freed again right after the insn is translated.  */
        cpu_tmp0 = tcg_temp_new();
        cpu_tmp32 = tcg_temp_new_i32();
        cpu_tmp64 = tcg_temp_new_i64();
        cpu_dst = tcg_temp_new();
        cpu_val = tcg_temp_new();
        cpu_addr = tcg_temp_new();

        disas_sparc_insn(dc, insn);
        num_insns++;

        tcg_temp_free(cpu_addr);
        tcg_temp_free(cpu_val);
        tcg_temp_free(cpu_dst);
        tcg_temp_free_i64(cpu_tmp64);
        tcg_temp_free_i32(cpu_tmp32);
        tcg_temp_free(cpu_tmp0);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            /* Dynamic pc or npc: store whatever is known and leave the
               translator via a plain TB exit.  */
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        /* Export the two possible targets of a pending conditional
           branch so restore_state_to_opc() can pick one via 'cond'.  */
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
5295
5296 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5297 {
5298     gen_intermediate_code_internal(tb, 0, env);
5299 }
5300
5301 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5302 {
5303     gen_intermediate_code_internal(tb, 1, env);
5304 }
5305
/* One-time initialization of the TCG globals that mirror CPUSPARCState
   fields (registers, condition codes, pc/npc, ...).  Idempotent: the
   real work runs only on the first call, guarded by 'inited'.  */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    /* Names for the FP register file viewed as 64-bit halves: each
       entry covers an even/odd single-precision pair (f0/f1, f2/f3, ...),
       hence only even names appear here.  */
    static const char * const fregnames[32] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUSPARCState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        /* SPARC64-only state: extended condition codes, ASI, FP state
           register, and the various timer/hypervisor registers.  */
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUSPARCState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUSPARCState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUSPARCState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUSPARCState, softint),
                                             "softint");
#else
        /* SPARC32 only: window invalid mask.  */
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUSPARCState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
                                     "tbr");
#endif
        /* g0 (index 0) is hardwired to zero and gets no TCG global.  */
        for (i = 1; i < 8; i++) {
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUSPARCState, gregs[i]),
                                              gregnames[i]);
        }
        for (i = 0; i < TARGET_DPREGS; i++) {
            cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                                offsetof(CPUSPARCState, fpr[i]),
                                                fregnames[i]);
        }

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
5411
5412 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5413 {
5414     target_ulong npc;
5415     env->pc = gen_opc_pc[pc_pos];
5416     npc = gen_opc_npc[pc_pos];
5417     if (npc == 1) {
5418         /* dynamic NPC: already stored */
5419     } else if (npc == 2) {
5420         /* jump PC: use 'cond' and the jump targets of the translation */
5421         if (env->cond) {
5422             env->npc = gen_opc_jump_pc[0];
5423         } else {
5424             env->npc = gen_opc_jump_pc[1];
5425         }
5426     } else {
5427         env->npc = npc;
5428     }
5429
5430     /* flush pending conditional evaluations before exposing cpu state */
5431     if (CC_OP != CC_OP_FLAGS) {
5432         helper_compute_psr(env);
5433     }
5434 }
This page took 0.323927 seconds and 4 git commands to generate.