1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <[email protected]>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include "qemu/osdep.h"
22
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg-op.h"
28 #include "exec/cpu_ldst.h"
29
30 #include "exec/helper-gen.h"
31
32 #include "trace-tcg.h"
33 #include "exec/log.h"
34
35
36 #define DEBUG_DISAS
37
38 #define DYNAMIC_PC  1 /* dynamic pc value */
39 #define JUMP_PC     2 /* dynamic pc value which takes only two values
40                          according to jump_pc[T2] */
41
42 /* global register indexes */
43 static TCGv_env cpu_env;
44 static TCGv_ptr cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
46 static TCGv_i32 cpu_cc_op;
47 static TCGv_i32 cpu_psr;
48 static TCGv cpu_fsr, cpu_pc, cpu_npc;
49 static TCGv cpu_regs[32];
50 static TCGv cpu_y;
51 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_tbr;
53 #endif
54 static TCGv cpu_cond;
55 #ifdef TARGET_SPARC64
56 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
57 static TCGv cpu_gsr;
58 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
59 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
60 #else
61 static TCGv cpu_wim;
62 #endif
63 /* Floating point registers */
64 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
65
66 #include "exec/gen-icount.h"
67
68 typedef struct DisasContext {
69     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
70     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
71     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
72     int is_br;
73     int mem_idx;
74     int fpu_enabled;
75     int address_mask_32bit;
76     int singlestep;
77     uint32_t cc_op;  /* current CC operation */
78     struct TranslationBlock *tb;
79     sparc_def_t *def;
80     TCGv_i32 t32[3];
81     TCGv ttl[5];
82     int n_t32;
83     int n_ttl;
84 } DisasContext;
85
86 typedef struct {
87     TCGCond cond;
88     bool is_bool;
89     bool g1, g2;
90     TCGv c1, c2;
91 } DisasCompare;
92
93 // This macro uses non-native bit order, i.e. bit 0 is the MSB (2^31)
94 #define GET_FIELD(X, FROM, TO)                                  \
95     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
96
97 // This macro uses the bit order of the manuals, i.e. bit 0 is 2^0
98 #define GET_FIELD_SP(X, FROM, TO)               \
99     GET_FIELD(X, 31 - (TO), 31 - (FROM))
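/* Illustrative example: in a Bicc instruction the condition code occupies
   bits 28..25 (conventional numbering).  GET_FIELD(insn, 3, 6) extracts it
   with the MSB-first numbering above; GET_FIELD_SP(insn, 25, 28) is the
   equivalent form using the manual's numbering. */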
100
101 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
102 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
103
104 #ifdef TARGET_SPARC64
105 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
106 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
107 #else
108 #define DFPREG(r) (r & 0x1e)
109 #define QFPREG(r) (r & 0x1c)
110 #endif
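/* In the V9 encoding, bit 5 of a double/quad register number (%f32..%f62)
   is carried in bit 0 of the 5-bit register field; DFPREG/QFPREG fold it
   back into place.  On 32-bit SPARC only %f0..%f31 exist, so the low bit
   is simply masked off. */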
111
112 #define UA2005_HTRAP_MASK 0xff
113 #define V8_TRAP_MASK 0x7f
114
115 static int sign_extend(int x, int len)
116 {
117     len = 32 - len;
118     return (x << len) >> len;
119 }
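/* e.g. sign_extend(0x1fff, 13) == -1: the value is shifted up so its top
   bit becomes the sign bit, then arithmetically shifted back down. */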
120
121 #define IS_IMM (insn & (1<<13))
122
123 static inline TCGv_i32 get_temp_i32(DisasContext *dc)
124 {
125     TCGv_i32 t;
126     assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
127     dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
128     return t;
129 }
130
131 static inline TCGv get_temp_tl(DisasContext *dc)
132 {
133     TCGv t;
134     assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
135     dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
136     return t;
137 }
138
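/* Mark the FPU register file dirty in FPRS: bit 0 (dl) covers %f0-%f31,
   bit 1 (du) covers %f32-%f63. */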
139 static inline void gen_update_fprs_dirty(int rd)
140 {
141 #if defined(TARGET_SPARC64)
142     tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
143 #endif
144 }
145
146 /* floating point registers moves */
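/* Each 64-bit cpu_fpr[n] holds a pair of single-precision registers:
   the even-numbered one in bits 63..32, the odd-numbered one in bits 31..0. */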
147 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
148 {
149 #if TCG_TARGET_REG_BITS == 32
150     if (src & 1) {
151         return TCGV_LOW(cpu_fpr[src / 2]);
152     } else {
153         return TCGV_HIGH(cpu_fpr[src / 2]);
154     }
155 #else
156     if (src & 1) {
157         return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
158     } else {
159         TCGv_i32 ret = get_temp_i32(dc);
160         TCGv_i64 t = tcg_temp_new_i64();
161
162         tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
163         tcg_gen_extrl_i64_i32(ret, t);
164         tcg_temp_free_i64(t);
165
166         return ret;
167     }
168 #endif
169 }
170
171 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
172 {
173 #if TCG_TARGET_REG_BITS == 32
174     if (dst & 1) {
175         tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
176     } else {
177         tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
178     }
179 #else
180     TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
181     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
182                         (dst & 1 ? 0 : 32), 32);
183 #endif
184     gen_update_fprs_dirty(dst);
185 }
186
187 static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
188 {
189     return get_temp_i32(dc);
190 }
191
192 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
193 {
194     src = DFPREG(src);
195     return cpu_fpr[src / 2];
196 }
197
198 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
199 {
200     dst = DFPREG(dst);
201     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
202     gen_update_fprs_dirty(dst);
203 }
204
205 static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
206 {
207     return cpu_fpr[DFPREG(dst) / 2];
208 }
209
210 static void gen_op_load_fpr_QT0(unsigned int src)
211 {
212     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
213                    offsetof(CPU_QuadU, ll.upper));
214     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
215                    offsetof(CPU_QuadU, ll.lower));
216 }
217
218 static void gen_op_load_fpr_QT1(unsigned int src)
219 {
220     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
221                    offsetof(CPU_QuadU, ll.upper));
222     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
223                    offsetof(CPU_QuadU, ll.lower));
224 }
225
226 static void gen_op_store_QT0_fpr(unsigned int dst)
227 {
228     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
229                    offsetof(CPU_QuadU, ll.upper));
230     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
231                    offsetof(CPU_QuadU, ll.lower));
232 }
233
234 #ifdef TARGET_SPARC64
235 static void gen_move_Q(unsigned int rd, unsigned int rs)
236 {
237     rd = QFPREG(rd);
238     rs = QFPREG(rs);
239
240     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
241     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
242     gen_update_fprs_dirty(rd);
243 }
244 #endif
245
246 /* moves */
247 #ifdef CONFIG_USER_ONLY
248 #define supervisor(dc) 0
249 #ifdef TARGET_SPARC64
250 #define hypervisor(dc) 0
251 #endif
252 #else
253 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
254 #ifdef TARGET_SPARC64
255 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
256 #else
257 #endif
258 #endif
259
260 #ifdef TARGET_SPARC64
261 #ifndef TARGET_ABI32
262 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
263 #else
264 #define AM_CHECK(dc) (1)
265 #endif
266 #endif
267
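/* Truncate 64-bit addresses to 32 bits when address masking is in effect
   (PSTATE.AM on sparc64, or always for a 32-bit ABI). */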
268 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
269 {
270 #ifdef TARGET_SPARC64
271     if (AM_CHECK(dc))
272         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
273 #endif
274 }
275
276 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
277 {
278     if (reg > 0) {
279         assert(reg < 32);
280         return cpu_regs[reg];
281     } else {
282         TCGv t = get_temp_tl(dc);
283         tcg_gen_movi_tl(t, 0);
284         return t;
285     }
286 }
287
288 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
289 {
290     if (reg > 0) {
291         assert(reg < 32);
292         tcg_gen_mov_tl(cpu_regs[reg], v);
293     }
294 }
295
296 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
297 {
298     if (reg > 0) {
299         assert(reg < 32);
300         return cpu_regs[reg];
301     } else {
302         return get_temp_tl(dc);
303     }
304 }
305
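/* Direct block chaining is used only when we are not single-stepping and,
   for softmmu builds, when both pc and npc stay on the same guest page as
   the current TB. */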
306 static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
307                                target_ulong npc)
308 {
309     if (unlikely(s->singlestep)) {
310         return false;
311     }
312
313 #ifndef CONFIG_USER_ONLY
314     return (pc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK) &&
315            (npc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK);
316 #else
317     return true;
318 #endif
319 }
320
321 static inline void gen_goto_tb(DisasContext *s, int tb_num,
322                                target_ulong pc, target_ulong npc)
323 {
324     if (use_goto_tb(s, pc, npc))  {
325         /* jump to same page: we can use a direct jump */
326         tcg_gen_goto_tb(tb_num);
327         tcg_gen_movi_tl(cpu_pc, pc);
328         tcg_gen_movi_tl(cpu_npc, npc);
329         tcg_gen_exit_tb((uintptr_t)s->tb + tb_num);
330     } else {
331         /* jump to another page: currently not optimized */
332         tcg_gen_movi_tl(cpu_pc, pc);
333         tcg_gen_movi_tl(cpu_npc, npc);
334         tcg_gen_exit_tb(0);
335     }
336 }
337
338 // XXX suboptimal
339 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
340 {
341     tcg_gen_extu_i32_tl(reg, src);
342     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
343     tcg_gen_andi_tl(reg, reg, 0x1);
344 }
345
346 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
347 {
348     tcg_gen_extu_i32_tl(reg, src);
349     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
350     tcg_gen_andi_tl(reg, reg, 0x1);
351 }
352
353 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
354 {
355     tcg_gen_extu_i32_tl(reg, src);
356     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
357     tcg_gen_andi_tl(reg, reg, 0x1);
358 }
359
360 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
361 {
362     tcg_gen_extu_i32_tl(reg, src);
363     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
364     tcg_gen_andi_tl(reg, reg, 0x1);
365 }
366
367 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
368 {
369     tcg_gen_mov_tl(cpu_cc_src, src1);
370     tcg_gen_mov_tl(cpu_cc_src2, src2);
371     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
372     tcg_gen_mov_tl(dst, cpu_cc_dst);
373 }
374
375 static TCGv_i32 gen_add32_carry32(void)
376 {
377     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
378
379     /* Carry is computed from a previous add: (dst < src)  */
380 #if TARGET_LONG_BITS == 64
381     cc_src1_32 = tcg_temp_new_i32();
382     cc_src2_32 = tcg_temp_new_i32();
383     tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
384     tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
385 #else
386     cc_src1_32 = cpu_cc_dst;
387     cc_src2_32 = cpu_cc_src;
388 #endif
389
390     carry_32 = tcg_temp_new_i32();
391     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
392
393 #if TARGET_LONG_BITS == 64
394     tcg_temp_free_i32(cc_src1_32);
395     tcg_temp_free_i32(cc_src2_32);
396 #endif
397
398     return carry_32;
399 }
400
401 static TCGv_i32 gen_sub32_carry32(void)
402 {
403     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
404
405     /* Carry is computed from a previous borrow: (src1 < src2)  */
406 #if TARGET_LONG_BITS == 64
407     cc_src1_32 = tcg_temp_new_i32();
408     cc_src2_32 = tcg_temp_new_i32();
409     tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
410     tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
411 #else
412     cc_src1_32 = cpu_cc_src;
413     cc_src2_32 = cpu_cc_src2;
414 #endif
415
416     carry_32 = tcg_temp_new_i32();
417     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
418
419 #if TARGET_LONG_BITS == 64
420     tcg_temp_free_i32(cc_src1_32);
421     tcg_temp_free_i32(cc_src2_32);
422 #endif
423
424     return carry_32;
425 }
426
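/* ADDX/ADDC: dst = src1 + src2 + icc.C.  The carry bit is reconstructed
   from the recorded cc_op when possible instead of recomputing the full
   PSR. */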
427 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
428                             TCGv src2, int update_cc)
429 {
430     TCGv_i32 carry_32;
431     TCGv carry;
432
433     switch (dc->cc_op) {
434     case CC_OP_DIV:
435     case CC_OP_LOGIC:
436         /* Carry is known to be zero.  Fall back to plain ADD.  */
437         if (update_cc) {
438             gen_op_add_cc(dst, src1, src2);
439         } else {
440             tcg_gen_add_tl(dst, src1, src2);
441         }
442         return;
443
444     case CC_OP_ADD:
445     case CC_OP_TADD:
446     case CC_OP_TADDTV:
447         if (TARGET_LONG_BITS == 32) {
448             /* We can re-use the host's hardware carry generation by using
449                an ADD2 opcode.  We discard the low part of the output.
450                Ideally we'd combine this operation with the add that
451                generated the carry in the first place.  */
452             carry = tcg_temp_new();
453             tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
454             tcg_temp_free(carry);
455             goto add_done;
456         }
457         carry_32 = gen_add32_carry32();
458         break;
459
460     case CC_OP_SUB:
461     case CC_OP_TSUB:
462     case CC_OP_TSUBTV:
463         carry_32 = gen_sub32_carry32();
464         break;
465
466     default:
467         /* We need external help to produce the carry.  */
468         carry_32 = tcg_temp_new_i32();
469         gen_helper_compute_C_icc(carry_32, cpu_env);
470         break;
471     }
472
473 #if TARGET_LONG_BITS == 64
474     carry = tcg_temp_new();
475     tcg_gen_extu_i32_i64(carry, carry_32);
476 #else
477     carry = carry_32;
478 #endif
479
480     tcg_gen_add_tl(dst, src1, src2);
481     tcg_gen_add_tl(dst, dst, carry);
482
483     tcg_temp_free_i32(carry_32);
484 #if TARGET_LONG_BITS == 64
485     tcg_temp_free(carry);
486 #endif
487
488  add_done:
489     if (update_cc) {
490         tcg_gen_mov_tl(cpu_cc_src, src1);
491         tcg_gen_mov_tl(cpu_cc_src2, src2);
492         tcg_gen_mov_tl(cpu_cc_dst, dst);
493         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
494         dc->cc_op = CC_OP_ADDX;
495     }
496 }
497
498 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
499 {
500     tcg_gen_mov_tl(cpu_cc_src, src1);
501     tcg_gen_mov_tl(cpu_cc_src2, src2);
502     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
503     tcg_gen_mov_tl(dst, cpu_cc_dst);
504 }
505
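/* SUBX/SUBC: dst = src1 - src2 - icc.C, mirroring gen_op_addx_int above. */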
506 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
507                             TCGv src2, int update_cc)
508 {
509     TCGv_i32 carry_32;
510     TCGv carry;
511
512     switch (dc->cc_op) {
513     case CC_OP_DIV:
514     case CC_OP_LOGIC:
515         /* Carry is known to be zero.  Fall back to plain SUB.  */
516         if (update_cc) {
517             gen_op_sub_cc(dst, src1, src2);
518         } else {
519             tcg_gen_sub_tl(dst, src1, src2);
520         }
521         return;
522
523     case CC_OP_ADD:
524     case CC_OP_TADD:
525     case CC_OP_TADDTV:
526         carry_32 = gen_add32_carry32();
527         break;
528
529     case CC_OP_SUB:
530     case CC_OP_TSUB:
531     case CC_OP_TSUBTV:
532         if (TARGET_LONG_BITS == 32) {
533             /* We can re-use the host's hardware carry generation by using
534                a SUB2 opcode.  We discard the low part of the output.
535            Ideally we'd combine this operation with the subtract that
536                generated the carry in the first place.  */
537             carry = tcg_temp_new();
538             tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
539             tcg_temp_free(carry);
540             goto sub_done;
541         }
542         carry_32 = gen_sub32_carry32();
543         break;
544
545     default:
546         /* We need external help to produce the carry.  */
547         carry_32 = tcg_temp_new_i32();
548         gen_helper_compute_C_icc(carry_32, cpu_env);
549         break;
550     }
551
552 #if TARGET_LONG_BITS == 64
553     carry = tcg_temp_new();
554     tcg_gen_extu_i32_i64(carry, carry_32);
555 #else
556     carry = carry_32;
557 #endif
558
559     tcg_gen_sub_tl(dst, src1, src2);
560     tcg_gen_sub_tl(dst, dst, carry);
561
562     tcg_temp_free_i32(carry_32);
563 #if TARGET_LONG_BITS == 64
564     tcg_temp_free(carry);
565 #endif
566
567  sub_done:
568     if (update_cc) {
569         tcg_gen_mov_tl(cpu_cc_src, src1);
570         tcg_gen_mov_tl(cpu_cc_src2, src2);
571         tcg_gen_mov_tl(cpu_cc_dst, dst);
572         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
573         dc->cc_op = CC_OP_SUBX;
574     }
575 }
576
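/* One step of the V8 multiply-step instruction (MULScc): rs2 is added in
   only if bit 0 of %y was set, rs1 is shifted right one bit with N^V
   shifted in at the top, and %y is rotated right one bit with the old
   bit 0 of rs1 shifted in. */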
577 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
578 {
579     TCGv r_temp, zero, t0;
580
581     r_temp = tcg_temp_new();
582     t0 = tcg_temp_new();
583
584     /* old op:
585     if (!(env->y & 1))
586         T1 = 0;
587     */
588     zero = tcg_const_tl(0);
589     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
590     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
591     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
592     tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
593                        zero, cpu_cc_src2);
594     tcg_temp_free(zero);
595
596     // b2 = T0 & 1;
597     // env->y = (b2 << 31) | (env->y >> 1);
598     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
599     tcg_gen_shli_tl(r_temp, r_temp, 31);
600     tcg_gen_shri_tl(t0, cpu_y, 1);
601     tcg_gen_andi_tl(t0, t0, 0x7fffffff);
602     tcg_gen_or_tl(t0, t0, r_temp);
603     tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);
604
605     // b1 = N ^ V;
606     gen_mov_reg_N(t0, cpu_psr);
607     gen_mov_reg_V(r_temp, cpu_psr);
608     tcg_gen_xor_tl(t0, t0, r_temp);
609     tcg_temp_free(r_temp);
610
611     // T0 = (b1 << 31) | (T0 >> 1);
612     // src1 = T0;
613     tcg_gen_shli_tl(t0, t0, 31);
614     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
615     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
616     tcg_temp_free(t0);
617
618     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
619
620     tcg_gen_mov_tl(dst, cpu_cc_dst);
621 }
622
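/* 32x32->64 multiply for UMUL/SMUL: the upper 32 bits of the product are
   written to %y; dst receives the low half (the full 64-bit product when
   target_ulong is 64 bits). */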
623 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
624 {
625 #if TARGET_LONG_BITS == 32
626     if (sign_ext) {
627         tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
628     } else {
629         tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
630     }
631 #else
632     TCGv t0 = tcg_temp_new_i64();
633     TCGv t1 = tcg_temp_new_i64();
634
635     if (sign_ext) {
636         tcg_gen_ext32s_i64(t0, src1);
637         tcg_gen_ext32s_i64(t1, src2);
638     } else {
639         tcg_gen_ext32u_i64(t0, src1);
640         tcg_gen_ext32u_i64(t1, src2);
641     }
642
643     tcg_gen_mul_i64(dst, t0, t1);
644     tcg_temp_free(t0);
645     tcg_temp_free(t1);
646
647     tcg_gen_shri_i64(cpu_y, dst, 32);
648 #endif
649 }
650
651 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
652 {
653     /* zero-extend truncated operands before multiplication */
654     gen_op_multiply(dst, src1, src2, 0);
655 }
656
657 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
658 {
659     /* sign-extend truncated operands before multiplication */
660     gen_op_multiply(dst, src1, src2, 1);
661 }
662
663 // 1
664 static inline void gen_op_eval_ba(TCGv dst)
665 {
666     tcg_gen_movi_tl(dst, 1);
667 }
668
669 // Z
670 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
671 {
672     gen_mov_reg_Z(dst, src);
673 }
674
675 // Z | (N ^ V)
676 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
677 {
678     TCGv t0 = tcg_temp_new();
679     gen_mov_reg_N(t0, src);
680     gen_mov_reg_V(dst, src);
681     tcg_gen_xor_tl(dst, dst, t0);
682     gen_mov_reg_Z(t0, src);
683     tcg_gen_or_tl(dst, dst, t0);
684     tcg_temp_free(t0);
685 }
686
687 // N ^ V
688 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
689 {
690     TCGv t0 = tcg_temp_new();
691     gen_mov_reg_V(t0, src);
692     gen_mov_reg_N(dst, src);
693     tcg_gen_xor_tl(dst, dst, t0);
694     tcg_temp_free(t0);
695 }
696
697 // C | Z
698 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
699 {
700     TCGv t0 = tcg_temp_new();
701     gen_mov_reg_Z(t0, src);
702     gen_mov_reg_C(dst, src);
703     tcg_gen_or_tl(dst, dst, t0);
704     tcg_temp_free(t0);
705 }
706
707 // C
708 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
709 {
710     gen_mov_reg_C(dst, src);
711 }
712
713 // V
714 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
715 {
716     gen_mov_reg_V(dst, src);
717 }
718
719 // 0
720 static inline void gen_op_eval_bn(TCGv dst)
721 {
722     tcg_gen_movi_tl(dst, 0);
723 }
724
725 // N
726 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
727 {
728     gen_mov_reg_N(dst, src);
729 }
730
731 // !Z
732 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
733 {
734     gen_mov_reg_Z(dst, src);
735     tcg_gen_xori_tl(dst, dst, 0x1);
736 }
737
738 // !(Z | (N ^ V))
739 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
740 {
741     gen_op_eval_ble(dst, src);
742     tcg_gen_xori_tl(dst, dst, 0x1);
743 }
744
745 // !(N ^ V)
746 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
747 {
748     gen_op_eval_bl(dst, src);
749     tcg_gen_xori_tl(dst, dst, 0x1);
750 }
751
752 // !(C | Z)
753 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
754 {
755     gen_op_eval_bleu(dst, src);
756     tcg_gen_xori_tl(dst, dst, 0x1);
757 }
758
759 // !C
760 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
761 {
762     gen_mov_reg_C(dst, src);
763     tcg_gen_xori_tl(dst, dst, 0x1);
764 }
765
766 // !N
767 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
768 {
769     gen_mov_reg_N(dst, src);
770     tcg_gen_xori_tl(dst, dst, 0x1);
771 }
772
773 // !V
774 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
775 {
776     gen_mov_reg_V(dst, src);
777     tcg_gen_xori_tl(dst, dst, 0x1);
778 }
779
780 /*
781   FPSR bit field FCC1 | FCC0:
782    0 =
783    1 <
784    2 >
785    3 unordered
786 */
787 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
788                                     unsigned int fcc_offset)
789 {
790     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
791     tcg_gen_andi_tl(reg, reg, 0x1);
792 }
793
794 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
795                                     unsigned int fcc_offset)
796 {
797     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
798     tcg_gen_andi_tl(reg, reg, 0x1);
799 }
800
801 // !0: FCC0 | FCC1
802 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
803                                     unsigned int fcc_offset)
804 {
805     TCGv t0 = tcg_temp_new();
806     gen_mov_reg_FCC0(dst, src, fcc_offset);
807     gen_mov_reg_FCC1(t0, src, fcc_offset);
808     tcg_gen_or_tl(dst, dst, t0);
809     tcg_temp_free(t0);
810 }
811
812 // 1 or 2: FCC0 ^ FCC1
813 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
814                                     unsigned int fcc_offset)
815 {
816     TCGv t0 = tcg_temp_new();
817     gen_mov_reg_FCC0(dst, src, fcc_offset);
818     gen_mov_reg_FCC1(t0, src, fcc_offset);
819     tcg_gen_xor_tl(dst, dst, t0);
820     tcg_temp_free(t0);
821 }
822
823 // 1 or 3: FCC0
824 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
825                                     unsigned int fcc_offset)
826 {
827     gen_mov_reg_FCC0(dst, src, fcc_offset);
828 }
829
830 // 1: FCC0 & !FCC1
831 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
832                                     unsigned int fcc_offset)
833 {
834     TCGv t0 = tcg_temp_new();
835     gen_mov_reg_FCC0(dst, src, fcc_offset);
836     gen_mov_reg_FCC1(t0, src, fcc_offset);
837     tcg_gen_andc_tl(dst, dst, t0);
838     tcg_temp_free(t0);
839 }
840
841 // 2 or 3: FCC1
842 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
843                                     unsigned int fcc_offset)
844 {
845     gen_mov_reg_FCC1(dst, src, fcc_offset);
846 }
847
848 // 2: !FCC0 & FCC1
849 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
850                                     unsigned int fcc_offset)
851 {
852     TCGv t0 = tcg_temp_new();
853     gen_mov_reg_FCC0(dst, src, fcc_offset);
854     gen_mov_reg_FCC1(t0, src, fcc_offset);
855     tcg_gen_andc_tl(dst, t0, dst);
856     tcg_temp_free(t0);
857 }
858
859 // 3: FCC0 & FCC1
860 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
861                                     unsigned int fcc_offset)
862 {
863     TCGv t0 = tcg_temp_new();
864     gen_mov_reg_FCC0(dst, src, fcc_offset);
865     gen_mov_reg_FCC1(t0, src, fcc_offset);
866     tcg_gen_and_tl(dst, dst, t0);
867     tcg_temp_free(t0);
868 }
869
870 // 0: !(FCC0 | FCC1)
871 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
872                                     unsigned int fcc_offset)
873 {
874     TCGv t0 = tcg_temp_new();
875     gen_mov_reg_FCC0(dst, src, fcc_offset);
876     gen_mov_reg_FCC1(t0, src, fcc_offset);
877     tcg_gen_or_tl(dst, dst, t0);
878     tcg_gen_xori_tl(dst, dst, 0x1);
879     tcg_temp_free(t0);
880 }
881
882 // 0 or 3: !(FCC0 ^ FCC1)
883 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
884                                     unsigned int fcc_offset)
885 {
886     TCGv t0 = tcg_temp_new();
887     gen_mov_reg_FCC0(dst, src, fcc_offset);
888     gen_mov_reg_FCC1(t0, src, fcc_offset);
889     tcg_gen_xor_tl(dst, dst, t0);
890     tcg_gen_xori_tl(dst, dst, 0x1);
891     tcg_temp_free(t0);
892 }
893
894 // 0 or 2: !FCC0
895 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
896                                     unsigned int fcc_offset)
897 {
898     gen_mov_reg_FCC0(dst, src, fcc_offset);
899     tcg_gen_xori_tl(dst, dst, 0x1);
900 }
901
902 // !1: !(FCC0 & !FCC1)
903 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
904                                     unsigned int fcc_offset)
905 {
906     TCGv t0 = tcg_temp_new();
907     gen_mov_reg_FCC0(dst, src, fcc_offset);
908     gen_mov_reg_FCC1(t0, src, fcc_offset);
909     tcg_gen_andc_tl(dst, dst, t0);
910     tcg_gen_xori_tl(dst, dst, 0x1);
911     tcg_temp_free(t0);
912 }
913
914 // 0 or 1: !FCC1
915 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
916                                     unsigned int fcc_offset)
917 {
918     gen_mov_reg_FCC1(dst, src, fcc_offset);
919     tcg_gen_xori_tl(dst, dst, 0x1);
920 }
921
922 // !2: !(!FCC0 & FCC1)
923 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
924                                     unsigned int fcc_offset)
925 {
926     TCGv t0 = tcg_temp_new();
927     gen_mov_reg_FCC0(dst, src, fcc_offset);
928     gen_mov_reg_FCC1(t0, src, fcc_offset);
929     tcg_gen_andc_tl(dst, t0, dst);
930     tcg_gen_xori_tl(dst, dst, 0x1);
931     tcg_temp_free(t0);
932 }
933
934 // !3: !(FCC0 & FCC1)
935 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
936                                     unsigned int fcc_offset)
937 {
938     TCGv t0 = tcg_temp_new();
939     gen_mov_reg_FCC0(dst, src, fcc_offset);
940     gen_mov_reg_FCC1(t0, src, fcc_offset);
941     tcg_gen_and_tl(dst, dst, t0);
942     tcg_gen_xori_tl(dst, dst, 0x1);
943     tcg_temp_free(t0);
944 }
945
946 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
947                                target_ulong pc2, TCGv r_cond)
948 {
949     TCGLabel *l1 = gen_new_label();
950
951     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
952
953     gen_goto_tb(dc, 0, pc1, pc1 + 4);
954
955     gen_set_label(l1);
956     gen_goto_tb(dc, 1, pc2, pc2 + 4);
957 }
958
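/* Conditional branch with the annul bit set: if taken, the delay slot at
   npc executes before control reaches the target; if not taken, the delay
   slot is annulled and execution resumes at npc + 4. */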
959 static void gen_branch_a(DisasContext *dc, target_ulong pc1)
960 {
961     TCGLabel *l1 = gen_new_label();
962     target_ulong npc = dc->npc;
963
964     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);
965
966     gen_goto_tb(dc, 0, npc, pc1);
967
968     gen_set_label(l1);
969     gen_goto_tb(dc, 1, npc + 4, npc + 8);
970
971     dc->is_br = 1;
972 }
973
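/* Conditional branch without annul: the delay slot always executes, and
   the new npc (target or fall-through) is selected by cpu_cond, either
   lazily via JUMP_PC when npc is static or with a movcond at runtime. */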
974 static void gen_branch_n(DisasContext *dc, target_ulong pc1)
975 {
976     target_ulong npc = dc->npc;
977
978     if (likely(npc != DYNAMIC_PC)) {
979         dc->pc = npc;
980         dc->jump_pc[0] = pc1;
981         dc->jump_pc[1] = npc + 4;
982         dc->npc = JUMP_PC;
983     } else {
984         TCGv t, z;
985
986         tcg_gen_mov_tl(cpu_pc, cpu_npc);
987
988         tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
989         t = tcg_const_tl(pc1);
990         z = tcg_const_tl(0);
991         tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
992         tcg_temp_free(t);
993         tcg_temp_free(z);
994
995         dc->pc = DYNAMIC_PC;
996     }
997 }
998
999 static inline void gen_generic_branch(DisasContext *dc)
1000 {
1001     TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
1002     TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
1003     TCGv zero = tcg_const_tl(0);
1004
1005     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
1006
1007     tcg_temp_free(npc0);
1008     tcg_temp_free(npc1);
1009     tcg_temp_free(zero);
1010 }
1011
1012 /* Call this function before reusing the condition register, as it may
1013    still hold the condition of a pending JUMP_PC branch. */
1014 static inline void flush_cond(DisasContext *dc)
1015 {
1016     if (dc->npc == JUMP_PC) {
1017         gen_generic_branch(dc);
1018         dc->npc = DYNAMIC_PC;
1019     }
1020 }
1021
1022 static inline void save_npc(DisasContext *dc)
1023 {
1024     if (dc->npc == JUMP_PC) {
1025         gen_generic_branch(dc);
1026         dc->npc = DYNAMIC_PC;
1027     } else if (dc->npc != DYNAMIC_PC) {
1028         tcg_gen_movi_tl(cpu_npc, dc->npc);
1029     }
1030 }
1031
1032 static inline void update_psr(DisasContext *dc)
1033 {
1034     if (dc->cc_op != CC_OP_FLAGS) {
1035         dc->cc_op = CC_OP_FLAGS;
1036         gen_helper_compute_psr(cpu_env);
1037     }
1038 }
1039
1040 static inline void save_state(DisasContext *dc)
1041 {
1042     tcg_gen_movi_tl(cpu_pc, dc->pc);
1043     save_npc(dc);
1044 }
1045
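/* Raise a trap: synchronize pc/npc into the CPU state, call the
   raise_exception helper (which does not return to the TB), and mark the
   translation block as finished. */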
1046 static void gen_exception(DisasContext *dc, int which)
1047 {
1048     TCGv_i32 t;
1049
1050     save_state(dc);
1051     t = tcg_const_i32(which);
1052     gen_helper_raise_exception(cpu_env, t);
1053     tcg_temp_free_i32(t);
1054     dc->is_br = 1;
1055 }
1056
1057 static inline void gen_mov_pc_npc(DisasContext *dc)
1058 {
1059     if (dc->npc == JUMP_PC) {
1060         gen_generic_branch(dc);
1061         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1062         dc->pc = DYNAMIC_PC;
1063     } else if (dc->npc == DYNAMIC_PC) {
1064         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1065         dc->pc = DYNAMIC_PC;
1066     } else {
1067         dc->pc = dc->npc;
1068     }
1069 }
1070
1071 static inline void gen_op_next_insn(void)
1072 {
1073     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1074     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1075 }
1076
1077 static void free_compare(DisasCompare *cmp)
1078 {
1079     if (!cmp->g1) {
1080         tcg_temp_free(cmp->c1);
1081     }
1082     if (!cmp->g2) {
1083         tcg_temp_free(cmp->c2);
1084     }
1085 }
1086
1087 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1088                         DisasContext *dc)
1089 {
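    /* For CC_OP_SUB the flags came from cc_src - cc_src2, so most icc
       conditions map onto a direct comparison of the saved operands; the
       N- and V-based conditions (-1 entries) are handled separately.  */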
1090     static int subcc_cond[16] = {
1091         TCG_COND_NEVER,
1092         TCG_COND_EQ,
1093         TCG_COND_LE,
1094         TCG_COND_LT,
1095         TCG_COND_LEU,
1096         TCG_COND_LTU,
1097         -1, /* neg */
1098         -1, /* overflow */
1099         TCG_COND_ALWAYS,
1100         TCG_COND_NE,
1101         TCG_COND_GT,
1102         TCG_COND_GE,
1103         TCG_COND_GTU,
1104         TCG_COND_GEU,
1105         -1, /* pos */
1106         -1, /* no overflow */
1107     };
1108
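    /* After a logical operation C and V are known to be zero, so every
       condition reduces to a sign or zero test of cc_dst.  */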
1109     static int logic_cond[16] = {
1110         TCG_COND_NEVER,
1111         TCG_COND_EQ,     /* eq:  Z */
1112         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1113         TCG_COND_LT,     /* lt:  N ^ V -> N */
1114         TCG_COND_EQ,     /* leu: C | Z -> Z */
1115         TCG_COND_NEVER,  /* ltu: C -> 0 */
1116         TCG_COND_LT,     /* neg: N */
1117         TCG_COND_NEVER,  /* vs:  V -> 0 */
1118         TCG_COND_ALWAYS,
1119         TCG_COND_NE,     /* ne:  !Z */
1120         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1121         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1122         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1123         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1124         TCG_COND_GE,     /* pos: !N */
1125         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1126     };
1127
1128     TCGv_i32 r_src;
1129     TCGv r_dst;
1130
1131 #ifdef TARGET_SPARC64
1132     if (xcc) {
1133         r_src = cpu_xcc;
1134     } else {
1135         r_src = cpu_psr;
1136     }
1137 #else
1138     r_src = cpu_psr;
1139 #endif
1140
1141     switch (dc->cc_op) {
1142     case CC_OP_LOGIC:
1143         cmp->cond = logic_cond[cond];
1144     do_compare_dst_0:
1145         cmp->is_bool = false;
1146         cmp->g2 = false;
1147         cmp->c2 = tcg_const_tl(0);
1148 #ifdef TARGET_SPARC64
1149         if (!xcc) {
1150             cmp->g1 = false;
1151             cmp->c1 = tcg_temp_new();
1152             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1153             break;
1154         }
1155 #endif
1156         cmp->g1 = true;
1157         cmp->c1 = cpu_cc_dst;
1158         break;
1159
1160     case CC_OP_SUB:
1161         switch (cond) {
1162         case 6:  /* neg */
1163         case 14: /* pos */
1164             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1165             goto do_compare_dst_0;
1166
1167         case 7: /* overflow */
1168         case 15: /* !overflow */
1169             goto do_dynamic;
1170
1171         default:
1172             cmp->cond = subcc_cond[cond];
1173             cmp->is_bool = false;
1174 #ifdef TARGET_SPARC64
1175             if (!xcc) {
1176                 /* Note that sign-extension works for unsigned compares as
1177                    long as both operands are sign-extended.  */
1178                 cmp->g1 = cmp->g2 = false;
1179                 cmp->c1 = tcg_temp_new();
1180                 cmp->c2 = tcg_temp_new();
1181                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1182                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1183                 break;
1184             }
1185 #endif
1186             cmp->g1 = cmp->g2 = true;
1187             cmp->c1 = cpu_cc_src;
1188             cmp->c2 = cpu_cc_src2;
1189             break;
1190         }
1191         break;
1192
1193     default:
1194     do_dynamic:
1195         gen_helper_compute_psr(cpu_env);
1196         dc->cc_op = CC_OP_FLAGS;
1197         /* FALLTHRU */
1198
1199     case CC_OP_FLAGS:
1200         /* We're going to generate a boolean result.  */
1201         cmp->cond = TCG_COND_NE;
1202         cmp->is_bool = true;
1203         cmp->g1 = cmp->g2 = false;
1204         cmp->c1 = r_dst = tcg_temp_new();
1205         cmp->c2 = tcg_const_tl(0);
1206
1207         switch (cond) {
1208         case 0x0:
1209             gen_op_eval_bn(r_dst);
1210             break;
1211         case 0x1:
1212             gen_op_eval_be(r_dst, r_src);
1213             break;
1214         case 0x2:
1215             gen_op_eval_ble(r_dst, r_src);
1216             break;
1217         case 0x3:
1218             gen_op_eval_bl(r_dst, r_src);
1219             break;
1220         case 0x4:
1221             gen_op_eval_bleu(r_dst, r_src);
1222             break;
1223         case 0x5:
1224             gen_op_eval_bcs(r_dst, r_src);
1225             break;
1226         case 0x6:
1227             gen_op_eval_bneg(r_dst, r_src);
1228             break;
1229         case 0x7:
1230             gen_op_eval_bvs(r_dst, r_src);
1231             break;
1232         case 0x8:
1233             gen_op_eval_ba(r_dst);
1234             break;
1235         case 0x9:
1236             gen_op_eval_bne(r_dst, r_src);
1237             break;
1238         case 0xa:
1239             gen_op_eval_bg(r_dst, r_src);
1240             break;
1241         case 0xb:
1242             gen_op_eval_bge(r_dst, r_src);
1243             break;
1244         case 0xc:
1245             gen_op_eval_bgu(r_dst, r_src);
1246             break;
1247         case 0xd:
1248             gen_op_eval_bcc(r_dst, r_src);
1249             break;
1250         case 0xe:
1251             gen_op_eval_bpos(r_dst, r_src);
1252             break;
1253         case 0xf:
1254             gen_op_eval_bvc(r_dst, r_src);
1255             break;
1256         }
1257         break;
1258     }
1259 }
1260
1261 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1262 {
1263     unsigned int offset;
1264     TCGv r_dst;
1265
1266     /* For now we still generate a straight boolean result.  */
1267     cmp->cond = TCG_COND_NE;
1268     cmp->is_bool = true;
1269     cmp->g1 = cmp->g2 = false;
1270     cmp->c1 = r_dst = tcg_temp_new();
1271     cmp->c2 = tcg_const_tl(0);
1272
1273     switch (cc) {
1274     default:
1275     case 0x0:
1276         offset = 0;
1277         break;
1278     case 0x1:
1279         offset = 32 - 10;
1280         break;
1281     case 0x2:
1282         offset = 34 - 10;
1283         break;
1284     case 0x3:
1285         offset = 36 - 10;
1286         break;
1287     }
1288
1289     switch (cond) {
1290     case 0x0:
1291         gen_op_eval_bn(r_dst);
1292         break;
1293     case 0x1:
1294         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1295         break;
1296     case 0x2:
1297         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1298         break;
1299     case 0x3:
1300         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1301         break;
1302     case 0x4:
1303         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1304         break;
1305     case 0x5:
1306         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1307         break;
1308     case 0x6:
1309         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1310         break;
1311     case 0x7:
1312         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1313         break;
1314     case 0x8:
1315         gen_op_eval_ba(r_dst);
1316         break;
1317     case 0x9:
1318         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1319         break;
1320     case 0xa:
1321         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1322         break;
1323     case 0xb:
1324         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1325         break;
1326     case 0xc:
1327         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1328         break;
1329     case 0xd:
1330         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1331         break;
1332     case 0xe:
1333         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1334         break;
1335     case 0xf:
1336         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1337         break;
1338     }
1339 }
1340
1341 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1342                      DisasContext *dc)
1343 {
1344     DisasCompare cmp;
1345     gen_compare(&cmp, cc, cond, dc);
1346
1347     /* The interface is to return a boolean in r_dst.  */
1348     if (cmp.is_bool) {
1349         tcg_gen_mov_tl(r_dst, cmp.c1);
1350     } else {
1351         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1352     }
1353
1354     free_compare(&cmp);
1355 }
1356
1357 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1358 {
1359     DisasCompare cmp;
1360     gen_fcompare(&cmp, cc, cond);
1361
1362     /* The interface is to return a boolean in r_dst.  */
1363     if (cmp.is_bool) {
1364         tcg_gen_mov_tl(r_dst, cmp.c1);
1365     } else {
1366         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1367     }
1368
1369     free_compare(&cmp);
1370 }
1371
1372 #ifdef TARGET_SPARC64
1373 // Inverted logic: each entry is the negation of the architectural condition; gen_compare_reg re-inverts it.
1374 static const int gen_tcg_cond_reg[8] = {
1375     -1,
1376     TCG_COND_NE,
1377     TCG_COND_GT,
1378     TCG_COND_GE,
1379     -1,
1380     TCG_COND_EQ,
1381     TCG_COND_LE,
1382     TCG_COND_LT,
1383 };
1384
1385 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1386 {
1387     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1388     cmp->is_bool = false;
1389     cmp->g1 = true;
1390     cmp->g2 = false;
1391     cmp->c1 = r_src;
1392     cmp->c2 = tcg_const_tl(0);
1393 }
1394
1395 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1396 {
1397     DisasCompare cmp;
1398     gen_compare_reg(&cmp, cond, r_src);
1399
1400     /* The interface is to return a boolean in r_dst.  */
1401     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1402
1403     free_compare(&cmp);
1404 }
1405 #endif
1406
1407 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1408 {
1409     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1410     target_ulong target = dc->pc + offset;
1411
1412 #ifdef TARGET_SPARC64
1413     if (unlikely(AM_CHECK(dc))) {
1414         target &= 0xffffffffULL;
1415     }
1416 #endif
1417     if (cond == 0x0) {
1418         /* unconditional not taken */
1419         if (a) {
1420             dc->pc = dc->npc + 4;
1421             dc->npc = dc->pc + 4;
1422         } else {
1423             dc->pc = dc->npc;
1424             dc->npc = dc->pc + 4;
1425         }
1426     } else if (cond == 0x8) {
1427         /* unconditional taken */
1428         if (a) {
1429             dc->pc = target;
1430             dc->npc = dc->pc + 4;
1431         } else {
1432             dc->pc = dc->npc;
1433             dc->npc = target;
1434             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1435         }
1436     } else {
1437         flush_cond(dc);
1438         gen_cond(cpu_cond, cc, cond, dc);
1439         if (a) {
1440             gen_branch_a(dc, target);
1441         } else {
1442             gen_branch_n(dc, target);
1443         }
1444     }
1445 }
1446
1447 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1448 {
1449     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1450     target_ulong target = dc->pc + offset;
1451
1452 #ifdef TARGET_SPARC64
1453     if (unlikely(AM_CHECK(dc))) {
1454         target &= 0xffffffffULL;
1455     }
1456 #endif
1457     if (cond == 0x0) {
1458         /* unconditional not taken */
1459         if (a) {
1460             dc->pc = dc->npc + 4;
1461             dc->npc = dc->pc + 4;
1462         } else {
1463             dc->pc = dc->npc;
1464             dc->npc = dc->pc + 4;
1465         }
1466     } else if (cond == 0x8) {
1467         /* unconditional taken */
1468         if (a) {
1469             dc->pc = target;
1470             dc->npc = dc->pc + 4;
1471         } else {
1472             dc->pc = dc->npc;
1473             dc->npc = target;
1474             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1475         }
1476     } else {
1477         flush_cond(dc);
1478         gen_fcond(cpu_cond, cc, cond);
1479         if (a) {
1480             gen_branch_a(dc, target);
1481         } else {
1482             gen_branch_n(dc, target);
1483         }
1484     }
1485 }
1486
1487 #ifdef TARGET_SPARC64
1488 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1489                           TCGv r_reg)
1490 {
1491     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1492     target_ulong target = dc->pc + offset;
1493
1494     if (unlikely(AM_CHECK(dc))) {
1495         target &= 0xffffffffULL;
1496     }
1497     flush_cond(dc);
1498     gen_cond_reg(cpu_cond, cond, r_reg);
1499     if (a) {
1500         gen_branch_a(dc, target);
1501     } else {
1502         gen_branch_n(dc, target);
1503     }
1504 }
1505
1506 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1507 {
1508     switch (fccno) {
1509     case 0:
1510         gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1511         break;
1512     case 1:
1513         gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1514         break;
1515     case 2:
1516         gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1517         break;
1518     case 3:
1519         gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1520         break;
1521     }
1522 }
1523
1524 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1525 {
1526     switch (fccno) {
1527     case 0:
1528         gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1529         break;
1530     case 1:
1531         gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1532         break;
1533     case 2:
1534         gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1535         break;
1536     case 3:
1537         gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1538         break;
1539     }
1540 }
1541
1542 static inline void gen_op_fcmpq(int fccno)
1543 {
1544     switch (fccno) {
1545     case 0:
1546         gen_helper_fcmpq(cpu_env);
1547         break;
1548     case 1:
1549         gen_helper_fcmpq_fcc1(cpu_env);
1550         break;
1551     case 2:
1552         gen_helper_fcmpq_fcc2(cpu_env);
1553         break;
1554     case 3:
1555         gen_helper_fcmpq_fcc3(cpu_env);
1556         break;
1557     }
1558 }
1559
1560 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1561 {
1562     switch (fccno) {
1563     case 0:
1564         gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1565         break;
1566     case 1:
1567         gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1568         break;
1569     case 2:
1570         gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1571         break;
1572     case 3:
1573         gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1574         break;
1575     }
1576 }
1577
1578 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1579 {
1580     switch (fccno) {
1581     case 0:
1582         gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1583         break;
1584     case 1:
1585         gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1586         break;
1587     case 2:
1588         gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1589         break;
1590     case 3:
1591         gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1592         break;
1593     }
1594 }
1595
1596 static inline void gen_op_fcmpeq(int fccno)
1597 {
1598     switch (fccno) {
1599     case 0:
1600         gen_helper_fcmpeq(cpu_env);
1601         break;
1602     case 1:
1603         gen_helper_fcmpeq_fcc1(cpu_env);
1604         break;
1605     case 2:
1606         gen_helper_fcmpeq_fcc2(cpu_env);
1607         break;
1608     case 3:
1609         gen_helper_fcmpeq_fcc3(cpu_env);
1610         break;
1611     }
1612 }
1613
1614 #else
1615
1616 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1617 {
1618     gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1619 }
1620
1621 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1622 {
1623     gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1624 }
1625
1626 static inline void gen_op_fcmpq(int fccno)
1627 {
1628     gen_helper_fcmpq(cpu_env);
1629 }
1630
1631 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1632 {
1633     gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1634 }
1635
1636 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1637 {
1638     gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1639 }
1640
1641 static inline void gen_op_fcmpeq(int fccno)
1642 {
1643     gen_helper_fcmpeq(cpu_env);
1644 }
1645 #endif
1646
1647 static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
1648 {
1649     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1650     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1651     gen_exception(dc, TT_FP_EXCP);
1652 }
1653
1654 static int gen_trap_ifnofpu(DisasContext *dc)
1655 {
1656 #if !defined(CONFIG_USER_ONLY)
1657     if (!dc->fpu_enabled) {
1658         gen_exception(dc, TT_NFPU_INSN);
1659         return 1;
1660     }
1661 #endif
1662     return 0;
1663 }
1664
1665 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1666 {
1667     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1668 }
1669
1670 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1671                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1672 {
1673     TCGv_i32 dst, src;
1674
1675     src = gen_load_fpr_F(dc, rs);
1676     dst = gen_dest_fpr_F(dc);
1677
1678     gen(dst, cpu_env, src);
1679
1680     gen_store_fpr_F(dc, rd, dst);
1681 }
1682
1683 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1684                                  void (*gen)(TCGv_i32, TCGv_i32))
1685 {
1686     TCGv_i32 dst, src;
1687
1688     src = gen_load_fpr_F(dc, rs);
1689     dst = gen_dest_fpr_F(dc);
1690
1691     gen(dst, src);
1692
1693     gen_store_fpr_F(dc, rd, dst);
1694 }
1695
1696 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1697                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1698 {
1699     TCGv_i32 dst, src1, src2;
1700
1701     src1 = gen_load_fpr_F(dc, rs1);
1702     src2 = gen_load_fpr_F(dc, rs2);
1703     dst = gen_dest_fpr_F(dc);
1704
1705     gen(dst, cpu_env, src1, src2);
1706
1707     gen_store_fpr_F(dc, rd, dst);
1708 }
1709
1710 #ifdef TARGET_SPARC64
1711 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1712                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1713 {
1714     TCGv_i32 dst, src1, src2;
1715
1716     src1 = gen_load_fpr_F(dc, rs1);
1717     src2 = gen_load_fpr_F(dc, rs2);
1718     dst = gen_dest_fpr_F(dc);
1719
1720     gen(dst, src1, src2);
1721
1722     gen_store_fpr_F(dc, rd, dst);
1723 }
1724 #endif
1725
1726 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1727                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1728 {
1729     TCGv_i64 dst, src;
1730
1731     src = gen_load_fpr_D(dc, rs);
1732     dst = gen_dest_fpr_D(dc, rd);
1733
1734     gen(dst, cpu_env, src);
1735
1736     gen_store_fpr_D(dc, rd, dst);
1737 }
1738
1739 #ifdef TARGET_SPARC64
1740 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1741                                  void (*gen)(TCGv_i64, TCGv_i64))
1742 {
1743     TCGv_i64 dst, src;
1744
1745     src = gen_load_fpr_D(dc, rs);
1746     dst = gen_dest_fpr_D(dc, rd);
1747
1748     gen(dst, src);
1749
1750     gen_store_fpr_D(dc, rd, dst);
1751 }
1752 #endif
1753
1754 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1755                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1756 {
1757     TCGv_i64 dst, src1, src2;
1758
1759     src1 = gen_load_fpr_D(dc, rs1);
1760     src2 = gen_load_fpr_D(dc, rs2);
1761     dst = gen_dest_fpr_D(dc, rd);
1762
1763     gen(dst, cpu_env, src1, src2);
1764
1765     gen_store_fpr_D(dc, rd, dst);
1766 }
1767
1768 #ifdef TARGET_SPARC64
1769 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1770                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1771 {
1772     TCGv_i64 dst, src1, src2;
1773
1774     src1 = gen_load_fpr_D(dc, rs1);
1775     src2 = gen_load_fpr_D(dc, rs2);
1776     dst = gen_dest_fpr_D(dc, rd);
1777
1778     gen(dst, src1, src2);
1779
1780     gen_store_fpr_D(dc, rd, dst);
1781 }
1782
1783 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1784                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1785 {
1786     TCGv_i64 dst, src1, src2;
1787
1788     src1 = gen_load_fpr_D(dc, rs1);
1789     src2 = gen_load_fpr_D(dc, rs2);
1790     dst = gen_dest_fpr_D(dc, rd);
1791
1792     gen(dst, cpu_gsr, src1, src2);
1793
1794     gen_store_fpr_D(dc, rd, dst);
1795 }
1796
1797 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1798                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1799 {
1800     TCGv_i64 dst, src0, src1, src2;
1801
1802     src1 = gen_load_fpr_D(dc, rs1);
1803     src2 = gen_load_fpr_D(dc, rs2);
1804     src0 = gen_load_fpr_D(dc, rd);
1805     dst = gen_dest_fpr_D(dc, rd);
1806
1807     gen(dst, src0, src1, src2);
1808
1809     gen_store_fpr_D(dc, rd, dst);
1810 }
1811 #endif
1812
1813 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1814                               void (*gen)(TCGv_ptr))
1815 {
1816     gen_op_load_fpr_QT1(QFPREG(rs));
1817
1818     gen(cpu_env);
1819
1820     gen_op_store_QT0_fpr(QFPREG(rd));
1821     gen_update_fprs_dirty(QFPREG(rd));
1822 }
1823
1824 #ifdef TARGET_SPARC64
1825 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1826                                  void (*gen)(TCGv_ptr))
1827 {
1828     gen_op_load_fpr_QT1(QFPREG(rs));
1829
1830     gen(cpu_env);
1831
1832     gen_op_store_QT0_fpr(QFPREG(rd));
1833     gen_update_fprs_dirty(QFPREG(rd));
1834 }
1835 #endif
1836
1837 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1838                                void (*gen)(TCGv_ptr))
1839 {
1840     gen_op_load_fpr_QT0(QFPREG(rs1));
1841     gen_op_load_fpr_QT1(QFPREG(rs2));
1842
1843     gen(cpu_env);
1844
1845     gen_op_store_QT0_fpr(QFPREG(rd));
1846     gen_update_fprs_dirty(QFPREG(rd));
1847 }
1848
1849 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1850                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1851 {
1852     TCGv_i64 dst;
1853     TCGv_i32 src1, src2;
1854
1855     src1 = gen_load_fpr_F(dc, rs1);
1856     src2 = gen_load_fpr_F(dc, rs2);
1857     dst = gen_dest_fpr_D(dc, rd);
1858
1859     gen(dst, cpu_env, src1, src2);
1860
1861     gen_store_fpr_D(dc, rd, dst);
1862 }
1863
1864 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1865                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1866 {
1867     TCGv_i64 src1, src2;
1868
1869     src1 = gen_load_fpr_D(dc, rs1);
1870     src2 = gen_load_fpr_D(dc, rs2);
1871
1872     gen(cpu_env, src1, src2);
1873
1874     gen_op_store_QT0_fpr(QFPREG(rd));
1875     gen_update_fprs_dirty(QFPREG(rd));
1876 }
1877
1878 #ifdef TARGET_SPARC64
1879 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1880                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1881 {
1882     TCGv_i64 dst;
1883     TCGv_i32 src;
1884
1885     src = gen_load_fpr_F(dc, rs);
1886     dst = gen_dest_fpr_D(dc, rd);
1887
1888     gen(dst, cpu_env, src);
1889
1890     gen_store_fpr_D(dc, rd, dst);
1891 }
1892 #endif
1893
1894 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1895                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1896 {
1897     TCGv_i64 dst;
1898     TCGv_i32 src;
1899
1900     src = gen_load_fpr_F(dc, rs);
1901     dst = gen_dest_fpr_D(dc, rd);
1902
1903     gen(dst, cpu_env, src);
1904
1905     gen_store_fpr_D(dc, rd, dst);
1906 }
1907
1908 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1909                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1910 {
1911     TCGv_i32 dst;
1912     TCGv_i64 src;
1913
1914     src = gen_load_fpr_D(dc, rs);
1915     dst = gen_dest_fpr_F(dc);
1916
1917     gen(dst, cpu_env, src);
1918
1919     gen_store_fpr_F(dc, rd, dst);
1920 }
1921
1922 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1923                               void (*gen)(TCGv_i32, TCGv_ptr))
1924 {
1925     TCGv_i32 dst;
1926
1927     gen_op_load_fpr_QT1(QFPREG(rs));
1928     dst = gen_dest_fpr_F(dc);
1929
1930     gen(dst, cpu_env);
1931
1932     gen_store_fpr_F(dc, rd, dst);
1933 }
1934
1935 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1936                               void (*gen)(TCGv_i64, TCGv_ptr))
1937 {
1938     TCGv_i64 dst;
1939
1940     gen_op_load_fpr_QT1(QFPREG(rs));
1941     dst = gen_dest_fpr_D(dc, rd);
1942
1943     gen(dst, cpu_env);
1944
1945     gen_store_fpr_D(dc, rd, dst);
1946 }
1947
1948 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1949                                  void (*gen)(TCGv_ptr, TCGv_i32))
1950 {
1951     TCGv_i32 src;
1952
1953     src = gen_load_fpr_F(dc, rs);
1954
1955     gen(cpu_env, src);
1956
1957     gen_op_store_QT0_fpr(QFPREG(rd));
1958     gen_update_fprs_dirty(QFPREG(rd));
1959 }
1960
1961 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1962                                  void (*gen)(TCGv_ptr, TCGv_i64))
1963 {
1964     TCGv_i64 src;
1965
1966     src = gen_load_fpr_D(dc, rs);
1967
1968     gen(cpu_env, src);
1969
1970     gen_op_store_QT0_fpr(QFPREG(rd));
1971     gen_update_fprs_dirty(QFPREG(rd));
1972 }
1973
1974 /* asi moves */
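/* For V9 alternate-space accesses the i bit selects where the ASI comes
   from: with i set (register + immediate addressing) the value currently
   held in the %asi register is used, otherwise the asi field encoded in
   the instruction itself is used.  gen_get_asi mirrors this by either
   copying cpu_asi into a temporary or materializing the immediate field
   as a 32-bit constant.  */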
1975 #ifdef TARGET_SPARC64
1976 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1977 {
1978     int asi;
1979     TCGv_i32 r_asi;
1980
1981     if (IS_IMM) {
1982         r_asi = tcg_temp_new_i32();
1983         tcg_gen_mov_i32(r_asi, cpu_asi);
1984     } else {
1985         asi = GET_FIELD(insn, 19, 26);
1986         r_asi = tcg_const_i32(asi);
1987     }
1988     return r_asi;
1989 }
1990
1991 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1992                               int sign)
1993 {
1994     TCGv_i32 r_asi, r_size, r_sign;
1995
1996     r_asi = gen_get_asi(insn, addr);
1997     r_size = tcg_const_i32(size);
1998     r_sign = tcg_const_i32(sign);
1999     gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
2000     tcg_temp_free_i32(r_sign);
2001     tcg_temp_free_i32(r_size);
2002     tcg_temp_free_i32(r_asi);
2003 }
2004
2005 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2006 {
2007     TCGv_i32 r_asi, r_size;
2008
2009     r_asi = gen_get_asi(insn, addr);
2010     r_size = tcg_const_i32(size);
2011     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2012     tcg_temp_free_i32(r_size);
2013     tcg_temp_free_i32(r_asi);
2014 }
2015
2016 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
2017 {
2018     TCGv_i32 r_asi, r_size, r_rd;
2019
2020     r_asi = gen_get_asi(insn, addr);
2021     r_size = tcg_const_i32(size);
2022     r_rd = tcg_const_i32(rd);
2023     gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2024     tcg_temp_free_i32(r_rd);
2025     tcg_temp_free_i32(r_size);
2026     tcg_temp_free_i32(r_asi);
2027 }
2028
2029 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
2030 {
2031     TCGv_i32 r_asi, r_size, r_rd;
2032
2033     r_asi = gen_get_asi(insn, addr);
2034     r_size = tcg_const_i32(size);
2035     r_rd = tcg_const_i32(rd);
2036     gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2037     tcg_temp_free_i32(r_rd);
2038     tcg_temp_free_i32(r_size);
2039     tcg_temp_free_i32(r_asi);
2040 }
2041
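/* SWAPA exchanges a register with the addressed word: the old memory
   contents are fetched through the ld_asi helper into a 64-bit
   temporary, the register is stored through the st_asi helper, and the
   loaded value is then truncated into dst.  */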
2042 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2043 {
2044     TCGv_i32 r_asi, r_size, r_sign;
2045     TCGv_i64 t64 = tcg_temp_new_i64();
2046
2047     r_asi = gen_get_asi(insn, addr);
2048     r_size = tcg_const_i32(4);
2049     r_sign = tcg_const_i32(0);
2050     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2051     tcg_temp_free_i32(r_sign);
2052     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2053     tcg_temp_free_i32(r_size);
2054     tcg_temp_free_i32(r_asi);
2055     tcg_gen_trunc_i64_tl(dst, t64);
2056     tcg_temp_free_i64(t64);
2057 }
2058
2059 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2060                                 int insn, int rd)
2061 {
2062     TCGv_i32 r_asi, r_rd;
2063
2064     r_asi = gen_get_asi(insn, addr);
2065     r_rd = tcg_const_i32(rd);
2066     gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
2067     tcg_temp_free_i32(r_rd);
2068     tcg_temp_free_i32(r_asi);
2069 }
2070
2071 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2072                                 int insn, int rd)
2073 {
2074     TCGv_i32 r_asi, r_size;
2075     TCGv lo = gen_load_gpr(dc, rd + 1);
2076     TCGv_i64 t64 = tcg_temp_new_i64();
2077
2078     tcg_gen_concat_tl_i64(t64, lo, hi);
2079     r_asi = gen_get_asi(insn, addr);
2080     r_size = tcg_const_i32(8);
2081     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2082     tcg_temp_free_i32(r_size);
2083     tcg_temp_free_i32(r_asi);
2084     tcg_temp_free_i64(t64);
2085 }
2086
2087 static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
2088                                 TCGv val2, int insn, int rd)
2089 {
2090     TCGv val1 = gen_load_gpr(dc, rd);
2091     TCGv dst = gen_dest_gpr(dc, rd);
2092     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2093
2094     gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
2095     tcg_temp_free_i32(r_asi);
2096     gen_store_gpr(dc, rd, dst);
2097 }
2098
2099 #elif !defined(CONFIG_USER_ONLY)
2100
2101 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2102                               int sign)
2103 {
2104     TCGv_i32 r_asi, r_size, r_sign;
2105     TCGv_i64 t64 = tcg_temp_new_i64();
2106
2107     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2108     r_size = tcg_const_i32(size);
2109     r_sign = tcg_const_i32(sign);
2110     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2111     tcg_temp_free_i32(r_sign);
2112     tcg_temp_free_i32(r_size);
2113     tcg_temp_free_i32(r_asi);
2114     tcg_gen_trunc_i64_tl(dst, t64);
2115     tcg_temp_free_i64(t64);
2116 }
2117
2118 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2119 {
2120     TCGv_i32 r_asi, r_size;
2121     TCGv_i64 t64 = tcg_temp_new_i64();
2122
2123     tcg_gen_extu_tl_i64(t64, src);
2124     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2125     r_size = tcg_const_i32(size);
2126     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2127     tcg_temp_free_i32(r_size);
2128     tcg_temp_free_i32(r_asi);
2129     tcg_temp_free_i64(t64);
2130 }
2131
2132 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2133 {
2134     TCGv_i32 r_asi, r_size, r_sign;
2135     TCGv_i64 r_val, t64;
2136
2137     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2138     r_size = tcg_const_i32(4);
2139     r_sign = tcg_const_i32(0);
2140     t64 = tcg_temp_new_i64();
2141     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2142     tcg_temp_free_i32(r_sign);
2143     r_val = tcg_temp_new_i64();
2144     tcg_gen_extu_tl_i64(r_val, src);
2145     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2146     tcg_temp_free_i64(r_val);
2147     tcg_temp_free_i32(r_size);
2148     tcg_temp_free_i32(r_asi);
2149     tcg_gen_trunc_i64_tl(dst, t64);
2150     tcg_temp_free_i64(t64);
2151 }
2152
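/* LDDA loads an aligned doubleword: the most significant word ends up
   in the even destination register rd and the least significant word in
   rd + 1, which is what the truncate/shift sequence below implements
   after the 64-bit ASI load.  */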
2153 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2154                                 int insn, int rd)
2155 {
2156     TCGv_i32 r_asi, r_size, r_sign;
2157     TCGv t;
2158     TCGv_i64 t64;
2159
2160     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2161     r_size = tcg_const_i32(8);
2162     r_sign = tcg_const_i32(0);
2163     t64 = tcg_temp_new_i64();
2164     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2165     tcg_temp_free_i32(r_sign);
2166     tcg_temp_free_i32(r_size);
2167     tcg_temp_free_i32(r_asi);
2168
2169     /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
2170        whereby "rd + 1" elicits "error: array subscript is above array".
2171        Since we have already asserted that rd is even, the semantics
2172        are unchanged.  */
2173     t = gen_dest_gpr(dc, rd | 1);
2174     tcg_gen_trunc_i64_tl(t, t64);
2175     gen_store_gpr(dc, rd | 1, t);
2176
2177     tcg_gen_shri_i64(t64, t64, 32);
2178     tcg_gen_trunc_i64_tl(hi, t64);
2179     tcg_temp_free_i64(t64);
2180     gen_store_gpr(dc, rd, hi);
2181 }
2182
2183 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2184                                 int insn, int rd)
2185 {
2186     TCGv_i32 r_asi, r_size;
2187     TCGv lo = gen_load_gpr(dc, rd + 1);
2188     TCGv_i64 t64 = tcg_temp_new_i64();
2189
2190     tcg_gen_concat_tl_i64(t64, lo, hi);
2191     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2192     r_size = tcg_const_i32(8);
2193     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2194     tcg_temp_free_i32(r_size);
2195     tcg_temp_free_i32(r_asi);
2196     tcg_temp_free_i64(t64);
2197 }
2198 #endif
2199
2200 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
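/* Compare-and-swap through an ASI.  The helper is given the current
   value of r[rd], the second source operand and the resolved ASI; it
   performs the compare-and-swap on the addressed location and returns
   the old memory contents, which are then written back to rd.  */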
2201 static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
2202                                TCGv val2, int insn, int rd)
2203 {
2204     TCGv val1 = gen_load_gpr(dc, rd);
2205     TCGv dst = gen_dest_gpr(dc, rd);
2206 #ifdef TARGET_SPARC64
2207     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2208 #else
2209     TCGv_i32 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2210 #endif
2211
2212     gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
2213     tcg_temp_free_i32(r_asi);
2214     gen_store_gpr(dc, rd, dst);
2215 }
2216
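/* LDSTUBA reads an unsigned byte from the addressed location and then
   writes 0xff back to it; here that is an ASI load of size 1 followed
   by an ASI store of the constant 0xff.  */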
2217 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2218 {
2219     TCGv_i64 r_val;
2220     TCGv_i32 r_asi, r_size;
2221
2222     gen_ld_asi(dst, addr, insn, 1, 0);
2223
2224     r_val = tcg_const_i64(0xffULL);
2225     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2226     r_size = tcg_const_i32(1);
2227     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2228     tcg_temp_free_i32(r_size);
2229     tcg_temp_free_i32(r_asi);
2230     tcg_temp_free_i64(r_val);
2231 }
2232 #endif
2233
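/* Operand fetch for format 3 instructions: rs1 always comes from insn
   bits 18..14, while the second operand is either the sign-extended
   13-bit immediate (bits 12..0) when the i bit is set or the register
   named by bits 4..0 otherwise.  */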
2234 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2235 {
2236     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2237     return gen_load_gpr(dc, rs1);
2238 }
2239
2240 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2241 {
2242     if (IS_IMM) { /* immediate */
2243         target_long simm = GET_FIELDs(insn, 19, 31);
2244         TCGv t = get_temp_tl(dc);
2245         tcg_gen_movi_tl(t, simm);
2246         return t;
2247     } else {      /* register */
2248         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2249         return gen_load_gpr(dc, rs2);
2250     }
2251 }
2252
2253 #ifdef TARGET_SPARC64
2254 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2255 {
2256     TCGv_i32 c32, zero, dst, s1, s2;
2257
2258     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2259        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2260        the latter.  */
2261     c32 = tcg_temp_new_i32();
2262     if (cmp->is_bool) {
2263         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2264     } else {
2265         TCGv_i64 c64 = tcg_temp_new_i64();
2266         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2267         tcg_gen_extrl_i64_i32(c32, c64);
2268         tcg_temp_free_i64(c64);
2269     }
2270
2271     s1 = gen_load_fpr_F(dc, rs);
2272     s2 = gen_load_fpr_F(dc, rd);
2273     dst = gen_dest_fpr_F(dc);
2274     zero = tcg_const_i32(0);
2275
2276     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2277
2278     tcg_temp_free_i32(c32);
2279     tcg_temp_free_i32(zero);
2280     gen_store_fpr_F(dc, rd, dst);
2281 }
2282
2283 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2284 {
2285     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2286     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2287                         gen_load_fpr_D(dc, rs),
2288                         gen_load_fpr_D(dc, rd));
2289     gen_store_fpr_D(dc, rd, dst);
2290 }
2291
2292 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2293 {
2294     int qd = QFPREG(rd);
2295     int qs = QFPREG(rs);
2296
2297     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2298                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2299     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2300                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2301
2302     gen_update_fprs_dirty(qd);
2303 }
2304
2305 #ifndef CONFIG_USER_ONLY
2306 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
2307 {
2308     TCGv_i32 r_tl = tcg_temp_new_i32();
2309
2310     /* load env->tl into r_tl */
2311     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2312
2313     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2314     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2315
2316     /* calculate offset to current trap state from env->ts, reuse r_tl */
2317     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2318     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2319
2320     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2321     {
2322         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2323         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2324         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2325         tcg_temp_free_ptr(r_tl_tmp);
2326     }
2327
2328     tcg_temp_free_i32(r_tl);
2329 }
2330 #endif
2331
2332 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2333                      int width, bool cc, bool left)
2334 {
2335     TCGv lo1, lo2, t1, t2;
2336     uint64_t amask, tabl, tabr;
2337     int shift, imask, omask;
2338
2339     if (cc) {
2340         tcg_gen_mov_tl(cpu_cc_src, s1);
2341         tcg_gen_mov_tl(cpu_cc_src2, s2);
2342         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2343         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2344         dc->cc_op = CC_OP_SUB;
2345     }
2346
2347     /* Theory of operation: there are two tables, left and right (not to
2348        be confused with the left and right versions of the opcode).  These
2349        are indexed by the low 3 bits of the inputs.  To make things "easy",
2350        these tables are loaded into two constants, TABL and TABR below.
2351        The operation index = (input & imask) << shift calculates the index
2352        into the constant, while val = (table >> index) & omask calculates
2353        the value we're looking for.  */
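    /* For example, with width 8 and left set: (s1 & 7) == 2 gives
       index 2 << 3 = 16, so lo1 = (0x80c0e0f0f8fcfeffULL >> 16) & 0xff
       = 0xfc, and (s2 & 7) == 6 gives lo2 = (0xff7f3f1f0f070301ULL >> 48)
       & 0xff = 0x7f.  The result is lo1 when the two masked addresses
       are equal and lo1 & lo2 = 0x7c otherwise, which is the final
       computation at the bottom of the function.  */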
2354     switch (width) {
2355     case 8:
2356         imask = 0x7;
2357         shift = 3;
2358         omask = 0xff;
2359         if (left) {
2360             tabl = 0x80c0e0f0f8fcfeffULL;
2361             tabr = 0xff7f3f1f0f070301ULL;
2362         } else {
2363             tabl = 0x0103070f1f3f7fffULL;
2364             tabr = 0xfffefcf8f0e0c080ULL;
2365         }
2366         break;
2367     case 16:
2368         imask = 0x6;
2369         shift = 1;
2370         omask = 0xf;
2371         if (left) {
2372             tabl = 0x8cef;
2373             tabr = 0xf731;
2374         } else {
2375             tabl = 0x137f;
2376             tabr = 0xfec8;
2377         }
2378         break;
2379     case 32:
2380         imask = 0x4;
2381         shift = 0;
2382         omask = 0x3;
2383         if (left) {
2384             tabl = (2 << 2) | 3;
2385             tabr = (3 << 2) | 1;
2386         } else {
2387             tabl = (1 << 2) | 3;
2388             tabr = (3 << 2) | 2;
2389         }
2390         break;
2391     default:
2392         abort();
2393     }
2394
2395     lo1 = tcg_temp_new();
2396     lo2 = tcg_temp_new();
2397     tcg_gen_andi_tl(lo1, s1, imask);
2398     tcg_gen_andi_tl(lo2, s2, imask);
2399     tcg_gen_shli_tl(lo1, lo1, shift);
2400     tcg_gen_shli_tl(lo2, lo2, shift);
2401
2402     t1 = tcg_const_tl(tabl);
2403     t2 = tcg_const_tl(tabr);
2404     tcg_gen_shr_tl(lo1, t1, lo1);
2405     tcg_gen_shr_tl(lo2, t2, lo2);
2406     tcg_gen_andi_tl(dst, lo1, omask);
2407     tcg_gen_andi_tl(lo2, lo2, omask);
2408
2409     amask = -8;
2410     if (AM_CHECK(dc)) {
2411         amask &= 0xffffffffULL;
2412     }
2413     tcg_gen_andi_tl(s1, s1, amask);
2414     tcg_gen_andi_tl(s2, s2, amask);
2415
2416     /* We want to compute
2417         dst = (s1 == s2 ? lo1 : lo1 & lo2).
2418        We've already done dst = lo1, so this reduces to
2419         dst &= (s1 == s2 ? -1 : lo2)
2420        Which we perform by
2421         lo2 |= -(s1 == s2)
2422         dst &= lo2
2423     */
2424     tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2425     tcg_gen_neg_tl(t1, t1);
2426     tcg_gen_or_tl(lo2, lo2, t1);
2427     tcg_gen_and_tl(dst, dst, lo2);
2428
2429     tcg_temp_free(lo1);
2430     tcg_temp_free(lo2);
2431     tcg_temp_free(t1);
2432     tcg_temp_free(t2);
2433 }
2434
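/* ALIGNADDRESS support: dst receives the sum of the two sources rounded
   down to an 8-byte boundary, while the low three bits of the sum
   (negated when the "left" variant is requested) are deposited into the
   align field of GSR for gen_faligndata to consume.  */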
2435 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2436 {
2437     TCGv tmp = tcg_temp_new();
2438
2439     tcg_gen_add_tl(tmp, s1, s2);
2440     tcg_gen_andi_tl(dst, tmp, -8);
2441     if (left) {
2442         tcg_gen_neg_tl(tmp, tmp);
2443     }
2444     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2445
2446     tcg_temp_free(tmp);
2447 }
2448
2449 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2450 {
2451     TCGv t1, t2, shift;
2452
2453     t1 = tcg_temp_new();
2454     t2 = tcg_temp_new();
2455     shift = tcg_temp_new();
2456
2457     tcg_gen_andi_tl(shift, gsr, 7);
2458     tcg_gen_shli_tl(shift, shift, 3);
2459     tcg_gen_shl_tl(t1, s1, shift);
2460
2461     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2462        shift of (up to 63) followed by a constant shift of 1.  */
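    /* E.g. with GSR.align == 3, shift is 24, so t1 = s1 << 24 and the
       xor gives 24 ^ 63 = 39; the two right shifts total 40 bits and the
       result below is (s1 << 24) | (s2 >> 40), i.e. the usual faligndata
       byte concatenation.  With an alignment of 0 the variable shift is
       63 and the extra constant shift makes t2 zero, giving dst = s1.  */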
2463     tcg_gen_xori_tl(shift, shift, 63);
2464     tcg_gen_shr_tl(t2, s2, shift);
2465     tcg_gen_shri_tl(t2, t2, 1);
2466
2467     tcg_gen_or_tl(dst, t1, t2);
2468
2469     tcg_temp_free(t1);
2470     tcg_temp_free(t2);
2471     tcg_temp_free(shift);
2472 }
2473 #endif
2474
2475 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
2476     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2477         goto illegal_insn;
2478 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2479     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2480         goto nfpu_insn;
2481
2482 /* before an instruction, dc->pc must be static */
2483 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2484 {
2485     unsigned int opc, rs1, rs2, rd;
2486     TCGv cpu_src1, cpu_src2;
2487     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2488     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2489     target_long simm;
2490
2491     opc = GET_FIELD(insn, 0, 1);
2492     rd = GET_FIELD(insn, 2, 6);
2493
2494     switch (opc) {
2495     case 0:                     /* branches/sethi */
2496         {
2497             unsigned int xop = GET_FIELD(insn, 7, 9);
2498             int32_t target;
2499             switch (xop) {
2500 #ifdef TARGET_SPARC64
2501             case 0x1:           /* V9 BPcc */
2502                 {
2503                     int cc;
2504
2505                     target = GET_FIELD_SP(insn, 0, 18);
2506                     target = sign_extend(target, 19);
2507                     target <<= 2;
2508                     cc = GET_FIELD_SP(insn, 20, 21);
2509                     if (cc == 0)
2510                         do_branch(dc, target, insn, 0);
2511                     else if (cc == 2)
2512                         do_branch(dc, target, insn, 1);
2513                     else
2514                         goto illegal_insn;
2515                     goto jmp_insn;
2516                 }
2517             case 0x3:           /* V9 BPr */
2518                 {
2519                     target = GET_FIELD_SP(insn, 0, 13) |
2520                         (GET_FIELD_SP(insn, 20, 21) << 14);
2521                     target = sign_extend(target, 16);
2522                     target <<= 2;
2523                     cpu_src1 = get_src1(dc, insn);
2524                     do_branch_reg(dc, target, insn, cpu_src1);
2525                     goto jmp_insn;
2526                 }
2527             case 0x5:           /* V9 FBPcc */
2528                 {
2529                     int cc = GET_FIELD_SP(insn, 20, 21);
2530                     if (gen_trap_ifnofpu(dc)) {
2531                         goto jmp_insn;
2532                     }
2533                     target = GET_FIELD_SP(insn, 0, 18);
2534                     target = sign_extend(target, 19);
2535                     target <<= 2;
2536                     do_fbranch(dc, target, insn, cc);
2537                     goto jmp_insn;
2538                 }
2539 #else
2540             case 0x7:           /* CBN+x */
2541                 {
2542                     goto ncp_insn;
2543                 }
2544 #endif
2545             case 0x2:           /* BN+x */
2546                 {
2547                     target = GET_FIELD(insn, 10, 31);
2548                     target = sign_extend(target, 22);
2549                     target <<= 2;
2550                     do_branch(dc, target, insn, 0);
2551                     goto jmp_insn;
2552                 }
2553             case 0x6:           /* FBN+x */
2554                 {
2555                     if (gen_trap_ifnofpu(dc)) {
2556                         goto jmp_insn;
2557                     }
2558                     target = GET_FIELD(insn, 10, 31);
2559                     target = sign_extend(target, 22);
2560                     target <<= 2;
2561                     do_fbranch(dc, target, insn, 0);
2562                     goto jmp_insn;
2563                 }
2564             case 0x4:           /* SETHI */
2565                 /* Special-case %g0 because that's the canonical nop.  */
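                /* SETHI places its 22-bit immediate in the upper bits of
                   rd, i.e. rd = imm22 << 10 with the low ten bits
                   cleared; e.g. sethi %hi(0x12345678) stores
                   0x12345400.  */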
2566                 if (rd) {
2567                     uint32_t value = GET_FIELD(insn, 10, 31);
2568                     TCGv t = gen_dest_gpr(dc, rd);
2569                     tcg_gen_movi_tl(t, value << 10);
2570                     gen_store_gpr(dc, rd, t);
2571                 }
2572                 break;
2573             case 0x0:           /* UNIMPL */
2574             default:
2575                 goto illegal_insn;
2576             }
2577             break;
2578         }
2579         break;
2580     case 1:                     /*CALL*/
2581         {
2582             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2583             TCGv o7 = gen_dest_gpr(dc, 15);
2584
2585             tcg_gen_movi_tl(o7, dc->pc);
2586             gen_store_gpr(dc, 15, o7);
2587             target += dc->pc;
2588             gen_mov_pc_npc(dc);
2589 #ifdef TARGET_SPARC64
2590             if (unlikely(AM_CHECK(dc))) {
2591                 target &= 0xffffffffULL;
2592             }
2593 #endif
2594             dc->npc = target;
2595         }
2596         goto jmp_insn;
2597     case 2:                     /* FPU & Logical Operations */
2598         {
2599             unsigned int xop = GET_FIELD(insn, 7, 12);
2600             TCGv cpu_dst = get_temp_tl(dc);
2601             TCGv cpu_tmp0;
2602
2603             if (xop == 0x3a) {  /* generate trap */
2604                 int cond = GET_FIELD(insn, 3, 6);
2605                 TCGv_i32 trap;
2606                 TCGLabel *l1 = NULL;
2607                 int mask;
2608
2609                 if (cond == 0) {
2610                     /* Trap never.  */
2611                     break;
2612                 }
2613
2614                 save_state(dc);
2615
2616                 if (cond != 8) {
2617                     /* Conditional trap.  */
2618                     DisasCompare cmp;
2619 #ifdef TARGET_SPARC64
2620                     /* V9 icc/xcc */
2621                     int cc = GET_FIELD_SP(insn, 11, 12);
2622                     if (cc == 0) {
2623                         gen_compare(&cmp, 0, cond, dc);
2624                     } else if (cc == 2) {
2625                         gen_compare(&cmp, 1, cond, dc);
2626                     } else {
2627                         goto illegal_insn;
2628                     }
2629 #else
2630                     gen_compare(&cmp, 0, cond, dc);
2631 #endif
2632                     l1 = gen_new_label();
2633                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2634                                       cmp.c1, cmp.c2, l1);
2635                     free_compare(&cmp);
2636                 }
2637
2638                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2639                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
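                /* The software trap number is rs1 + rs2 (or rs1 + imm)
                   truncated by the mask chosen above, and the exception
                   raised is that value offset by TT_TRAP.  When rs1 is
                   %g0 and the source is an immediate, the trap value is
                   a compile-time constant, so the mask is applied
                   immediately and then cleared to skip the generic
                   masking below.  */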
2640
2641                 /* Don't use the normal temporaries, as they may well have
2642                    gone out of scope with the branch above.  While we're
2643                    doing that we might as well pre-truncate to 32-bit.  */
2644                 trap = tcg_temp_new_i32();
2645
2646                 rs1 = GET_FIELD_SP(insn, 14, 18);
2647                 if (IS_IMM) {
2648                     rs2 = GET_FIELD_SP(insn, 0, 6);
2649                     if (rs1 == 0) {
2650                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2651                         /* Signal that the trap value is fully constant.  */
2652                         mask = 0;
2653                     } else {
2654                         TCGv t1 = gen_load_gpr(dc, rs1);
2655                         tcg_gen_trunc_tl_i32(trap, t1);
2656                         tcg_gen_addi_i32(trap, trap, rs2);
2657                     }
2658                 } else {
2659                     TCGv t1, t2;
2660                     rs2 = GET_FIELD_SP(insn, 0, 4);
2661                     t1 = gen_load_gpr(dc, rs1);
2662                     t2 = gen_load_gpr(dc, rs2);
2663                     tcg_gen_add_tl(t1, t1, t2);
2664                     tcg_gen_trunc_tl_i32(trap, t1);
2665                 }
2666                 if (mask != 0) {
2667                     tcg_gen_andi_i32(trap, trap, mask);
2668                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
2669                 }
2670
2671                 gen_helper_raise_exception(cpu_env, trap);
2672                 tcg_temp_free_i32(trap);
2673
2674                 if (cond == 8) {
2675                     /* An unconditional trap ends the TB.  */
2676                     dc->is_br = 1;
2677                     goto jmp_insn;
2678                 } else {
2679                     /* A conditional trap falls through to the next insn.  */
2680                     gen_set_label(l1);
2681                     break;
2682                 }
2683             } else if (xop == 0x28) {
2684                 rs1 = GET_FIELD(insn, 13, 17);
2685                 switch(rs1) {
2686                 case 0: /* rdy */
2687 #ifndef TARGET_SPARC64
2688                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2689                                        manual, rdy on the microSPARC
2690                                        II */
2691                 case 0x0f:          /* stbar in the SPARCv8 manual,
2692                                        rdy on the microSPARC II */
2693                 case 0x10 ... 0x1f: /* implementation-dependent in the
2694                                        SPARCv8 manual, rdy on the
2695                                        microSPARC II */
2696                     /* Read Asr17 */
2697                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2698                         TCGv t = gen_dest_gpr(dc, rd);
2699                         /* Read Asr17 for a Leon3 monoprocessor */
2700                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2701                         gen_store_gpr(dc, rd, t);
2702                         break;
2703                     }
2704 #endif
2705                     gen_store_gpr(dc, rd, cpu_y);
2706                     break;
2707 #ifdef TARGET_SPARC64
2708                 case 0x2: /* V9 rdccr */
2709                     update_psr(dc);
2710                     gen_helper_rdccr(cpu_dst, cpu_env);
2711                     gen_store_gpr(dc, rd, cpu_dst);
2712                     break;
2713                 case 0x3: /* V9 rdasi */
2714                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2715                     gen_store_gpr(dc, rd, cpu_dst);
2716                     break;
2717                 case 0x4: /* V9 rdtick */
2718                     {
2719                         TCGv_ptr r_tickptr;
2720                         TCGv_i32 r_const;
2721
2722                         r_tickptr = tcg_temp_new_ptr();
2723                         r_const = tcg_const_i32(dc->mem_idx);
2724                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2725                                        offsetof(CPUSPARCState, tick));
2726                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
2727                                                   r_const);
2728                         tcg_temp_free_ptr(r_tickptr);
2729                         tcg_temp_free_i32(r_const);
2730                         gen_store_gpr(dc, rd, cpu_dst);
2731                     }
2732                     break;
2733                 case 0x5: /* V9 rdpc */
2734                     {
2735                         TCGv t = gen_dest_gpr(dc, rd);
2736                         if (unlikely(AM_CHECK(dc))) {
2737                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2738                         } else {
2739                             tcg_gen_movi_tl(t, dc->pc);
2740                         }
2741                         gen_store_gpr(dc, rd, t);
2742                     }
2743                     break;
2744                 case 0x6: /* V9 rdfprs */
2745                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2746                     gen_store_gpr(dc, rd, cpu_dst);
2747                     break;
2748                 case 0xf: /* V9 membar */
2749                     break; /* no effect */
2750                 case 0x13: /* Graphics Status */
2751                     if (gen_trap_ifnofpu(dc)) {
2752                         goto jmp_insn;
2753                     }
2754                     gen_store_gpr(dc, rd, cpu_gsr);
2755                     break;
2756                 case 0x16: /* Softint */
2757                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
2758                                      offsetof(CPUSPARCState, softint));
2759                     gen_store_gpr(dc, rd, cpu_dst);
2760                     break;
2761                 case 0x17: /* Tick compare */
2762                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
2763                     break;
2764                 case 0x18: /* System tick */
2765                     {
2766                         TCGv_ptr r_tickptr;
2767                         TCGv_i32 r_const;
2768
2769                         r_tickptr = tcg_temp_new_ptr();
2770                         r_const = tcg_const_i32(dc->mem_idx);
2771                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2772                                        offsetof(CPUSPARCState, stick));
2773                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
2774                                                   r_const);
2775                         tcg_temp_free_ptr(r_tickptr);
2776                         tcg_temp_free_i32(r_const);
2777                         gen_store_gpr(dc, rd, cpu_dst);
2778                     }
2779                     break;
2780                 case 0x19: /* System tick compare */
2781                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
2782                     break;
2783                 case 0x10: /* Performance Control */
2784                 case 0x11: /* Performance Instrumentation Counter */
2785                 case 0x12: /* Dispatch Control */
2786                 case 0x14: /* Softint set, WO */
2787                 case 0x15: /* Softint clear, WO */
2788 #endif
2789                 default:
2790                     goto illegal_insn;
2791                 }
2792 #if !defined(CONFIG_USER_ONLY)
2793             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2794 #ifndef TARGET_SPARC64
2795                 if (!supervisor(dc)) {
2796                     goto priv_insn;
2797                 }
2798                 update_psr(dc);
2799                 gen_helper_rdpsr(cpu_dst, cpu_env);
2800 #else
2801                 CHECK_IU_FEATURE(dc, HYPV);
2802                 if (!hypervisor(dc))
2803                     goto priv_insn;
2804                 rs1 = GET_FIELD(insn, 13, 17);
2805                 switch (rs1) {
2806                 case 0: // hpstate
2807                     // gen_op_rdhpstate();
2808                     break;
2809                 case 1: // htstate
2810                     // gen_op_rdhtstate();
2811                     break;
2812                 case 3: // hintp
2813                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2814                     break;
2815                 case 5: // htba
2816                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2817                     break;
2818                 case 6: // hver
2819                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2820                     break;
2821                 case 31: // hstick_cmpr
2822                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2823                     break;
2824                 default:
2825                     goto illegal_insn;
2826                 }
2827 #endif
2828                 gen_store_gpr(dc, rd, cpu_dst);
2829                 break;
2830             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2831                 if (!supervisor(dc)) {
2832                     goto priv_insn;
2833                 }
2834                 cpu_tmp0 = get_temp_tl(dc);
2835 #ifdef TARGET_SPARC64
2836                 rs1 = GET_FIELD(insn, 13, 17);
2837                 switch (rs1) {
2838                 case 0: // tpc
2839                     {
2840                         TCGv_ptr r_tsptr;
2841
2842                         r_tsptr = tcg_temp_new_ptr();
2843                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2844                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2845                                       offsetof(trap_state, tpc));
2846                         tcg_temp_free_ptr(r_tsptr);
2847                     }
2848                     break;
2849                 case 1: // tnpc
2850                     {
2851                         TCGv_ptr r_tsptr;
2852
2853                         r_tsptr = tcg_temp_new_ptr();
2854                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2855                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2856                                       offsetof(trap_state, tnpc));
2857                         tcg_temp_free_ptr(r_tsptr);
2858                     }
2859                     break;
2860                 case 2: // tstate
2861                     {
2862                         TCGv_ptr r_tsptr;
2863
2864                         r_tsptr = tcg_temp_new_ptr();
2865                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2866                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2867                                       offsetof(trap_state, tstate));
2868                         tcg_temp_free_ptr(r_tsptr);
2869                     }
2870                     break;
2871                 case 3: // tt
2872                     {
2873                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
2874
2875                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2876                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
2877                                          offsetof(trap_state, tt));
2878                         tcg_temp_free_ptr(r_tsptr);
2879                     }
2880                     break;
2881                 case 4: // tick
2882                     {
2883                         TCGv_ptr r_tickptr;
2884                         TCGv_i32 r_const;
2885
2886                         r_tickptr = tcg_temp_new_ptr();
2887                         r_const = tcg_const_i32(dc->mem_idx);
2888                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2889                                        offsetof(CPUSPARCState, tick));
2890                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
2891                                                   r_tickptr, r_const);
2892                         tcg_temp_free_ptr(r_tickptr);
2893                         tcg_temp_free_i32(r_const);
2894                     }
2895                     break;
2896                 case 5: // tba
2897                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2898                     break;
2899                 case 6: // pstate
2900                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2901                                      offsetof(CPUSPARCState, pstate));
2902                     break;
2903                 case 7: // tl
2904                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2905                                      offsetof(CPUSPARCState, tl));
2906                     break;
2907                 case 8: // pil
2908                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2909                                      offsetof(CPUSPARCState, psrpil));
2910                     break;
2911                 case 9: // cwp
2912                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
2913                     break;
2914                 case 10: // cansave
2915                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2916                                      offsetof(CPUSPARCState, cansave));
2917                     break;
2918                 case 11: // canrestore
2919                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2920                                      offsetof(CPUSPARCState, canrestore));
2921                     break;
2922                 case 12: // cleanwin
2923                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2924                                      offsetof(CPUSPARCState, cleanwin));
2925                     break;
2926                 case 13: // otherwin
2927                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2928                                      offsetof(CPUSPARCState, otherwin));
2929                     break;
2930                 case 14: // wstate
2931                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2932                                      offsetof(CPUSPARCState, wstate));
2933                     break;
2934                 case 16: // UA2005 gl
2935                     CHECK_IU_FEATURE(dc, GL);
2936                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2937                                      offsetof(CPUSPARCState, gl));
2938                     break;
2939                 case 26: // UA2005 strand status
2940                     CHECK_IU_FEATURE(dc, HYPV);
2941                     if (!hypervisor(dc))
2942                         goto priv_insn;
2943                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2944                     break;
2945                 case 31: // ver
2946                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2947                     break;
2948                 case 15: // fq
2949                 default:
2950                     goto illegal_insn;
2951                 }
2952 #else
2953                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2954 #endif
2955                 gen_store_gpr(dc, rd, cpu_tmp0);
2956                 break;
2957             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2958 #ifdef TARGET_SPARC64
2959                 save_state(dc);
2960                 gen_helper_flushw(cpu_env);
2961 #else
2962                 if (!supervisor(dc))
2963                     goto priv_insn;
2964                 gen_store_gpr(dc, rd, cpu_tbr);
2965 #endif
2966                 break;
2967 #endif
2968             } else if (xop == 0x34) {   /* FPU Operations */
2969                 if (gen_trap_ifnofpu(dc)) {
2970                     goto jmp_insn;
2971                 }
2972                 gen_op_clear_ieee_excp_and_FTT();
2973                 rs1 = GET_FIELD(insn, 13, 17);
2974                 rs2 = GET_FIELD(insn, 27, 31);
2975                 xop = GET_FIELD(insn, 18, 26);
2976                 save_state(dc);
2977                 switch (xop) {
2978                 case 0x1: /* fmovs */
2979                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2980                     gen_store_fpr_F(dc, rd, cpu_src1_32);
2981                     break;
2982                 case 0x5: /* fnegs */
2983                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2984                     break;
2985                 case 0x9: /* fabss */
2986                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2987                     break;
2988                 case 0x29: /* fsqrts */
2989                     CHECK_FPU_FEATURE(dc, FSQRT);
2990                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2991                     break;
2992                 case 0x2a: /* fsqrtd */
2993                     CHECK_FPU_FEATURE(dc, FSQRT);
2994                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2995                     break;
2996                 case 0x2b: /* fsqrtq */
2997                     CHECK_FPU_FEATURE(dc, FLOAT128);
2998                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2999                     break;
3000                 case 0x41: /* fadds */
3001                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3002                     break;
3003                 case 0x42: /* faddd */
3004                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3005                     break;
3006                 case 0x43: /* faddq */
3007                     CHECK_FPU_FEATURE(dc, FLOAT128);
3008                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3009                     break;
3010                 case 0x45: /* fsubs */
3011                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3012                     break;
3013                 case 0x46: /* fsubd */
3014                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3015                     break;
3016                 case 0x47: /* fsubq */
3017                     CHECK_FPU_FEATURE(dc, FLOAT128);
3018                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3019                     break;
3020                 case 0x49: /* fmuls */
3021                     CHECK_FPU_FEATURE(dc, FMUL);
3022                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3023                     break;
3024                 case 0x4a: /* fmuld */
3025                     CHECK_FPU_FEATURE(dc, FMUL);
3026                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3027                     break;
3028                 case 0x4b: /* fmulq */
3029                     CHECK_FPU_FEATURE(dc, FLOAT128);
3030                     CHECK_FPU_FEATURE(dc, FMUL);
3031                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3032                     break;
3033                 case 0x4d: /* fdivs */
3034                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3035                     break;
3036                 case 0x4e: /* fdivd */
3037                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3038                     break;
3039                 case 0x4f: /* fdivq */
3040                     CHECK_FPU_FEATURE(dc, FLOAT128);
3041                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3042                     break;
3043                 case 0x69: /* fsmuld */
3044                     CHECK_FPU_FEATURE(dc, FSMULD);
3045                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3046                     break;
3047                 case 0x6e: /* fdmulq */
3048                     CHECK_FPU_FEATURE(dc, FLOAT128);
3049                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3050                     break;
3051                 case 0xc4: /* fitos */
3052                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3053                     break;
3054                 case 0xc6: /* fdtos */
3055                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3056                     break;
3057                 case 0xc7: /* fqtos */
3058                     CHECK_FPU_FEATURE(dc, FLOAT128);
3059                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3060                     break;
3061                 case 0xc8: /* fitod */
3062                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3063                     break;
3064                 case 0xc9: /* fstod */
3065                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3066                     break;
3067                 case 0xcb: /* fqtod */
3068                     CHECK_FPU_FEATURE(dc, FLOAT128);
3069                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3070                     break;
3071                 case 0xcc: /* fitoq */
3072                     CHECK_FPU_FEATURE(dc, FLOAT128);
3073                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3074                     break;
3075                 case 0xcd: /* fstoq */
3076                     CHECK_FPU_FEATURE(dc, FLOAT128);
3077                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3078                     break;
3079                 case 0xce: /* fdtoq */
3080                     CHECK_FPU_FEATURE(dc, FLOAT128);
3081                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3082                     break;
3083                 case 0xd1: /* fstoi */
3084                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3085                     break;
3086                 case 0xd2: /* fdtoi */
3087                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3088                     break;
3089                 case 0xd3: /* fqtoi */
3090                     CHECK_FPU_FEATURE(dc, FLOAT128);
3091                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3092                     break;
3093 #ifdef TARGET_SPARC64
3094                 case 0x2: /* V9 fmovd */
3095                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3096                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3097                     break;
3098                 case 0x3: /* V9 fmovq */
3099                     CHECK_FPU_FEATURE(dc, FLOAT128);
3100                     gen_move_Q(rd, rs2);
3101                     break;
3102                 case 0x6: /* V9 fnegd */
3103                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3104                     break;
3105                 case 0x7: /* V9 fnegq */
3106                     CHECK_FPU_FEATURE(dc, FLOAT128);
3107                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3108                     break;
3109                 case 0xa: /* V9 fabsd */
3110                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3111                     break;
3112                 case 0xb: /* V9 fabsq */
3113                     CHECK_FPU_FEATURE(dc, FLOAT128);
3114                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3115                     break;
3116                 case 0x81: /* V9 fstox */
3117                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3118                     break;
3119                 case 0x82: /* V9 fdtox */
3120                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3121                     break;
3122                 case 0x83: /* V9 fqtox */
3123                     CHECK_FPU_FEATURE(dc, FLOAT128);
3124                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3125                     break;
3126                 case 0x84: /* V9 fxtos */
3127                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3128                     break;
3129                 case 0x88: /* V9 fxtod */
3130                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3131                     break;
3132                 case 0x8c: /* V9 fxtoq */
3133                     CHECK_FPU_FEATURE(dc, FLOAT128);
3134                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3135                     break;
3136 #endif
3137                 default:
3138                     goto illegal_insn;
3139                 }
3140             } else if (xop == 0x35) {   /* FPU conditional moves and compares */
3141 #ifdef TARGET_SPARC64
3142                 int cond;
3143 #endif
3144                 if (gen_trap_ifnofpu(dc)) {
3145                     goto jmp_insn;
3146                 }
3147                 gen_op_clear_ieee_excp_and_FTT();
3148                 rs1 = GET_FIELD(insn, 13, 17);
3149                 rs2 = GET_FIELD(insn, 27, 31);
3150                 xop = GET_FIELD(insn, 18, 26);
3151                 save_state(dc);
3152
3153 #ifdef TARGET_SPARC64
3154 #define FMOVR(sz)                                                  \
3155                 do {                                               \
3156                     DisasCompare cmp;                              \
3157                     cond = GET_FIELD_SP(insn, 10, 12);             \
3158                     cpu_src1 = get_src1(dc, insn);                 \
3159                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3160                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3161                     free_compare(&cmp);                            \
3162                 } while (0)
3163
3164                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3165                     FMOVR(s);
3166                     break;
3167                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3168                     FMOVR(d);
3169                     break;
3170                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3171                     CHECK_FPU_FEATURE(dc, FLOAT128);
3172                     FMOVR(q);
3173                     break;
3174                 }
3175 #undef FMOVR
3176 #endif
3177                 switch (xop) {
3178 #ifdef TARGET_SPARC64
3179 #define FMOVCC(fcc, sz)                                                 \
3180                     do {                                                \
3181                         DisasCompare cmp;                               \
3182                         cond = GET_FIELD_SP(insn, 14, 17);              \
3183                         gen_fcompare(&cmp, fcc, cond);                  \
3184                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3185                         free_compare(&cmp);                             \
3186                     } while (0)
3187
3188                     case 0x001: /* V9 fmovscc %fcc0 */
3189                         FMOVCC(0, s);
3190                         break;
3191                     case 0x002: /* V9 fmovdcc %fcc0 */
3192                         FMOVCC(0, d);
3193                         break;
3194                     case 0x003: /* V9 fmovqcc %fcc0 */
3195                         CHECK_FPU_FEATURE(dc, FLOAT128);
3196                         FMOVCC(0, q);
3197                         break;
3198                     case 0x041: /* V9 fmovscc %fcc1 */
3199                         FMOVCC(1, s);
3200                         break;
3201                     case 0x042: /* V9 fmovdcc %fcc1 */
3202                         FMOVCC(1, d);
3203                         break;
3204                     case 0x043: /* V9 fmovqcc %fcc1 */
3205                         CHECK_FPU_FEATURE(dc, FLOAT128);
3206                         FMOVCC(1, q);
3207                         break;
3208                     case 0x081: /* V9 fmovscc %fcc2 */
3209                         FMOVCC(2, s);
3210                         break;
3211                     case 0x082: /* V9 fmovdcc %fcc2 */
3212                         FMOVCC(2, d);
3213                         break;
3214                     case 0x083: /* V9 fmovqcc %fcc2 */
3215                         CHECK_FPU_FEATURE(dc, FLOAT128);
3216                         FMOVCC(2, q);
3217                         break;
3218                     case 0x0c1: /* V9 fmovscc %fcc3 */
3219                         FMOVCC(3, s);
3220                         break;
3221                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3222                         FMOVCC(3, d);
3223                         break;
3224                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3225                         CHECK_FPU_FEATURE(dc, FLOAT128);
3226                         FMOVCC(3, q);
3227                         break;
3228 #undef FMOVCC
3229 #define FMOVCC(xcc, sz)                                                 \
3230                     do {                                                \
3231                         DisasCompare cmp;                               \
3232                         cond = GET_FIELD_SP(insn, 14, 17);              \
3233                         gen_compare(&cmp, xcc, cond, dc);               \
3234                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3235                         free_compare(&cmp);                             \
3236                     } while (0)
3237
3238                     case 0x101: /* V9 fmovscc %icc */
3239                         FMOVCC(0, s);
3240                         break;
3241                     case 0x102: /* V9 fmovdcc %icc */
3242                         FMOVCC(0, d);
3243                         break;
3244                     case 0x103: /* V9 fmovqcc %icc */
3245                         CHECK_FPU_FEATURE(dc, FLOAT128);
3246                         FMOVCC(0, q);
3247                         break;
3248                     case 0x181: /* V9 fmovscc %xcc */
3249                         FMOVCC(1, s);
3250                         break;
3251                     case 0x182: /* V9 fmovdcc %xcc */
3252                         FMOVCC(1, d);
3253                         break;
3254                     case 0x183: /* V9 fmovqcc %xcc */
3255                         CHECK_FPU_FEATURE(dc, FLOAT128);
3256                         FMOVCC(1, q);
3257                         break;
3258 #undef FMOVCC
3259 #endif
3260                     case 0x51: /* fcmps, V9 %fcc */
3261                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3262                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3263                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3264                         break;
3265                     case 0x52: /* fcmpd, V9 %fcc */
3266                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3267                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3268                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3269                         break;
3270                     case 0x53: /* fcmpq, V9 %fcc */
3271                         CHECK_FPU_FEATURE(dc, FLOAT128);
3272                         gen_op_load_fpr_QT0(QFPREG(rs1));
3273                         gen_op_load_fpr_QT1(QFPREG(rs2));
3274                         gen_op_fcmpq(rd & 3);
3275                         break;
3276                     case 0x55: /* fcmpes, V9 %fcc */
3277                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3278                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3279                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3280                         break;
3281                     case 0x56: /* fcmped, V9 %fcc */
3282                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3283                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3284                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3285                         break;
3286                     case 0x57: /* fcmpeq, V9 %fcc */
3287                         CHECK_FPU_FEATURE(dc, FLOAT128);
3288                         gen_op_load_fpr_QT0(QFPREG(rs1));
3289                         gen_op_load_fpr_QT1(QFPREG(rs2));
3290                         gen_op_fcmpeq(rd & 3);
3291                         break;
3292                     default:
3293                         goto illegal_insn;
3294                 }
3295             } else if (xop == 0x2) {
3296                 TCGv dst = gen_dest_gpr(dc, rd);
3297                 rs1 = GET_FIELD(insn, 13, 17);
3298                 if (rs1 == 0) {
3299                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3300                     if (IS_IMM) {       /* immediate */
3301                         simm = GET_FIELDs(insn, 19, 31);
3302                         tcg_gen_movi_tl(dst, simm);
3303                         gen_store_gpr(dc, rd, dst);
3304                     } else {            /* register */
3305                         rs2 = GET_FIELD(insn, 27, 31);
3306                         if (rs2 == 0) {
3307                             tcg_gen_movi_tl(dst, 0);
3308                             gen_store_gpr(dc, rd, dst);
3309                         } else {
3310                             cpu_src2 = gen_load_gpr(dc, rs2);
3311                             gen_store_gpr(dc, rd, cpu_src2);
3312                         }
3313                     }
3314                 } else {
3315                     cpu_src1 = get_src1(dc, insn);
3316                     if (IS_IMM) {       /* immediate */
3317                         simm = GET_FIELDs(insn, 19, 31);
3318                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3319                         gen_store_gpr(dc, rd, dst);
3320                     } else {            /* register */
3321                         rs2 = GET_FIELD(insn, 27, 31);
3322                         if (rs2 == 0) {
3323                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3324                             gen_store_gpr(dc, rd, cpu_src1);
3325                         } else {
3326                             cpu_src2 = gen_load_gpr(dc, rs2);
3327                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3328                             gen_store_gpr(dc, rd, dst);
3329                         }
3330                     }
3331                 }
3332 #ifdef TARGET_SPARC64
3333             } else if (xop == 0x25) { /* sll, V9 sllx */
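                     /* On V9 the X bit (insn bit 12) selects the 64-bit form:
                        sllx takes a 6-bit shift count, plain sll a 5-bit one.
                        The srl/sra cases below additionally zero- or
                        sign-extend the low 32 bits for the 32-bit forms.  */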
3334                 cpu_src1 = get_src1(dc, insn);
3335                 if (IS_IMM) {   /* immediate */
3336                     simm = GET_FIELDs(insn, 20, 31);
3337                     if (insn & (1 << 12)) {
3338                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3339                     } else {
3340                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3341                     }
3342                 } else {                /* register */
3343                     rs2 = GET_FIELD(insn, 27, 31);
3344                     cpu_src2 = gen_load_gpr(dc, rs2);
3345                     cpu_tmp0 = get_temp_tl(dc);
3346                     if (insn & (1 << 12)) {
3347                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3348                     } else {
3349                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3350                     }
3351                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3352                 }
3353                 gen_store_gpr(dc, rd, cpu_dst);
3354             } else if (xop == 0x26) { /* srl, V9 srlx */
3355                 cpu_src1 = get_src1(dc, insn);
3356                 if (IS_IMM) {   /* immediate */
3357                     simm = GET_FIELDs(insn, 20, 31);
3358                     if (insn & (1 << 12)) {
3359                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3360                     } else {
3361                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3362                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3363                     }
3364                 } else {                /* register */
3365                     rs2 = GET_FIELD(insn, 27, 31);
3366                     cpu_src2 = gen_load_gpr(dc, rs2);
3367                     cpu_tmp0 = get_temp_tl(dc);
3368                     if (insn & (1 << 12)) {
3369                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3370                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3371                     } else {
3372                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3373                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3374                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3375                     }
3376                 }
3377                 gen_store_gpr(dc, rd, cpu_dst);
3378             } else if (xop == 0x27) { /* sra, V9 srax */
3379                 cpu_src1 = get_src1(dc, insn);
3380                 if (IS_IMM) {   /* immediate */
3381                     simm = GET_FIELDs(insn, 20, 31);
3382                     if (insn & (1 << 12)) {
3383                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3384                     } else {
3385                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3386                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3387                     }
3388                 } else {                /* register */
3389                     rs2 = GET_FIELD(insn, 27, 31);
3390                     cpu_src2 = gen_load_gpr(dc, rs2);
3391                     cpu_tmp0 = get_temp_tl(dc);
3392                     if (insn & (1 << 12)) {
3393                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3394                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3395                     } else {
3396                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3397                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3398                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3399                     }
3400                 }
3401                 gen_store_gpr(dc, rd, cpu_dst);
3402 #endif
3403             } else if (xop < 0x36) {
3404                 if (xop < 0x20) {
3405                     cpu_src1 = get_src1(dc, insn);
3406                     cpu_src2 = get_src2(dc, insn);
3407                     switch (xop & ~0x10) {
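                         /* Bit 4 of xop selects the cc-setting variant of each
                            ALU op (e.g. xop 0x00 is add, 0x10 is addcc), so it
                            is masked off here and tested as "xop & 0x10" in
                            each case.  */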
3408                     case 0x0: /* add */
3409                         if (xop & 0x10) {
3410                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3411                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3412                             dc->cc_op = CC_OP_ADD;
3413                         } else {
3414                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3415                         }
3416                         break;
3417                     case 0x1: /* and */
3418                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3419                         if (xop & 0x10) {
3420                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3421                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3422                             dc->cc_op = CC_OP_LOGIC;
3423                         }
3424                         break;
3425                     case 0x2: /* or */
3426                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3427                         if (xop & 0x10) {
3428                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3429                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3430                             dc->cc_op = CC_OP_LOGIC;
3431                         }
3432                         break;
3433                     case 0x3: /* xor */
3434                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3435                         if (xop & 0x10) {
3436                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3437                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3438                             dc->cc_op = CC_OP_LOGIC;
3439                         }
3440                         break;
3441                     case 0x4: /* sub */
3442                         if (xop & 0x10) {
3443                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3444                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3445                             dc->cc_op = CC_OP_SUB;
3446                         } else {
3447                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3448                         }
3449                         break;
3450                     case 0x5: /* andn */
3451                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3452                         if (xop & 0x10) {
3453                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3454                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3455                             dc->cc_op = CC_OP_LOGIC;
3456                         }
3457                         break;
3458                     case 0x6: /* orn */
3459                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3460                         if (xop & 0x10) {
3461                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3462                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3463                             dc->cc_op = CC_OP_LOGIC;
3464                         }
3465                         break;
3466                     case 0x7: /* xorn */
3467                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3468                         if (xop & 0x10) {
3469                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3470                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3471                             dc->cc_op = CC_OP_LOGIC;
3472                         }
3473                         break;
3474                     case 0x8: /* addx, V9 addc */
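                             /* addx/addxcc (V9 addc) need the current carry;
                                gen_op_addx_int dispatches on dc->cc_op so a
                                cached carry can be reused rather than
                                recomputing the flags.  */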
3475                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3476                                         (xop & 0x10));
3477                         break;
3478 #ifdef TARGET_SPARC64
3479                     case 0x9: /* V9 mulx */
3480                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3481                         break;
3482 #endif
3483                     case 0xa: /* umul */
3484                         CHECK_IU_FEATURE(dc, MUL);
3485                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3486                         if (xop & 0x10) {
3487                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3488                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3489                             dc->cc_op = CC_OP_LOGIC;
3490                         }
3491                         break;
3492                     case 0xb: /* smul */
3493                         CHECK_IU_FEATURE(dc, MUL);
3494                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3495                         if (xop & 0x10) {
3496                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3497                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3498                             dc->cc_op = CC_OP_LOGIC;
3499                         }
3500                         break;
3501                     case 0xc: /* subx, V9 subc */
3502                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3503                                         (xop & 0x10));
3504                         break;
3505 #ifdef TARGET_SPARC64
3506                     case 0xd: /* V9 udivx */
3507                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3508                         break;
3509 #endif
3510                     case 0xe: /* udiv */
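                             /* udiv/sdiv divide the 64-bit value %y:rs1 by rs2
                                and may raise a division-by-zero trap, hence
                                the cpu_env helpers; the cc forms leave the
                                flags in CC_OP_DIV form.  */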
3511                         CHECK_IU_FEATURE(dc, DIV);
3512                         if (xop & 0x10) {
3513                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3514                                                cpu_src2);
3515                             dc->cc_op = CC_OP_DIV;
3516                         } else {
3517                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3518                                             cpu_src2);
3519                         }
3520                         break;
3521                     case 0xf: /* sdiv */
3522                         CHECK_IU_FEATURE(dc, DIV);
3523                         if (xop & 0x10) {
3524                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3525                                                cpu_src2);
3526                             dc->cc_op = CC_OP_DIV;
3527                         } else {
3528                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3529                                             cpu_src2);
3530                         }
3531                         break;
3532                     default:
3533                         goto illegal_insn;
3534                     }
3535                     gen_store_gpr(dc, rd, cpu_dst);
3536                 } else {
3537                     cpu_src1 = get_src1(dc, insn);
3538                     cpu_src2 = get_src2(dc, insn);
3539                     switch (xop) {
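                         /* xop 0x20-0x23 are the tagged add/subtract forms:
                            taddcc/tsubcc only set the condition codes, while
                            the trapping taddcctv/tsubcctv variants go through
                            helpers so they can raise a tag-overflow trap.  */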
3540                     case 0x20: /* taddcc */
3541                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3542                         gen_store_gpr(dc, rd, cpu_dst);
3543                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3544                         dc->cc_op = CC_OP_TADD;
3545                         break;
3546                     case 0x21: /* tsubcc */
3547                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3548                         gen_store_gpr(dc, rd, cpu_dst);
3549                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3550                         dc->cc_op = CC_OP_TSUB;
3551                         break;
3552                     case 0x22: /* taddcctv */
3553                         gen_helper_taddcctv(cpu_dst, cpu_env,
3554                                             cpu_src1, cpu_src2);
3555                         gen_store_gpr(dc, rd, cpu_dst);
3556                         dc->cc_op = CC_OP_TADDTV;
3557                         break;
3558                     case 0x23: /* tsubcctv */
3559                         gen_helper_tsubcctv(cpu_dst, cpu_env,
3560                                             cpu_src1, cpu_src2);
3561                         gen_store_gpr(dc, rd, cpu_dst);
3562                         dc->cc_op = CC_OP_TSUBTV;
3563                         break;
3564                     case 0x24: /* mulscc */
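                             /* mulscc is one step of the multiply-step
                                algorithm: it needs the live N and V flags
                                (hence update_psr) and shifts the multiplier
                                through %y, leaving ADD-style flags behind.  */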
3565                         update_psr(dc);
3566                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3567                         gen_store_gpr(dc, rd, cpu_dst);
3568                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3569                         dc->cc_op = CC_OP_ADD;
3570                         break;
3571 #ifndef TARGET_SPARC64
3572                     case 0x25:  /* sll */
3573                         if (IS_IMM) { /* immediate */
3574                             simm = GET_FIELDs(insn, 20, 31);
3575                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3576                         } else { /* register */
3577                             cpu_tmp0 = get_temp_tl(dc);
3578                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3579                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3580                         }
3581                         gen_store_gpr(dc, rd, cpu_dst);
3582                         break;
3583                     case 0x26:  /* srl */
3584                         if (IS_IMM) { /* immediate */
3585                             simm = GET_FIELDs(insn, 20, 31);
3586                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3587                         } else { /* register */
3588                             cpu_tmp0 = get_temp_tl(dc);
3589                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3590                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3591                         }
3592                         gen_store_gpr(dc, rd, cpu_dst);
3593                         break;
3594                     case 0x27:  /* sra */
3595                         if (IS_IMM) { /* immediate */
3596                             simm = GET_FIELDs(insn, 20, 31);
3597                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3598                         } else { /* register */
3599                             cpu_tmp0 = get_temp_tl(dc);
3600                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3601                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3602                         }
3603                         gen_store_gpr(dc, rd, cpu_dst);
3604                         break;
3605 #endif
3606                     case 0x30:
3607                         {
3608                             cpu_tmp0 = get_temp_tl(dc);
3609                             switch (rd) {
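                                 /* For wr %y / wrasr the written value is
                                    defined as r[rs1] xor operand2, which is
                                    why each case starts with an XOR; rd picks
                                    the ASR, e.g. "wr %o0, 0, %y" copies %o0
                                    into %y unchanged.  */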
3610                             case 0: /* wry */
3611                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3612                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3613                                 break;
3614 #ifndef TARGET_SPARC64
3615                             case 0x01 ... 0x0f: /* undefined in the
3616                                                    SPARCv8 manual, nop
3617                                                    on the microSPARC
3618                                                    II */
3619                             case 0x10 ... 0x1f: /* implementation-dependent
3620                                                    in the SPARCv8
3621                                                    manual, nop on the
3622                                                    microSPARC II */
3623                                 if ((rd == 0x13) && (dc->def->features &
3624                                                      CPU_FEATURE_POWERDOWN)) {
3625                                     /* LEON3 power-down */
3626                                     save_state(dc);
3627                                     gen_helper_power_down(cpu_env);
3628                                 }
3629                                 break;
3630 #else
3631                             case 0x2: /* V9 wrccr */
3632                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3633                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
3634                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3635                                 dc->cc_op = CC_OP_FLAGS;
3636                                 break;
3637                             case 0x3: /* V9 wrasi */
3638                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3639                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
3640                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_tmp0);
3641                                 break;
3642                             case 0x6: /* V9 wrfprs */
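                                     /* FPRS.FEF gates FPU access and is cached
                                        by the translator (fpu_enabled), so
                                        after updating %fprs the TB is ended
                                        and translation resumes at the next
                                        instruction.  */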
3643                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3644                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
3645                                 save_state(dc);
3646                                 gen_op_next_insn();
3647                                 tcg_gen_exit_tb(0);
3648                                 dc->is_br = 1;
3649                                 break;
3650                             case 0xf: /* V9 sir, nop if user */
3651 #if !defined(CONFIG_USER_ONLY)
3652                                 if (supervisor(dc)) {
3653                                     ; // XXX: SIR not implemented
3654                                 }
3655 #endif
3656                                 break;
3657                             case 0x13: /* Graphics Status */
3658                                 if (gen_trap_ifnofpu(dc)) {
3659                                     goto jmp_insn;
3660                                 }
3661                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3662                                 break;
3663                             case 0x14: /* Softint set */
3664                                 if (!supervisor(dc))
3665                                     goto illegal_insn;
3666                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3667                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
3668                                 break;
3669                             case 0x15: /* Softint clear */
3670                                 if (!supervisor(dc))
3671                                     goto illegal_insn;
3672                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3673                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
3674                                 break;
3675                             case 0x16: /* Softint write */
3676                                 if (!supervisor(dc))
3677                                     goto illegal_insn;
3678                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3679                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
3680                                 break;
3681                             case 0x17: /* Tick compare */
3682 #if !defined(CONFIG_USER_ONLY)
3683                                 if (!supervisor(dc))
3684                                     goto illegal_insn;
3685 #endif
3686                                 {
3687                                     TCGv_ptr r_tickptr;
3688
3689                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3690                                                    cpu_src2);
3691                                     r_tickptr = tcg_temp_new_ptr();
3692                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3693                                                    offsetof(CPUSPARCState, tick));
3694                                     gen_helper_tick_set_limit(r_tickptr,
3695                                                               cpu_tick_cmpr);
3696                                     tcg_temp_free_ptr(r_tickptr);
3697                                 }
3698                                 break;
3699                             case 0x18: /* System tick */
3700 #if !defined(CONFIG_USER_ONLY)
3701                                 if (!supervisor(dc))
3702                                     goto illegal_insn;
3703 #endif
3704                                 {
3705                                     TCGv_ptr r_tickptr;
3706
3707                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3708                                                    cpu_src2);
3709                                     r_tickptr = tcg_temp_new_ptr();
3710                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3711                                                    offsetof(CPUSPARCState, stick));
3712                                     gen_helper_tick_set_count(r_tickptr,
3713                                                               cpu_tmp0);
3714                                     tcg_temp_free_ptr(r_tickptr);
3715                                 }
3716                                 break;
3717                             case 0x19: /* System tick compare */
3718 #if !defined(CONFIG_USER_ONLY)
3719                                 if (!supervisor(dc))
3720                                     goto illegal_insn;
3721 #endif
3722                                 {
3723                                     TCGv_ptr r_tickptr;
3724
3725                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3726                                                    cpu_src2);
3727                                     r_tickptr = tcg_temp_new_ptr();
3728                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3729                                                    offsetof(CPUSPARCState, stick));
3730                                     gen_helper_tick_set_limit(r_tickptr,
3731                                                               cpu_stick_cmpr);
3732                                     tcg_temp_free_ptr(r_tickptr);
3733                                 }
3734                                 break;
3735
3736                             case 0x10: /* Performance Control */
3737                             case 0x11: /* Performance Instrumentation
3738                                           Counter */
3739                             case 0x12: /* Dispatch Control */
3740 #endif
3741                             default:
3742                                 goto illegal_insn;
3743                             }
3744                         }
3745                         break;
3746 #if !defined(CONFIG_USER_ONLY)
3747                     case 0x31: /* wrpsr, V9 saved, restored */
3748                         {
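                                 /* On pre-V9 this is wrpsr: PSR holds the icc
                                    flags plus bits such as EF and S that the
                                    translation depends on, so the helper call
                                    is followed by ending the TB.  On V9 the
                                    same opcode encodes saved/restored,
                                    selected by rd.  */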
3749                             if (!supervisor(dc))
3750                                 goto priv_insn;
3751 #ifdef TARGET_SPARC64
3752                             switch (rd) {
3753                             case 0:
3754                                 gen_helper_saved(cpu_env);
3755                                 break;
3756                             case 1:
3757                                 gen_helper_restored(cpu_env);
3758                                 break;
3759                             case 2: /* UA2005 allclean */
3760                             case 3: /* UA2005 otherw */
3761                             case 4: /* UA2005 normalw */
3762                             case 5: /* UA2005 invalw */
3763                                 // XXX: not implemented, fall through to illegal_insn
3764                             default:
3765                                 goto illegal_insn;
3766                             }
3767 #else
3768                             cpu_tmp0 = get_temp_tl(dc);
3769                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3770                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
3771                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3772                             dc->cc_op = CC_OP_FLAGS;
3773                             save_state(dc);
3774                             gen_op_next_insn();
3775                             tcg_gen_exit_tb(0);
3776                             dc->is_br = 1;
3777 #endif
3778                         }
3779                         break;
3780                     case 0x32: /* wrwim, V9 wrpr */
3781                         {
3782                             if (!supervisor(dc))
3783                                 goto priv_insn;
3784                             cpu_tmp0 = get_temp_tl(dc);
3785                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3786 #ifdef TARGET_SPARC64
3787                             switch (rd) {
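                                 /* rd selects the V9 privileged register.
                                    Writes that can change trap or translation
                                    state (pstate, tl) save the state and force
                                    npc to DYNAMIC_PC so execution
                                    resynchronizes.  */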
3788                             case 0: // tpc
3789                                 {
3790                                     TCGv_ptr r_tsptr;
3791
3792                                     r_tsptr = tcg_temp_new_ptr();
3793                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3794                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3795                                                   offsetof(trap_state, tpc));
3796                                     tcg_temp_free_ptr(r_tsptr);
3797                                 }
3798                                 break;
3799                             case 1: // tnpc
3800                                 {
3801                                     TCGv_ptr r_tsptr;
3802
3803                                     r_tsptr = tcg_temp_new_ptr();
3804                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3805                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3806                                                   offsetof(trap_state, tnpc));
3807                                     tcg_temp_free_ptr(r_tsptr);
3808                                 }
3809                                 break;
3810                             case 2: // tstate
3811                                 {
3812                                     TCGv_ptr r_tsptr;
3813
3814                                     r_tsptr = tcg_temp_new_ptr();
3815                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3816                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3817                                                   offsetof(trap_state,
3818                                                            tstate));
3819                                     tcg_temp_free_ptr(r_tsptr);
3820                                 }
3821                                 break;
3822                             case 3: // tt
3823                                 {
3824                                     TCGv_ptr r_tsptr;
3825
3826                                     r_tsptr = tcg_temp_new_ptr();
3827                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3828                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
3829                                                     offsetof(trap_state, tt));
3830                                     tcg_temp_free_ptr(r_tsptr);
3831                                 }
3832                                 break;
3833                             case 4: // tick
3834                                 {
3835                                     TCGv_ptr r_tickptr;
3836
3837                                     r_tickptr = tcg_temp_new_ptr();
3838                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3839                                                    offsetof(CPUSPARCState, tick));
3840                                     gen_helper_tick_set_count(r_tickptr,
3841                                                               cpu_tmp0);
3842                                     tcg_temp_free_ptr(r_tickptr);
3843                                 }
3844                                 break;
3845                             case 5: // tba
3846                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3847                                 break;
3848                             case 6: // pstate
3849                                 save_state(dc);
3850                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3851                                 dc->npc = DYNAMIC_PC;
3852                                 break;
3853                             case 7: // tl
3854                                 save_state(dc);
3855                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3856                                                offsetof(CPUSPARCState, tl));
3857                                 dc->npc = DYNAMIC_PC;
3858                                 break;
3859                             case 8: // pil
3860                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
3861                                 break;
3862                             case 9: // cwp
3863                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3864                                 break;
3865                             case 10: // cansave
3866                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3867                                                 offsetof(CPUSPARCState,
3868                                                          cansave));
3869                                 break;
3870                             case 11: // canrestore
3871                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3872                                                 offsetof(CPUSPARCState,
3873                                                          canrestore));
3874                                 break;
3875                             case 12: // cleanwin
3876                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3877                                                 offsetof(CPUSPARCState,
3878                                                          cleanwin));
3879                                 break;
3880                             case 13: // otherwin
3881                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3882                                                 offsetof(CPUSPARCState,
3883                                                          otherwin));
3884                                 break;
3885                             case 14: // wstate
3886                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3887                                                 offsetof(CPUSPARCState,
3888                                                          wstate));
3889                                 break;
3890                             case 16: // UA2005 gl
3891                                 CHECK_IU_FEATURE(dc, GL);
3892                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3893                                                 offsetof(CPUSPARCState, gl));
3894                                 break;
3895                             case 26: // UA2005 strand status
3896                                 CHECK_IU_FEATURE(dc, HYPV);
3897                                 if (!hypervisor(dc))
3898                                     goto priv_insn;
3899                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3900                                 break;
3901                             default:
3902                                 goto illegal_insn;
3903                             }
3904 #else
3905                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
3906                             if (dc->def->nwindows != 32) {
3907                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
3908                                                 (1 << dc->def->nwindows) - 1);
3909                             }
3910 #endif
3911                         }
3912                         break;
3913                     case 0x33: /* wrtbr, UA2005 wrhpr */
3914                         {
3915 #ifndef TARGET_SPARC64
3916                             if (!supervisor(dc))
3917                                 goto priv_insn;
3918                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3919 #else
3920                             CHECK_IU_FEATURE(dc, HYPV);
3921                             if (!hypervisor(dc))
3922                                 goto priv_insn;
3923                             cpu_tmp0 = get_temp_tl(dc);
3924                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3925                             switch (rd) {
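                                 /* rd selects the UA2005 hyperprivileged
                                    register; hver is read-only and anything
                                    unhandled is an illegal instruction.  */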
3926                             case 0: // hpstate
3927                                 // XXX gen_op_wrhpstate();
3928                                 save_state(dc);
3929                                 gen_op_next_insn();
3930                                 tcg_gen_exit_tb(0);
3931                                 dc->is_br = 1;
3932                                 break;
3933                             case 1: // htstate
3934                                 // XXX gen_op_wrhtstate();
3935                                 break;
3936                             case 3: // hintp
3937                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3938                                 break;
3939                             case 5: // htba
3940                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3941                                 break;
3942                             case 31: // hstick_cmpr
3943                                 {
3944                                     TCGv_ptr r_tickptr;
3945
3946                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3947                                     r_tickptr = tcg_temp_new_ptr();
3948                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3949                                                    offsetof(CPUSPARCState, hstick));
3950                                     gen_helper_tick_set_limit(r_tickptr,
3951                                                               cpu_hstick_cmpr);
3952                                     tcg_temp_free_ptr(r_tickptr);
3953                                 }
3954                                 break;
3955                             case 6: // hver is read-only, falls through to illegal_insn
3956                             default:
3957                                 goto illegal_insn;
3958                             }
3959 #endif
3960                         }
3961                         break;
3962 #endif
3963 #ifdef TARGET_SPARC64
3964                     case 0x2c: /* V9 movcc */
3965                         {
3966                             int cc = GET_FIELD_SP(insn, 11, 12);
3967                             int cond = GET_FIELD_SP(insn, 14, 17);
3968                             DisasCompare cmp;
3969                             TCGv dst;
3970
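                                 /* Insn bit 18 selects the integer condition
                                    codes, the cc field then picking %icc (0)
                                    or %xcc (2); with bit 18 clear the cc field
                                    names one of the %fcc registers.  */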
3971                             if (insn & (1 << 18)) {
3972                                 if (cc == 0) {
3973                                     gen_compare(&cmp, 0, cond, dc);
3974                                 } else if (cc == 2) {
3975                                     gen_compare(&cmp, 1, cond, dc);
3976                                 } else {
3977                                     goto illegal_insn;
3978                                 }
3979                             } else {
3980                                 gen_fcompare(&cmp, cc, cond);
3981                             }
3982
3983                             /* The get_src2 above loaded the normal 13-bit
3984                                immediate field, not the 11-bit field we have
3985                                in movcc.  But it did handle the reg case.  */
3986                             if (IS_IMM) {
3987                                 simm = GET_FIELD_SPs(insn, 0, 10);
3988                                 tcg_gen_movi_tl(cpu_src2, simm);
3989                             }
3990
3991                             dst = gen_load_gpr(dc, rd);
3992                             tcg_gen_movcond_tl(cmp.cond, dst,
3993                                                cmp.c1, cmp.c2,
3994                                                cpu_src2, dst);
3995                             free_compare(&cmp);
3996                             gen_store_gpr(dc, rd, dst);
3997                             break;
3998                         }
3999                     case 0x2d: /* V9 sdivx */
4000                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4001                         gen_store_gpr(dc, rd, cpu_dst);
4002                         break;
4003                     case 0x2e: /* V9 popc */
4004                         gen_helper_popc(cpu_dst, cpu_src2);
4005                         gen_store_gpr(dc, rd, cpu_dst);
4006                         break;
4007                     case 0x2f: /* V9 movr */
4008                         {
4009                             int cond = GET_FIELD_SP(insn, 10, 12);
4010                             DisasCompare cmp;
4011                             TCGv dst;
4012
4013                             gen_compare_reg(&cmp, cond, cpu_src1);
4014
4015                             /* The get_src2 above loaded the normal 13-bit
4016                                immediate field, not the 10-bit field we have
4017                                in movr.  But it did handle the reg case.  */
4018                             if (IS_IMM) {
4019                                 simm = GET_FIELD_SPs(insn, 0, 9);
4020                                 tcg_gen_movi_tl(cpu_src2, simm);
4021                             }
4022
4023                             dst = gen_load_gpr(dc, rd);
4024                             tcg_gen_movcond_tl(cmp.cond, dst,
4025                                                cmp.c1, cmp.c2,
4026                                                cpu_src2, dst);
4027                             free_compare(&cmp);
4028                             gen_store_gpr(dc, rd, dst);
4029                             break;
4030                         }
4031 #endif
4032                     default:
4033                         goto illegal_insn;
4034                     }
4035                 }
4036             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4037 #ifdef TARGET_SPARC64
4038                 int opf = GET_FIELD_SP(insn, 5, 13);
4039                 rs1 = GET_FIELD(insn, 13, 17);
4040                 rs2 = GET_FIELD(insn, 27, 31);
4041                 if (gen_trap_ifnofpu(dc)) {
4042                     goto jmp_insn;
4043                 }
4044
4045                 switch (opf) {
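                     /* opf selects the VIS operation: the edge* ops compute
                        partial-store masks from two addresses, array8/16/32
                        turn (x,y,z) coordinates into blocked-array addresses,
                        alignaddr sets up GSR.align for faligndata, and the
                        fcmp*16/32 ops return per-lane comparison masks in an
                        integer register.  */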
4046                 case 0x000: /* VIS I edge8cc */
4047                     CHECK_FPU_FEATURE(dc, VIS1);
4048                     cpu_src1 = gen_load_gpr(dc, rs1);
4049                     cpu_src2 = gen_load_gpr(dc, rs2);
4050                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4051                     gen_store_gpr(dc, rd, cpu_dst);
4052                     break;
4053                 case 0x001: /* VIS II edge8n */
4054                     CHECK_FPU_FEATURE(dc, VIS2);
4055                     cpu_src1 = gen_load_gpr(dc, rs1);
4056                     cpu_src2 = gen_load_gpr(dc, rs2);
4057                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4058                     gen_store_gpr(dc, rd, cpu_dst);
4059                     break;
4060                 case 0x002: /* VIS I edge8lcc */
4061                     CHECK_FPU_FEATURE(dc, VIS1);
4062                     cpu_src1 = gen_load_gpr(dc, rs1);
4063                     cpu_src2 = gen_load_gpr(dc, rs2);
4064                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4065                     gen_store_gpr(dc, rd, cpu_dst);
4066                     break;
4067                 case 0x003: /* VIS II edge8ln */
4068                     CHECK_FPU_FEATURE(dc, VIS2);
4069                     cpu_src1 = gen_load_gpr(dc, rs1);
4070                     cpu_src2 = gen_load_gpr(dc, rs2);
4071                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4072                     gen_store_gpr(dc, rd, cpu_dst);
4073                     break;
4074                 case 0x004: /* VIS I edge16cc */
4075                     CHECK_FPU_FEATURE(dc, VIS1);
4076                     cpu_src1 = gen_load_gpr(dc, rs1);
4077                     cpu_src2 = gen_load_gpr(dc, rs2);
4078                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4079                     gen_store_gpr(dc, rd, cpu_dst);
4080                     break;
4081                 case 0x005: /* VIS II edge16n */
4082                     CHECK_FPU_FEATURE(dc, VIS2);
4083                     cpu_src1 = gen_load_gpr(dc, rs1);
4084                     cpu_src2 = gen_load_gpr(dc, rs2);
4085                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4086                     gen_store_gpr(dc, rd, cpu_dst);
4087                     break;
4088                 case 0x006: /* VIS I edge16lcc */
4089                     CHECK_FPU_FEATURE(dc, VIS1);
4090                     cpu_src1 = gen_load_gpr(dc, rs1);
4091                     cpu_src2 = gen_load_gpr(dc, rs2);
4092                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4093                     gen_store_gpr(dc, rd, cpu_dst);
4094                     break;
4095                 case 0x007: /* VIS II edge16ln */
4096                     CHECK_FPU_FEATURE(dc, VIS2);
4097                     cpu_src1 = gen_load_gpr(dc, rs1);
4098                     cpu_src2 = gen_load_gpr(dc, rs2);
4099                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4100                     gen_store_gpr(dc, rd, cpu_dst);
4101                     break;
4102                 case 0x008: /* VIS I edge32cc */
4103                     CHECK_FPU_FEATURE(dc, VIS1);
4104                     cpu_src1 = gen_load_gpr(dc, rs1);
4105                     cpu_src2 = gen_load_gpr(dc, rs2);
4106                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4107                     gen_store_gpr(dc, rd, cpu_dst);
4108                     break;
4109                 case 0x009: /* VIS II edge32n */
4110                     CHECK_FPU_FEATURE(dc, VIS2);
4111                     cpu_src1 = gen_load_gpr(dc, rs1);
4112                     cpu_src2 = gen_load_gpr(dc, rs2);
4113                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4114                     gen_store_gpr(dc, rd, cpu_dst);
4115                     break;
4116                 case 0x00a: /* VIS I edge32lcc */
4117                     CHECK_FPU_FEATURE(dc, VIS1);
4118                     cpu_src1 = gen_load_gpr(dc, rs1);
4119                     cpu_src2 = gen_load_gpr(dc, rs2);
4120                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4121                     gen_store_gpr(dc, rd, cpu_dst);
4122                     break;
4123                 case 0x00b: /* VIS II edge32ln */
4124                     CHECK_FPU_FEATURE(dc, VIS2);
4125                     cpu_src1 = gen_load_gpr(dc, rs1);
4126                     cpu_src2 = gen_load_gpr(dc, rs2);
4127                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4128                     gen_store_gpr(dc, rd, cpu_dst);
4129                     break;
4130                 case 0x010: /* VIS I array8 */
4131                     CHECK_FPU_FEATURE(dc, VIS1);
4132                     cpu_src1 = gen_load_gpr(dc, rs1);
4133                     cpu_src2 = gen_load_gpr(dc, rs2);
4134                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4135                     gen_store_gpr(dc, rd, cpu_dst);
4136                     break;
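                     /* array16 and array32 produce the same blocked address as
                        array8, scaled by the element size, hence the extra
                        shift after the shared helper.  */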
4137                 case 0x012: /* VIS I array16 */
4138                     CHECK_FPU_FEATURE(dc, VIS1);
4139                     cpu_src1 = gen_load_gpr(dc, rs1);
4140                     cpu_src2 = gen_load_gpr(dc, rs2);
4141                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4142                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4143                     gen_store_gpr(dc, rd, cpu_dst);
4144                     break;
4145                 case 0x014: /* VIS I array32 */
4146                     CHECK_FPU_FEATURE(dc, VIS1);
4147                     cpu_src1 = gen_load_gpr(dc, rs1);
4148                     cpu_src2 = gen_load_gpr(dc, rs2);
4149                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4150                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4151                     gen_store_gpr(dc, rd, cpu_dst);
4152                     break;
4153                 case 0x018: /* VIS I alignaddr */
4154                     CHECK_FPU_FEATURE(dc, VIS1);
4155                     cpu_src1 = gen_load_gpr(dc, rs1);
4156                     cpu_src2 = gen_load_gpr(dc, rs2);
4157                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4158                     gen_store_gpr(dc, rd, cpu_dst);
4159                     break;
4160                 case 0x01a: /* VIS I alignaddrl */
4161                     CHECK_FPU_FEATURE(dc, VIS1);
4162                     cpu_src1 = gen_load_gpr(dc, rs1);
4163                     cpu_src2 = gen_load_gpr(dc, rs2);
4164                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4165                     gen_store_gpr(dc, rd, cpu_dst);
4166                     break;
4167                 case 0x019: /* VIS II bmask */
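                         /* bmask returns the sum of its operands and also
                            deposits the low 32 bits into GSR.mask (the upper
                            half of %gsr), where a later bshuffle finds it.  */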
4168                     CHECK_FPU_FEATURE(dc, VIS2);
4169                     cpu_src1 = gen_load_gpr(dc, rs1);
4170                     cpu_src2 = gen_load_gpr(dc, rs2);
4171                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4172                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4173                     gen_store_gpr(dc, rd, cpu_dst);
4174                     break;
4175                 case 0x020: /* VIS I fcmple16 */
4176                     CHECK_FPU_FEATURE(dc, VIS1);
4177                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4178                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4179                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4180                     gen_store_gpr(dc, rd, cpu_dst);
4181                     break;
4182                 case 0x022: /* VIS I fcmpne16 */
4183                     CHECK_FPU_FEATURE(dc, VIS1);
4184                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4185                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4186                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4187                     gen_store_gpr(dc, rd, cpu_dst);
4188                     break;
4189                 case 0x024: /* VIS I fcmple32 */
4190                     CHECK_FPU_FEATURE(dc, VIS1);
4191                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4192                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4193                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4194                     gen_store_gpr(dc, rd, cpu_dst);
4195                     break;
4196                 case 0x026: /* VIS I fcmpne32 */
4197                     CHECK_FPU_FEATURE(dc, VIS1);
4198                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4199                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4200                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4201                     gen_store_gpr(dc, rd, cpu_dst);
4202                     break;
4203                 case 0x028: /* VIS I fcmpgt16 */
4204                     CHECK_FPU_FEATURE(dc, VIS1);
4205                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4206                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4207                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4208                     gen_store_gpr(dc, rd, cpu_dst);
4209                     break;
4210                 case 0x02a: /* VIS I fcmpeq16 */
4211                     CHECK_FPU_FEATURE(dc, VIS1);
4212                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4213                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4214                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4215                     gen_store_gpr(dc, rd, cpu_dst);
4216                     break;
4217                 case 0x02c: /* VIS I fcmpgt32 */
4218                     CHECK_FPU_FEATURE(dc, VIS1);
4219                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4220                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4221                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4222                     gen_store_gpr(dc, rd, cpu_dst);
4223                     break;
4224                 case 0x02e: /* VIS I fcmpeq32 */
4225                     CHECK_FPU_FEATURE(dc, VIS1);
4226                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4227                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4228                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4229                     gen_store_gpr(dc, rd, cpu_dst);
4230                     break;
4231                 case 0x031: /* VIS I fmul8x16 */
4232                     CHECK_FPU_FEATURE(dc, VIS1);
4233                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4234                     break;
4235                 case 0x033: /* VIS I fmul8x16au */
4236                     CHECK_FPU_FEATURE(dc, VIS1);
4237                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4238                     break;
4239                 case 0x035: /* VIS I fmul8x16al */
4240                     CHECK_FPU_FEATURE(dc, VIS1);
4241                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4242                     break;
4243                 case 0x036: /* VIS I fmul8sux16 */
4244                     CHECK_FPU_FEATURE(dc, VIS1);
4245                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4246                     break;
4247                 case 0x037: /* VIS I fmul8ulx16 */
4248                     CHECK_FPU_FEATURE(dc, VIS1);
4249                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4250                     break;
4251                 case 0x038: /* VIS I fmuld8sux16 */
4252                     CHECK_FPU_FEATURE(dc, VIS1);
4253                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4254                     break;
4255                 case 0x039: /* VIS I fmuld8ulx16 */
4256                     CHECK_FPU_FEATURE(dc, VIS1);
4257                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4258                     break;
4259                 case 0x03a: /* VIS I fpack32 */
4260                     CHECK_FPU_FEATURE(dc, VIS1);
4261                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4262                     break;
4263                 case 0x03b: /* VIS I fpack16 */
4264                     CHECK_FPU_FEATURE(dc, VIS1);
4265                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4266                     cpu_dst_32 = gen_dest_fpr_F(dc);
4267                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4268                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4269                     break;
4270                 case 0x03d: /* VIS I fpackfix */
4271                     CHECK_FPU_FEATURE(dc, VIS1);
4272                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4273                     cpu_dst_32 = gen_dest_fpr_F(dc);
4274                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4275                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4276                     break;
4277                 case 0x03e: /* VIS I pdist */
4278                     CHECK_FPU_FEATURE(dc, VIS1);
4279                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4280                     break;
4281                 case 0x048: /* VIS I faligndata */
4282                     CHECK_FPU_FEATURE(dc, VIS1);
4283                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4284                     break;
4285                 case 0x04b: /* VIS I fpmerge */
4286                     CHECK_FPU_FEATURE(dc, VIS1);
4287                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4288                     break;
4289                 case 0x04c: /* VIS II bshuffle */
4290                     CHECK_FPU_FEATURE(dc, VIS2);
4291                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4292                     break;
4293                 case 0x04d: /* VIS I fexpand */
4294                     CHECK_FPU_FEATURE(dc, VIS1);
4295                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4296                     break;
4297                 case 0x050: /* VIS I fpadd16 */
4298                     CHECK_FPU_FEATURE(dc, VIS1);
4299                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4300                     break;
4301                 case 0x051: /* VIS I fpadd16s */
4302                     CHECK_FPU_FEATURE(dc, VIS1);
4303                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4304                     break;
4305                 case 0x052: /* VIS I fpadd32 */
4306                     CHECK_FPU_FEATURE(dc, VIS1);
4307                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4308                     break;
4309                 case 0x053: /* VIS I fpadd32s */
4310                     CHECK_FPU_FEATURE(dc, VIS1);
4311                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4312                     break;
4313                 case 0x054: /* VIS I fpsub16 */
4314                     CHECK_FPU_FEATURE(dc, VIS1);
4315                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4316                     break;
4317                 case 0x055: /* VIS I fpsub16s */
4318                     CHECK_FPU_FEATURE(dc, VIS1);
4319                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4320                     break;
4321                 case 0x056: /* VIS I fpsub32 */
4322                     CHECK_FPU_FEATURE(dc, VIS1);
4323                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4324                     break;
4325                 case 0x057: /* VIS I fpsub32s */
4326                     CHECK_FPU_FEATURE(dc, VIS1);
4327                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4328                     break;
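                     /* The remaining opf values are the VIS logical ops: FP
                        registers are treated as raw 64-bit (or 32-bit for the
                        "s" forms) bit vectors, so they map directly onto TCG
                        logical operations.  */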
4329                 case 0x060: /* VIS I fzero */
4330                     CHECK_FPU_FEATURE(dc, VIS1);
4331                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4332                     tcg_gen_movi_i64(cpu_dst_64, 0);
4333                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4334                     break;
4335                 case 0x061: /* VIS I fzeros */
4336                     CHECK_FPU_FEATURE(dc, VIS1);
4337                     cpu_dst_32 = gen_dest_fpr_F(dc);
4338                     tcg_gen_movi_i32(cpu_dst_32, 0);
4339                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4340                     break;
4341                 case 0x062: /* VIS I fnor */
4342                     CHECK_FPU_FEATURE(dc, VIS1);
4343                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4344                     break;
4345                 case 0x063: /* VIS I fnors */
4346                     CHECK_FPU_FEATURE(dc, VIS1);
4347                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4348                     break;
4349                 case 0x064: /* VIS I fandnot2 */
4350                     CHECK_FPU_FEATURE(dc, VIS1);
4351                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4352                     break;
4353                 case 0x065: /* VIS I fandnot2s */
4354                     CHECK_FPU_FEATURE(dc, VIS1);
4355                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4356                     break;
4357                 case 0x066: /* VIS I fnot2 */
4358                     CHECK_FPU_FEATURE(dc, VIS1);
4359                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4360                     break;
4361                 case 0x067: /* VIS I fnot2s */
4362                     CHECK_FPU_FEATURE(dc, VIS1);
4363                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4364                     break;
4365                 case 0x068: /* VIS I fandnot1 */
4366                     CHECK_FPU_FEATURE(dc, VIS1);
4367                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4368                     break;
4369                 case 0x069: /* VIS I fandnot1s */
4370                     CHECK_FPU_FEATURE(dc, VIS1);
4371                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4372                     break;
4373                 case 0x06a: /* VIS I fnot1 */
4374                     CHECK_FPU_FEATURE(dc, VIS1);
4375                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4376                     break;
4377                 case 0x06b: /* VIS I fnot1s */
4378                     CHECK_FPU_FEATURE(dc, VIS1);
4379                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4380                     break;
4381                 case 0x06c: /* VIS I fxor */
4382                     CHECK_FPU_FEATURE(dc, VIS1);
4383                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4384                     break;
4385                 case 0x06d: /* VIS I fxors */
4386                     CHECK_FPU_FEATURE(dc, VIS1);
4387                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4388                     break;
4389                 case 0x06e: /* VIS I fnand */
4390                     CHECK_FPU_FEATURE(dc, VIS1);
4391                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4392                     break;
4393                 case 0x06f: /* VIS I fnands */
4394                     CHECK_FPU_FEATURE(dc, VIS1);
4395                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4396                     break;
4397                 case 0x070: /* VIS I fand */
4398                     CHECK_FPU_FEATURE(dc, VIS1);
4399                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4400                     break;
4401                 case 0x071: /* VIS I fands */
4402                     CHECK_FPU_FEATURE(dc, VIS1);
4403                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4404                     break;
4405                 case 0x072: /* VIS I fxnor */
4406                     CHECK_FPU_FEATURE(dc, VIS1);
4407                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4408                     break;
4409                 case 0x073: /* VIS I fxnors */
4410                     CHECK_FPU_FEATURE(dc, VIS1);
4411                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4412                     break;
4413                 case 0x074: /* VIS I fsrc1 */
4414                     CHECK_FPU_FEATURE(dc, VIS1);
4415                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4416                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4417                     break;
4418                 case 0x075: /* VIS I fsrc1s */
4419                     CHECK_FPU_FEATURE(dc, VIS1);
4420                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4421                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4422                     break;
4423                 case 0x076: /* VIS I fornot2 */
4424                     CHECK_FPU_FEATURE(dc, VIS1);
4425                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4426                     break;
4427                 case 0x077: /* VIS I fornot2s */
4428                     CHECK_FPU_FEATURE(dc, VIS1);
4429                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4430                     break;
4431                 case 0x078: /* VIS I fsrc2 */
4432                     CHECK_FPU_FEATURE(dc, VIS1);
4433                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4434                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4435                     break;
4436                 case 0x079: /* VIS I fsrc2s */
4437                     CHECK_FPU_FEATURE(dc, VIS1);
4438                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4439                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4440                     break;
4441                 case 0x07a: /* VIS I fornot1 */
4442                     CHECK_FPU_FEATURE(dc, VIS1);
4443                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4444                     break;
4445                 case 0x07b: /* VIS I fornot1s */
4446                     CHECK_FPU_FEATURE(dc, VIS1);
4447                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4448                     break;
4449                 case 0x07c: /* VIS I for */
4450                     CHECK_FPU_FEATURE(dc, VIS1);
4451                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4452                     break;
4453                 case 0x07d: /* VIS I fors */
4454                     CHECK_FPU_FEATURE(dc, VIS1);
4455                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4456                     break;
4457                 case 0x07e: /* VIS I fone */
4458                     CHECK_FPU_FEATURE(dc, VIS1);
4459                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4460                     tcg_gen_movi_i64(cpu_dst_64, -1);
4461                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4462                     break;
4463                 case 0x07f: /* VIS I fones */
4464                     CHECK_FPU_FEATURE(dc, VIS1);
4465                     cpu_dst_32 = gen_dest_fpr_F(dc);
4466                     tcg_gen_movi_i32(cpu_dst_32, -1);
4467                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4468                     break;
4469                 case 0x080: /* VIS I shutdown */
4470                 case 0x081: /* VIS II siam */
4471                     /* XXX: not implemented */
4472                     goto illegal_insn;
4473                 default:
4474                     goto illegal_insn;
4475                 }
4476 #else
4477                 goto ncp_insn;
4478 #endif
4479             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4480 #ifdef TARGET_SPARC64
4481                 goto illegal_insn;
4482 #else
4483                 goto ncp_insn;
4484 #endif
4485 #ifdef TARGET_SPARC64
4486             } else if (xop == 0x39) { /* V9 return */
4487                 TCGv_i32 r_const;
4488
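                     /* V9 return: pop the register window, then jump to
                        rs1 + (simm13 or rs2).  The target is checked for
                        4-byte alignment before being written to %npc. */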
4489                 save_state(dc);
4490                 cpu_src1 = get_src1(dc, insn);
4491                 cpu_tmp0 = get_temp_tl(dc);
4492                 if (IS_IMM) {   /* immediate */
4493                     simm = GET_FIELDs(insn, 19, 31);
4494                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4495                 } else {                /* register */
4496                     rs2 = GET_FIELD(insn, 27, 31);
4497                     if (rs2) {
4498                         cpu_src2 = gen_load_gpr(dc, rs2);
4499                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4500                     } else {
4501                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4502                     }
4503                 }
4504                 gen_helper_restore(cpu_env);
4505                 gen_mov_pc_npc(dc);
4506                 r_const = tcg_const_i32(3);
4507                 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4508                 tcg_temp_free_i32(r_const);
4509                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4510                 dc->npc = DYNAMIC_PC;
4511                 goto jmp_insn;
4512 #endif
4513             } else {
4514                 cpu_src1 = get_src1(dc, insn);
4515                 cpu_tmp0 = get_temp_tl(dc);
4516                 if (IS_IMM) {   /* immediate */
4517                     simm = GET_FIELDs(insn, 19, 31);
4518                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4519                 } else {                /* register */
4520                     rs2 = GET_FIELD(insn, 27, 31);
4521                     if (rs2) {
4522                         cpu_src2 = gen_load_gpr(dc, rs2);
4523                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4524                     } else {
4525                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4526                     }
4527                 }
4528                 switch (xop) {
4529                 case 0x38:      /* jmpl */
4530                     {
4531                         TCGv t;
4532                         TCGv_i32 r_const;
4533
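                             /* jmpl: rd receives the address of the jmpl
                                itself (dc->pc); the computed target only
                                reaches %npc, so the delay-slot instruction
                                executes before control transfers. */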
4534                         t = gen_dest_gpr(dc, rd);
4535                         tcg_gen_movi_tl(t, dc->pc);
4536                         gen_store_gpr(dc, rd, t);
4537                         gen_mov_pc_npc(dc);
4538                         r_const = tcg_const_i32(3);
4539                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4540                         tcg_temp_free_i32(r_const);
4541                         gen_address_mask(dc, cpu_tmp0);
4542                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4543                         dc->npc = DYNAMIC_PC;
4544                     }
4545                     goto jmp_insn;
4546 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4547                 case 0x39:      /* rett, V9 return */
4548                     {
4549                         TCGv_i32 r_const;
4550
4551                         if (!supervisor(dc))
4552                             goto priv_insn;
4553                         gen_mov_pc_npc(dc);
4554                         r_const = tcg_const_i32(3);
4555                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4556                         tcg_temp_free_i32(r_const);
4557                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4558                         dc->npc = DYNAMIC_PC;
4559                         gen_helper_rett(cpu_env);
4560                     }
4561                     goto jmp_insn;
4562 #endif
4563                 case 0x3b: /* flush */
4564                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4565                         goto unimp_flush;
4566                     /* nop */
4567                     break;
4568                 case 0x3c:      /* save */
4569                     save_state(dc);
4570                     gen_helper_save(cpu_env);
4571                     gen_store_gpr(dc, rd, cpu_tmp0);
4572                     break;
4573                 case 0x3d:      /* restore */
4574                     save_state(dc);
4575                     gen_helper_restore(cpu_env);
4576                     gen_store_gpr(dc, rd, cpu_tmp0);
4577                     break;
4578 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4579                 case 0x3e:      /* V9 done/retry */
4580                     {
4581                         switch (rd) {
4582                         case 0:
4583                             if (!supervisor(dc))
4584                                 goto priv_insn;
4585                             dc->npc = DYNAMIC_PC;
4586                             dc->pc = DYNAMIC_PC;
4587                             gen_helper_done(cpu_env);
4588                             goto jmp_insn;
4589                         case 1:
4590                             if (!supervisor(dc))
4591                                 goto priv_insn;
4592                             dc->npc = DYNAMIC_PC;
4593                             dc->pc = DYNAMIC_PC;
4594                             gen_helper_retry(cpu_env);
4595                             goto jmp_insn;
4596                         default:
4597                             goto illegal_insn;
4598                         }
4599                     }
4600                     break;
4601 #endif
4602                 default:
4603                     goto illegal_insn;
4604                 }
4605             }
4606             break;
4607         }
4608         break;
4609     case 3:                     /* load/store instructions */
4610         {
4611             unsigned int xop = GET_FIELD(insn, 7, 12);
4612             /* ??? gen_address_mask prevents us from using a source
4613                register directly.  Always generate a temporary.  */
4614             TCGv cpu_addr = get_temp_tl(dc);
4615
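             /* Effective address: rs1 plus either the sign-extended 13-bit
                immediate or rs2.  casa/casxa encode no offset at all. */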
4616             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
4617             if (xop == 0x3c || xop == 0x3e) {
4618                 /* V9 casa/casxa: no offset */
4619             } else if (IS_IMM) {     /* immediate */
4620                 simm = GET_FIELDs(insn, 19, 31);
4621                 if (simm != 0) {
4622                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
4623                 }
4624             } else {            /* register */
4625                 rs2 = GET_FIELD(insn, 27, 31);
4626                 if (rs2 != 0) {
4627                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
4628                 }
4629             }
4630             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4631                 (xop > 0x17 && xop <= 0x1d ) ||
4632                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4633                 TCGv cpu_val = gen_dest_gpr(dc, rd);
4634
4635                 switch (xop) {
4636                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4637                     gen_address_mask(dc, cpu_addr);
4638                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4639                     break;
4640                 case 0x1:       /* ldub, load unsigned byte */
4641                     gen_address_mask(dc, cpu_addr);
4642                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4643                     break;
4644                 case 0x2:       /* lduh, load unsigned halfword */
4645                     gen_address_mask(dc, cpu_addr);
4646                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4647                     break;
4648                 case 0x3:       /* ldd, load double word */
4649                     if (rd & 1)
4650                         goto illegal_insn;
4651                     else {
4652                         TCGv_i32 r_const;
4653                         TCGv_i64 t64;
4654
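                             /* ldd fills an even/odd register pair with one
                                64-bit load: the high half of t64 (the word at
                                the lower address) reaches rd via the common
                                gen_store_gpr below, while the low half is
                                stored into rd + 1 here. */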
4655                         save_state(dc);
4656                         r_const = tcg_const_i32(7);
4657                         /* XXX remove alignment check */
4658                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4659                         tcg_temp_free_i32(r_const);
4660                         gen_address_mask(dc, cpu_addr);
4661                         t64 = tcg_temp_new_i64();
4662                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4663                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4664                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4665                         gen_store_gpr(dc, rd + 1, cpu_val);
4666                         tcg_gen_shri_i64(t64, t64, 32);
4667                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4668                         tcg_temp_free_i64(t64);
4669                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4670                     }
4671                     break;
4672                 case 0x9:       /* ldsb, load signed byte */
4673                     gen_address_mask(dc, cpu_addr);
4674                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4675                     break;
4676                 case 0xa:       /* ldsh, load signed halfword */
4677                     gen_address_mask(dc, cpu_addr);
4678                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4679                     break;
4680                 case 0xd:       /* ldstub -- XXX: should be atomic */
4681                     {
4682                         TCGv r_const;
4683                         TCGv tmp = tcg_temp_new();
4684
4685                         gen_address_mask(dc, cpu_addr);
4686                         tcg_gen_qemu_ld8u(tmp, cpu_addr, dc->mem_idx);
4687                         r_const = tcg_const_tl(0xff);
4688                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4689                         tcg_gen_mov_tl(cpu_val, tmp);
4690                         tcg_temp_free(r_const);
4691                         tcg_temp_free(tmp);
4692                     }
4693                     break;
4694                 case 0x0f:
4695                     /* swap, swap register with memory. XXX: should also be atomic */
4696                     {
4697                         TCGv t0 = get_temp_tl(dc);
4698                         CHECK_IU_FEATURE(dc, SWAP);
4699                         cpu_src1 = gen_load_gpr(dc, rd);
4700                         gen_address_mask(dc, cpu_addr);
4701                         tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4702                         tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4703                         tcg_gen_mov_tl(cpu_val, t0);
4704                     }
4705                     break;
4706 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4707                 case 0x10:      /* lda, V9 lduwa, load word alternate */
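                         /* On V8, alternate-space accesses have no immediate
                            form and are privileged; V9 relaxes both
                            restrictions. */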
4708 #ifndef TARGET_SPARC64
4709                     if (IS_IMM)
4710                         goto illegal_insn;
4711                     if (!supervisor(dc))
4712                         goto priv_insn;
4713 #endif
4714                     save_state(dc);
4715                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4716                     break;
4717                 case 0x11:      /* lduba, load unsigned byte alternate */
4718 #ifndef TARGET_SPARC64
4719                     if (IS_IMM)
4720                         goto illegal_insn;
4721                     if (!supervisor(dc))
4722                         goto priv_insn;
4723 #endif
4724                     save_state(dc);
4725                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4726                     break;
4727                 case 0x12:      /* lduha, load unsigned halfword alternate */
4728 #ifndef TARGET_SPARC64
4729                     if (IS_IMM)
4730                         goto illegal_insn;
4731                     if (!supervisor(dc))
4732                         goto priv_insn;
4733 #endif
4734                     save_state(dc);
4735                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4736                     break;
4737                 case 0x13:      /* ldda, load double word alternate */
4738 #ifndef TARGET_SPARC64
4739                     if (IS_IMM)
4740                         goto illegal_insn;
4741                     if (!supervisor(dc))
4742                         goto priv_insn;
4743 #endif
4744                     if (rd & 1)
4745                         goto illegal_insn;
4746                     save_state(dc);
4747                     gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4748                     goto skip_move;
4749                 case 0x19:      /* ldsba, load signed byte alternate */
4750 #ifndef TARGET_SPARC64
4751                     if (IS_IMM)
4752                         goto illegal_insn;
4753                     if (!supervisor(dc))
4754                         goto priv_insn;
4755 #endif
4756                     save_state(dc);
4757                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4758                     break;
4759                 case 0x1a:      /* ldsha, load signed halfword alternate */
4760 #ifndef TARGET_SPARC64
4761                     if (IS_IMM)
4762                         goto illegal_insn;
4763                     if (!supervisor(dc))
4764                         goto priv_insn;
4765 #endif
4766                     save_state(dc);
4767                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4768                     break;
4769                 case 0x1d:      /* ldstuba -- XXX: should be atomic */
4770 #ifndef TARGET_SPARC64
4771                     if (IS_IMM)
4772                         goto illegal_insn;
4773                     if (!supervisor(dc))
4774                         goto priv_insn;
4775 #endif
4776                     save_state(dc);
4777                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4778                     break;
4779                 case 0x1f:      /* swapa, swap reg with alternate memory.
4780                                    XXX: should also be atomic */
4781                     CHECK_IU_FEATURE(dc, SWAP);
4782 #ifndef TARGET_SPARC64
4783                     if (IS_IMM)
4784                         goto illegal_insn;
4785                     if (!supervisor(dc))
4786                         goto priv_insn;
4787 #endif
4788                     save_state(dc);
4789                     cpu_src1 = gen_load_gpr(dc, rd);
4790                     gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4791                     break;
4792
4793 #ifndef TARGET_SPARC64
4794                 case 0x30: /* ldc */
4795                 case 0x31: /* ldcsr */
4796                 case 0x33: /* lddc */
4797                     goto ncp_insn;
4798 #endif
4799 #endif
4800 #ifdef TARGET_SPARC64
4801                 case 0x08: /* V9 ldsw */
4802                     gen_address_mask(dc, cpu_addr);
4803                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4804                     break;
4805                 case 0x0b: /* V9 ldx */
4806                     gen_address_mask(dc, cpu_addr);
4807                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4808                     break;
4809                 case 0x18: /* V9 ldswa */
4810                     save_state(dc);
4811                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4812                     break;
4813                 case 0x1b: /* V9 ldxa */
4814                     save_state(dc);
4815                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4816                     break;
4817                 case 0x2d: /* V9 prefetch, no effect */
4818                     goto skip_move;
4819                 case 0x30: /* V9 ldfa */
4820                     if (gen_trap_ifnofpu(dc)) {
4821                         goto jmp_insn;
4822                     }
4823                     save_state(dc);
4824                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4825                     gen_update_fprs_dirty(rd);
4826                     goto skip_move;
4827                 case 0x33: /* V9 lddfa */
4828                     if (gen_trap_ifnofpu(dc)) {
4829                         goto jmp_insn;
4830                     }
4831                     save_state(dc);
4832                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4833                     gen_update_fprs_dirty(DFPREG(rd));
4834                     goto skip_move;
4835                 case 0x3d: /* V9 prefetcha, no effect */
4836                     goto skip_move;
4837                 case 0x32: /* V9 ldqfa */
4838                     CHECK_FPU_FEATURE(dc, FLOAT128);
4839                     if (gen_trap_ifnofpu(dc)) {
4840                         goto jmp_insn;
4841                     }
4842                     save_state(dc);
4843                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4844                     gen_update_fprs_dirty(QFPREG(rd));
4845                     goto skip_move;
4846 #endif
4847                 default:
4848                     goto illegal_insn;
4849                 }
4850                 gen_store_gpr(dc, rd, cpu_val);
4851 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4852             skip_move: ;
4853 #endif
4854             } else if (xop >= 0x20 && xop < 0x24) {
4855                 TCGv t0;
4856
4857                 if (gen_trap_ifnofpu(dc)) {
4858                     goto jmp_insn;
4859                 }
4860                 save_state(dc);
4861                 switch (xop) {
4862                 case 0x20:      /* ldf, load fpreg */
4863                     gen_address_mask(dc, cpu_addr);
4864                     t0 = get_temp_tl(dc);
4865                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4866                     cpu_dst_32 = gen_dest_fpr_F(dc);
4867                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4868                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4869                     break;
4870                 case 0x21:      /* ldfsr, V9 ldxfsr */
4871 #ifdef TARGET_SPARC64
4872                     gen_address_mask(dc, cpu_addr);
4873                     if (rd == 1) {
4874                         TCGv_i64 t64 = tcg_temp_new_i64();
4875                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4876                         gen_helper_ldxfsr(cpu_env, t64);
4877                         tcg_temp_free_i64(t64);
4878                         break;
4879                     }
4880 #endif
4881                     cpu_dst_32 = get_temp_i32(dc);
4882                     t0 = get_temp_tl(dc);
4883                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4884                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4885                     gen_helper_ldfsr(cpu_env, cpu_dst_32);
4886                     break;
4887                 case 0x22:      /* ldqf, load quad fpreg */
4888                     {
4889                         TCGv_i32 r_const;
4890
4891                         CHECK_FPU_FEATURE(dc, FLOAT128);
4892                         r_const = tcg_const_i32(dc->mem_idx);
4893                         gen_address_mask(dc, cpu_addr);
4894                         gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4895                         tcg_temp_free_i32(r_const);
4896                         gen_op_store_QT0_fpr(QFPREG(rd));
4897                         gen_update_fprs_dirty(QFPREG(rd));
4898                     }
4899                     break;
4900                 case 0x23:      /* lddf, load double fpreg */
4901                     gen_address_mask(dc, cpu_addr);
4902                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4903                     tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4904                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4905                     break;
4906                 default:
4907                     goto illegal_insn;
4908                 }
4909             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4910                        xop == 0xe || xop == 0x1e) {
4911                 TCGv cpu_val = gen_load_gpr(dc, rd);
4912
4913                 switch (xop) {
4914                 case 0x4: /* st, store word */
4915                     gen_address_mask(dc, cpu_addr);
4916                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4917                     break;
4918                 case 0x5: /* stb, store byte */
4919                     gen_address_mask(dc, cpu_addr);
4920                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4921                     break;
4922                 case 0x6: /* sth, store halfword */
4923                     gen_address_mask(dc, cpu_addr);
4924                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4925                     break;
4926                 case 0x7: /* std, store double word */
4927                     if (rd & 1)
4928                         goto illegal_insn;
4929                     else {
4930                         TCGv_i32 r_const;
4931                         TCGv_i64 t64;
4932                         TCGv lo;
4933
4934                         save_state(dc);
4935                         gen_address_mask(dc, cpu_addr);
4936                         r_const = tcg_const_i32(7);
4937                         /* XXX remove alignment check */
4938                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4939                         tcg_temp_free_i32(r_const);
4940                         lo = gen_load_gpr(dc, rd + 1);
4941
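                             /* std writes the pair as a single 64-bit access:
                                rd supplies the upper word (lower address),
                                rd + 1 the lower word. */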
4942                         t64 = tcg_temp_new_i64();
4943                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
4944                         tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
4945                         tcg_temp_free_i64(t64);
4946                     }
4947                     break;
4948 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4949                 case 0x14: /* sta, V9 stwa, store word alternate */
4950 #ifndef TARGET_SPARC64
4951                     if (IS_IMM)
4952                         goto illegal_insn;
4953                     if (!supervisor(dc))
4954                         goto priv_insn;
4955 #endif
4956                     save_state(dc);
4957                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4958                     dc->npc = DYNAMIC_PC;
4959                     break;
4960                 case 0x15: /* stba, store byte alternate */
4961 #ifndef TARGET_SPARC64
4962                     if (IS_IMM)
4963                         goto illegal_insn;
4964                     if (!supervisor(dc))
4965                         goto priv_insn;
4966 #endif
4967                     save_state(dc);
4968                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4969                     dc->npc = DYNAMIC_PC;
4970                     break;
4971                 case 0x16: /* stha, store halfword alternate */
4972 #ifndef TARGET_SPARC64
4973                     if (IS_IMM)
4974                         goto illegal_insn;
4975                     if (!supervisor(dc))
4976                         goto priv_insn;
4977 #endif
4978                     save_state(dc);
4979                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4980                     dc->npc = DYNAMIC_PC;
4981                     break;
4982                 case 0x17: /* stda, store double word alternate */
4983 #ifndef TARGET_SPARC64
4984                     if (IS_IMM)
4985                         goto illegal_insn;
4986                     if (!supervisor(dc))
4987                         goto priv_insn;
4988 #endif
4989                     if (rd & 1)
4990                         goto illegal_insn;
4991                     else {
4992                         save_state(dc);
4993                         gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
4994                     }
4995                     break;
4996 #endif
4997 #ifdef TARGET_SPARC64
4998                 case 0x0e: /* V9 stx */
4999                     gen_address_mask(dc, cpu_addr);
5000                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5001                     break;
5002                 case 0x1e: /* V9 stxa */
5003                     save_state(dc);
5004                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
5005                     dc->npc = DYNAMIC_PC;
5006                     break;
5007 #endif
5008                 default:
5009                     goto illegal_insn;
5010                 }
5011             } else if (xop > 0x23 && xop < 0x28) {
5012                 if (gen_trap_ifnofpu(dc)) {
5013                     goto jmp_insn;
5014                 }
5015                 save_state(dc);
5016                 switch (xop) {
5017                 case 0x24: /* stf, store fpreg */
5018                     {
5019                         TCGv t = get_temp_tl(dc);
5020                         gen_address_mask(dc, cpu_addr);
5021                         cpu_src1_32 = gen_load_fpr_F(dc, rd);
5022                         tcg_gen_ext_i32_tl(t, cpu_src1_32);
5023                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5024                     }
5025                     break;
5026                 case 0x25: /* stfsr, V9 stxfsr */
5027                     {
5028                         TCGv t = get_temp_tl(dc);
5029
5030                         tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
5031 #ifdef TARGET_SPARC64
5032                         gen_address_mask(dc, cpu_addr);
5033                         if (rd == 1) {
5034                             tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
5035                             break;
5036                         }
5037 #endif
5038                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5039                     }
5040                     break;
5041                 case 0x26:
5042 #ifdef TARGET_SPARC64
5043                     /* V9 stqf, store quad fpreg */
5044                     {
5045                         TCGv_i32 r_const;
5046
5047                         CHECK_FPU_FEATURE(dc, FLOAT128);
5048                         gen_op_load_fpr_QT0(QFPREG(rd));
5049                         r_const = tcg_const_i32(dc->mem_idx);
5050                         gen_address_mask(dc, cpu_addr);
5051                         gen_helper_stqf(cpu_env, cpu_addr, r_const);
5052                         tcg_temp_free_i32(r_const);
5053                     }
5054                     break;
5055 #else /* !TARGET_SPARC64 */
5056                     /* stdfq, store floating point queue */
5057 #if defined(CONFIG_USER_ONLY)
5058                     goto illegal_insn;
5059 #else
5060                     if (!supervisor(dc))
5061                         goto priv_insn;
5062                     if (gen_trap_ifnofpu(dc)) {
5063                         goto jmp_insn;
5064                     }
5065                     goto nfq_insn;
5066 #endif
5067 #endif
5068                 case 0x27: /* stdf, store double fpreg */
5069                     gen_address_mask(dc, cpu_addr);
5070                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5071                     tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5072                     break;
5073                 default:
5074                     goto illegal_insn;
5075                 }
5076             } else if (xop > 0x33 && xop < 0x3f) {
5077                 save_state(dc);
5078                 switch (xop) {
5079 #ifdef TARGET_SPARC64
5080                 case 0x34: /* V9 stfa */
5081                     if (gen_trap_ifnofpu(dc)) {
5082                         goto jmp_insn;
5083                     }
5084                     gen_stf_asi(cpu_addr, insn, 4, rd);
5085                     break;
5086                 case 0x36: /* V9 stqfa */
5087                     {
5088                         TCGv_i32 r_const;
5089
5090                         CHECK_FPU_FEATURE(dc, FLOAT128);
5091                         if (gen_trap_ifnofpu(dc)) {
5092                             goto jmp_insn;
5093                         }
5094                         r_const = tcg_const_i32(7);
5095                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
5096                         tcg_temp_free_i32(r_const);
5097                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5098                     }
5099                     break;
5100                 case 0x37: /* V9 stdfa */
5101                     if (gen_trap_ifnofpu(dc)) {
5102                         goto jmp_insn;
5103                     }
5104                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5105                     break;
5106                 case 0x3e: /* V9 casxa */
5107                     rs2 = GET_FIELD(insn, 27, 31);
5108                     cpu_src2 = gen_load_gpr(dc, rs2);
5109                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5110                     break;
5111 #else
5112                 case 0x34: /* stc */
5113                 case 0x35: /* stcsr */
5114                 case 0x36: /* stdcq */
5115                 case 0x37: /* stdc */
5116                     goto ncp_insn;
5117 #endif
5118 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5119                 case 0x3c: /* V9 or LEON3 casa */
5120 #ifndef TARGET_SPARC64
5121                     CHECK_IU_FEATURE(dc, CASA);
5122                     if (IS_IMM) {
5123                         goto illegal_insn;
5124                     }
5125                     /* LEON3 allows CASA from user space with ASI 0xa */
5126                     if ((GET_FIELD(insn, 19, 26) != 0xa) && !supervisor(dc)) {
5127                         goto priv_insn;
5128                     }
5129 #endif
5130                     rs2 = GET_FIELD(insn, 27, 31);
5131                     cpu_src2 = gen_load_gpr(dc, rs2);
5132                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5133                     break;
5134 #endif
5135                 default:
5136                     goto illegal_insn;
5137                 }
5138             } else {
5139                 goto illegal_insn;
5140             }
5141         }
5142         break;
5143     }
5144     /* default case for non-jump instructions */
5145     if (dc->npc == DYNAMIC_PC) {
5146         dc->pc = DYNAMIC_PC;
5147         gen_op_next_insn();
5148     } else if (dc->npc == JUMP_PC) {
5149         /* we can do a static jump */
5150         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5151         dc->is_br = 1;
5152     } else {
5153         dc->pc = dc->npc;
5154         dc->npc = dc->npc + 4;
5155     }
5156  jmp_insn:
5157     goto egress;
5158  illegal_insn:
5159     gen_exception(dc, TT_ILL_INSN);
5160     goto egress;
5161  unimp_flush:
5162     gen_exception(dc, TT_UNIMP_FLUSH);
5163     goto egress;
5164 #if !defined(CONFIG_USER_ONLY)
5165  priv_insn:
5166     gen_exception(dc, TT_PRIV_INSN);
5167     goto egress;
5168 #endif
5169  nfpu_insn:
5170     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5171     goto egress;
5172 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5173  nfq_insn:
5174     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5175     goto egress;
5176 #endif
5177 #ifndef TARGET_SPARC64
5178  ncp_insn:
5179     gen_exception(dc, TT_NCP_INSN);
5180     goto egress;
5181 #endif
5182  egress:
5183     if (dc->n_t32 != 0) {
5184         int i;
5185         for (i = dc->n_t32 - 1; i >= 0; --i) {
5186             tcg_temp_free_i32(dc->t32[i]);
5187         }
5188         dc->n_t32 = 0;
5189     }
5190     if (dc->n_ttl != 0) {
5191         int i;
5192         for (i = dc->n_ttl - 1; i >= 0; --i) {
5193             tcg_temp_free(dc->ttl[i]);
5194         }
5195         dc->n_ttl = 0;
5196     }
5197 }
5198
5199 void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
5200 {
5201     SPARCCPU *cpu = sparc_env_get_cpu(env);
5202     CPUState *cs = CPU(cpu);
5203     target_ulong pc_start, last_pc;
5204     DisasContext dc1, *dc = &dc1;
5205     int num_insns;
5206     int max_insns;
5207     unsigned int insn;
5208
5209     memset(dc, 0, sizeof(DisasContext));
5210     dc->tb = tb;
5211     pc_start = tb->pc;
5212     dc->pc = pc_start;
5213     last_pc = dc->pc;
5214     dc->npc = (target_ulong) tb->cs_base;
5215     dc->cc_op = CC_OP_DYNAMIC;
5216     dc->mem_idx = tb->flags & TB_FLAG_MMU_MASK;
5217     dc->def = env->def;
5218     dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5219     dc->address_mask_32bit = tb_am_enabled(tb->flags);
5220     dc->singlestep = (cs->singlestep_enabled || singlestep);
5221
5222     num_insns = 0;
5223     max_insns = tb->cflags & CF_COUNT_MASK;
5224     if (max_insns == 0) {
5225         max_insns = CF_COUNT_MASK;
5226     }
5227     if (max_insns > TCG_MAX_INSNS) {
5228         max_insns = TCG_MAX_INSNS;
5229     }
5230
5231     gen_tb_start(tb);
5232     do {
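              /* Record (pc, npc) for this insn; when npc is the synthetic
                 JUMP_PC value, store jump_pc[0] tagged with JUMP_PC instead,
                 so that restore_state_to_opc() can reconstruct both branch
                 targets. */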
5233         if (dc->npc & JUMP_PC) {
5234             assert(dc->jump_pc[1] == dc->pc + 4);
5235             tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5236         } else {
5237             tcg_gen_insn_start(dc->pc, dc->npc);
5238         }
5239         num_insns++;
5240         last_pc = dc->pc;
5241
5242         if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
5243             if (dc->pc != pc_start) {
5244                 save_state(dc);
5245             }
5246             gen_helper_debug(cpu_env);
5247             tcg_gen_exit_tb(0);
5248             dc->is_br = 1;
5249             goto exit_gen_loop;
5250         }
5251
5252         if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
5253             gen_io_start();
5254         }
5255
5256         insn = cpu_ldl_code(env, dc->pc);
5257
5258         disas_sparc_insn(dc, insn);
5259
5260         if (dc->is_br)
5261             break;
5262         /* if the next PC is different, we abort now */
5263         if (dc->pc != (last_pc + 4))
5264             break;
5265         /* if we reach a page boundary, we stop generation so that the
5266            PC of a TT_TFAULT exception is always in the right page */
5267         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5268             break;
5269         /* in single-step mode, we generate only one instruction and
5270            then generate an exception */
5271         if (dc->singlestep) {
5272             break;
5273         }
5274     } while (!tcg_op_buf_full() &&
5275              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5276              num_insns < max_insns);
5277
5278  exit_gen_loop:
5279     if (tb->cflags & CF_LAST_IO) {
5280         gen_io_end();
5281     }
5282     if (!dc->is_br) {
5283         if (dc->pc != DYNAMIC_PC &&
5284             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5285             /* static PC and NPC: we can use direct chaining */
5286             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5287         } else {
5288             if (dc->pc != DYNAMIC_PC) {
5289                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5290             }
5291             save_npc(dc);
5292             tcg_gen_exit_tb(0);
5293         }
5294     }
5295     gen_tb_end(tb, num_insns);
5296
5297     tb->size = last_pc + 4 - pc_start;
5298     tb->icount = num_insns;
5299
5300 #ifdef DEBUG_DISAS
5301     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
5302         && qemu_log_in_addr_range(pc_start)) {
5303         qemu_log("--------------\n");
5304         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5305         log_target_disas(cs, pc_start, last_pc + 4 - pc_start, 0);
5306         qemu_log("\n");
5307     }
5308 #endif
5309 }
5310
5311 void gen_intermediate_code_init(CPUSPARCState *env)
5312 {
5313     static int inited;
5314     static const char gregnames[32][4] = {
5315         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5316         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5317         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5318         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5319     };
5320     static const char fregnames[32][4] = {
5321         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5322         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5323         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5324         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5325     };
5326
5327     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5328 #ifdef TARGET_SPARC64
5329         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5330         { &cpu_asi, offsetof(CPUSPARCState, asi), "asi" },
5331         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5332 #else
5333         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5334 #endif
5335         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5336         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5337     };
5338
5339     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5340 #ifdef TARGET_SPARC64
5341         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5342         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5343         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5344         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5345           "hstick_cmpr" },
5346         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5347         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5348         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5349         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5350         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5351 #endif
5352         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5353         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5354         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5355         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5356         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5357         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5358         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5359         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5360 #ifndef CONFIG_USER_ONLY
5361         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5362 #endif
5363     };
5364
5365     unsigned int i;
5366
5367     /* init various static tables */
5368     if (inited) {
5369         return;
5370     }
5371     inited = 1;
5372
5373     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5374     tcg_ctx.tcg_env = cpu_env;
5375
5376     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5377                                          offsetof(CPUSPARCState, regwptr),
5378                                          "regwptr");
5379
5380     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5381         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5382     }
5383
5384     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5385         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5386     }
5387
5388     TCGV_UNUSED(cpu_regs[0]);
5389     for (i = 1; i < 8; ++i) {
5390         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5391                                          offsetof(CPUSPARCState, gregs[i]),
5392                                          gregnames[i]);
5393     }
5394
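          /* %g1..%g7 are plain fields in CPUSPARCState; the windowed %o, %l
             and %i registers are reached indirectly through regwptr, which
             points at the current register window. */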
5395     for (i = 8; i < 32; ++i) {
5396         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5397                                          (i - 8) * sizeof(target_ulong),
5398                                          gregnames[i]);
5399     }
5400
5401     for (i = 0; i < TARGET_DPREGS; i++) {
5402         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5403                                             offsetof(CPUSPARCState, fpr[i]),
5404                                             fregnames[i]);
5405     }
5406 }
5407
5408 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
5409                           target_ulong *data)
5410 {
5411     target_ulong pc = data[0];
5412     target_ulong npc = data[1];
5413
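          /* data[] holds the values recorded by tcg_gen_insn_start() in
             gen_intermediate_code() above. */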
5414     env->pc = pc;
5415     if (npc == DYNAMIC_PC) {
5416         /* dynamic NPC: already stored */
5417     } else if (npc & JUMP_PC) {
5418         /* jump PC: use 'cond' and the jump targets of the translation */
5419         if (env->cond) {
5420             env->npc = npc & ~3;
5421         } else {
5422             env->npc = pc + 4;
5423         }
5424     } else {
5425         env->npc = npc;
5426     }
5427 }