/* target/sparc/translate.c (qemu.git) — gitweb viewer header removed.
   Commit subject: "target-sparc: implement UA2005 TSB Pointers".  */
1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <[email protected]>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include "qemu/osdep.h"
22
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg-op.h"
28 #include "exec/cpu_ldst.h"
29
30 #include "exec/helper-gen.h"
31
32 #include "trace-tcg.h"
33 #include "exec/log.h"
34 #include "asi.h"
35
36
37 #define DEBUG_DISAS
38
/* Sentinel "next PC" values stored in dc->pc / dc->npc in place of a real
   target address; real PCs are 4-byte aligned so these low values are free.  */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

/* global register indexes */
static TCGv_env cpu_env;                /* pointer to CPUSPARCState */
static TCGv_ptr cpu_regwptr;            /* pointer to the current window regs */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;  /* lazy flags: operands/result */
static TCGv_i32 cpu_cc_op;              /* CC_OP_* describing how to compute flags */
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];               /* %g0 reads yield 0 (see gen_load_gpr) */
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;                   /* holds a resolved branch condition */
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
66
67 #include "exec/gen-icount.h"
68
/* Per-translation-block disassembly state.  */
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;          /* set once the TB has been ended (branch/exception) */
    int mem_idx;        /* MMU index for memory accesses */
    bool fpu_enabled;
    bool address_mask_32bit;  /* mask effective addresses to 32 bits */
    bool singlestep;
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3]; /* i32 temporaries live for the current insn */
    TCGv ttl[5];     /* target-long temporaries live for the current insn */
    int n_t32;       /* entries used in t32[] */
    int n_ttl;       /* entries used in ttl[] */
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS dirty bits already OR-ed in within this TB */
    int asi;
#endif
} DisasContext;

/* A comparison decomposed for tcg_gen_*cond*: COND applied to (c1, c2).
   is_bool: c1 already holds a 0/1 value.  g1/g2: set when c1/c2 must not
   be freed by free_compare().  */
typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
104
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Signed variants: extract the field, then sign-extend it to its width.  */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map a double/quad FP register number: on sparc64 bit 0 of the encoding
   selects the upper bank (becomes bit 5 of the register index).  */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

/* Software trap numbers are truncated to 8 bits (UA2005 hyper-traps)
   or 7 bits (SPARC V8).  */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
126
/* Sign-extend the low LEN bits of X to a full int.
   The left shift goes through unsigned to avoid undefined behaviour when
   a bit would be shifted into (or out of) the sign bit; the arithmetic
   right shift of a negative value is implementation-defined, but is an
   arithmetic shift on all supported compilers.  */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((unsigned int)x << len) >> len;
}
132
/* Bit 13 of an instruction word selects the immediate (i) form.  */
#define IS_IMM (insn & (1<<13))
134
135 static inline TCGv_i32 get_temp_i32(DisasContext *dc)
136 {
137     TCGv_i32 t;
138     assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
139     dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
140     return t;
141 }
142
143 static inline TCGv get_temp_tl(DisasContext *dc)
144 {
145     TCGv t;
146     assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
147     dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
148     return t;
149 }
150
/* Record that FP register RD's half of the register file was modified:
   OR the matching bit into cpu_fprs (bit 1 for regs 0..31, bit 2 for
   regs 32..63).  dc->fprs_dirty caches bits already set within this TB
   so the OR is emitted at most once per half.  No-op on sparc32.  */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}

/* floating point registers moves */
/* Read single-precision FP register SRC.  Two singles are packed per
   i64 in cpu_fpr[]: odd-numbered regs in the low 32 bits, even-numbered
   regs in the high 32 bits.  */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        /* odd reg: reinterpret the i64 global's low half as an i32 */
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        /* even reg: shift the high half down into a fresh temp */
        TCGv_i32 ret = get_temp_i32(dc);
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        return ret;
    }
#endif
}

/* Write V to single-precision FP register DST (same packing as
   gen_load_fpr_F above) and mark the FP file dirty.  */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* deposit into the low (odd dst) or high (even dst) 32 bits */
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dc, dst);
}
204
205 static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
206 {
207     return get_temp_i32(dc);
208 }
209
210 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
211 {
212     src = DFPREG(src);
213     return cpu_fpr[src / 2];
214 }
215
216 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
217 {
218     dst = DFPREG(dst);
219     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
220     gen_update_fprs_dirty(dc, dst);
221 }
222
223 static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
224 {
225     return cpu_fpr[DFPREG(dst) / 2];
226 }
227
228 static void gen_op_load_fpr_QT0(unsigned int src)
229 {
230     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
231                    offsetof(CPU_QuadU, ll.upper));
232     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
233                    offsetof(CPU_QuadU, ll.lower));
234 }
235
236 static void gen_op_load_fpr_QT1(unsigned int src)
237 {
238     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
239                    offsetof(CPU_QuadU, ll.upper));
240     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
241                    offsetof(CPU_QuadU, ll.lower));
242 }
243
244 static void gen_op_store_QT0_fpr(unsigned int dst)
245 {
246     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
247                    offsetof(CPU_QuadU, ll.upper));
248     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
249                    offsetof(CPU_QuadU, ll.lower));
250 }
251
252 static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
253                             TCGv_i64 v1, TCGv_i64 v2)
254 {
255     dst = QFPREG(dst);
256
257     tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
258     tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
259     gen_update_fprs_dirty(dc, dst);
260 }
261
#ifdef TARGET_SPARC64
/* Return the i64 holding the upper half of quad FP register SRC.  */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Return the i64 holding the lower half of quad FP register SRC.  */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad FP register RS to RD and mark the FP file dirty.  */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
#endif

/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
/* hypervisor mode implies supervisor privileges */
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
/* 32-bit address masking depends on CPU state on full sparc64... */
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
/* ...and is always enabled for the 32-bit ABI.  */
#define AM_CHECK(dc) (1)
#endif
#endif
308
309 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
310 {
311 #ifdef TARGET_SPARC64
312     if (AM_CHECK(dc))
313         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
314 #endif
315 }
316
317 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
318 {
319     if (reg > 0) {
320         assert(reg < 32);
321         return cpu_regs[reg];
322     } else {
323         TCGv t = get_temp_tl(dc);
324         tcg_gen_movi_tl(t, 0);
325         return t;
326     }
327 }
328
329 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
330 {
331     if (reg > 0) {
332         assert(reg < 32);
333         tcg_gen_mov_tl(cpu_regs[reg], v);
334     }
335 }
336
337 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
338 {
339     if (reg > 0) {
340         assert(reg < 32);
341         return cpu_regs[reg];
342     } else {
343         return get_temp_tl(dc);
344     }
345 }
346
/* Decide whether a direct (chained) jump to PC/NPC is permitted:
   never while single-stepping, and in system mode only when both
   targets lie on the same page as the TB being translated.  */
static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
                               target_ulong npc)
{
    if (unlikely(s->singlestep)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (pc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK) &&
           (npc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK);
#else
    return true;
#endif
}

/* End the TB jumping to (PC, NPC): chain directly to the successor TB
   when allowed, otherwise fall back to the main execution loop.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((uintptr_t)s->tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
378
// XXX suboptimal
/* Extract one condition-code bit of the 32-bit flags image SRC into REG
   as 0/1: N (negative), Z (zero), V (overflow), C (carry).  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
407
/* dst = src1 + src2, recording operands and result in cpu_cc_* so the
   flags can be computed lazily later.  Callers are responsible for any
   accompanying cc_op bookkeeping.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* Recover the 32-bit carry of a previously generated add
   (cc_dst = cc_src + cc_src2) as a fresh i32 temp holding 0/1.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

/* Recover the 32-bit borrow of a previously generated subtract
   (cc_dst = cc_src - cc_src2) as a fresh i32 temp holding 0/1.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
467
/* Generate dst = src1 + src2 + carry (ADDX-style add-with-carry).  How
   the carry is obtained depends on how the flags were last set
   (dc->cc_op): known zero, recovered from the previous add/sub, or via
   a helper call.  When update_cc, flags switch to CC_OP_ADDX (the
   known-zero-carry path instead records state via gen_op_add_cc).  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
538
/* dst = src1 - src2, recording operands and result in cpu_cc_* so the
   flags can be computed lazily later.  Callers are responsible for any
   accompanying cc_op bookkeeping.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
546
/* Generate dst = src1 - src2 - carry (SUBX-style subtract-with-borrow).
   Mirror of gen_op_addx_int: the borrow source depends on dc->cc_op.
   When update_cc, flags switch to CC_OP_SUBX (the known-zero-carry path
   instead records state via gen_op_sub_cc).  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
617
/* Generate one multiply-step (MULScc): src2 is zeroed when %y's lsb is
   clear; %y shifts right one bit, capturing src1's lsb at its top;
   src1 shifts right one bit with (N ^ V) shifted in at bit 31; the two
   are added.  Operands/result stay in cpu_cc_* for flag computation
   (the old per-step C pseudo-code is kept inline below).  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(t0, cpu_y, 1);
    tcg_gen_andi_tl(t0, t0, 0x7fffffff);
    tcg_gen_or_tl(t0, t0, r_temp);
    tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
663
/* 32x32->64 multiply of the low halves of src1/src2 (signed when
   sign_ext).  The upper 32 bits of the product go to %y (cpu_y); on a
   32-bit target dst receives the low half via mul*2, on a 64-bit target
   dst receives the full 64-bit product.  */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    /* truncate the operands to 32 bits, sign- or zero-extending */
    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}

/* UMUL: unsigned 32x32->64 multiply.  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32->64 multiply.  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
703
/* Evaluate one of the sixteen integer branch conditions into DST as 0/1
   from the 32-bit flags image SRC.  Each helper is named after its Bicc
   mnemonic; the comment above each gives the formula in N/Z/V/C.  */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
820
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered

  The helpers below evaluate one floating-point branch condition into
  DST as 0/1 from the FSR image SRC; fcc_offset selects which fcc
  field is examined.
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_temp_free(t0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
986
/* End the TB two ways: continue at PC1 when R_COND is nonzero,
   otherwise at PC2.  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

/* Conditional branch with annulled delay slot: taken -> (npc, pc1) so
   the delay slot executes; not taken -> (npc + 4, npc + 8), skipping
   the delay slot.  Ends the TB.  */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->is_br = 1;
}

/* Non-annulled conditional branch: the new npc depends on cpu_cond.
   With a static npc, defer the choice by recording both candidates in
   jump_pc[] and setting npc = JUMP_PC; with a dynamic npc, select the
   new npc with a movcond immediately.  */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        /* pc = npc; npc = cpu_cond ? pc1 : npc + 4 */
        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}
1039
1040 static inline void gen_generic_branch(DisasContext *dc)
1041 {
1042     TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
1043     TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
1044     TCGv zero = tcg_const_tl(0);
1045
1046     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
1047
1048     tcg_temp_free(npc0);
1049     tcg_temp_free(npc1);
1050     tcg_temp_free(zero);
1051 }
1052
1053 /* call this function before using the condition register as it may
1054    have been set for a jump */
1055 static inline void flush_cond(DisasContext *dc)
1056 {
1057     if (dc->npc == JUMP_PC) {
1058         gen_generic_branch(dc);
1059         dc->npc = DYNAMIC_PC;
1060     }
1061 }
1062
1063 static inline void save_npc(DisasContext *dc)
1064 {
1065     if (dc->npc == JUMP_PC) {
1066         gen_generic_branch(dc);
1067         dc->npc = DYNAMIC_PC;
1068     } else if (dc->npc != DYNAMIC_PC) {
1069         tcg_gen_movi_tl(cpu_npc, dc->npc);
1070     }
1071 }
1072
1073 static inline void update_psr(DisasContext *dc)
1074 {
1075     if (dc->cc_op != CC_OP_FLAGS) {
1076         dc->cc_op = CC_OP_FLAGS;
1077         gen_helper_compute_psr(cpu_env);
1078     }
1079 }
1080
/* Flush translation-time pc/npc back to the CPU state, e.g. before a
   helper call that may raise an exception.  */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
1086
/* Raise exception WHICH at the current pc/npc.  */
static void gen_exception(DisasContext *dc, int which)
{
    TCGv_i32 t;

    save_state(dc);
    t = tcg_const_i32(which);
    gen_helper_raise_exception(cpu_env, t);
    tcg_temp_free_i32(t);
    dc->is_br = 1;  /* control does not continue past the exception */
}
1097
1098 static void gen_check_align(TCGv addr, int mask)
1099 {
1100     TCGv_i32 r_mask = tcg_const_i32(mask);
1101     gen_helper_check_align(cpu_env, addr, r_mask);
1102     tcg_temp_free_i32(r_mask);
1103 }
1104
1105 static inline void gen_mov_pc_npc(DisasContext *dc)
1106 {
1107     if (dc->npc == JUMP_PC) {
1108         gen_generic_branch(dc);
1109         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1110         dc->pc = DYNAMIC_PC;
1111     } else if (dc->npc == DYNAMIC_PC) {
1112         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1113         dc->pc = DYNAMIC_PC;
1114     } else {
1115         dc->pc = dc->npc;
1116     }
1117 }
1118
/* Advance the architectural pc/npc pair past the current instruction:
   pc <- npc, npc <- npc + 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1124
/* Release the temporaries held by *cmp.  Operands flagged as globals
   (g1/g2) belong to the CPU state and must not be freed.  */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1134
1135 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1136                         DisasContext *dc)
1137 {
1138     static int subcc_cond[16] = {
1139         TCG_COND_NEVER,
1140         TCG_COND_EQ,
1141         TCG_COND_LE,
1142         TCG_COND_LT,
1143         TCG_COND_LEU,
1144         TCG_COND_LTU,
1145         -1, /* neg */
1146         -1, /* overflow */
1147         TCG_COND_ALWAYS,
1148         TCG_COND_NE,
1149         TCG_COND_GT,
1150         TCG_COND_GE,
1151         TCG_COND_GTU,
1152         TCG_COND_GEU,
1153         -1, /* pos */
1154         -1, /* no overflow */
1155     };
1156
1157     static int logic_cond[16] = {
1158         TCG_COND_NEVER,
1159         TCG_COND_EQ,     /* eq:  Z */
1160         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1161         TCG_COND_LT,     /* lt:  N ^ V -> N */
1162         TCG_COND_EQ,     /* leu: C | Z -> Z */
1163         TCG_COND_NEVER,  /* ltu: C -> 0 */
1164         TCG_COND_LT,     /* neg: N */
1165         TCG_COND_NEVER,  /* vs:  V -> 0 */
1166         TCG_COND_ALWAYS,
1167         TCG_COND_NE,     /* ne:  !Z */
1168         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1169         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1170         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1171         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1172         TCG_COND_GE,     /* pos: !N */
1173         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1174     };
1175
1176     TCGv_i32 r_src;
1177     TCGv r_dst;
1178
1179 #ifdef TARGET_SPARC64
1180     if (xcc) {
1181         r_src = cpu_xcc;
1182     } else {
1183         r_src = cpu_psr;
1184     }
1185 #else
1186     r_src = cpu_psr;
1187 #endif
1188
1189     switch (dc->cc_op) {
1190     case CC_OP_LOGIC:
1191         cmp->cond = logic_cond[cond];
1192     do_compare_dst_0:
1193         cmp->is_bool = false;
1194         cmp->g2 = false;
1195         cmp->c2 = tcg_const_tl(0);
1196 #ifdef TARGET_SPARC64
1197         if (!xcc) {
1198             cmp->g1 = false;
1199             cmp->c1 = tcg_temp_new();
1200             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1201             break;
1202         }
1203 #endif
1204         cmp->g1 = true;
1205         cmp->c1 = cpu_cc_dst;
1206         break;
1207
1208     case CC_OP_SUB:
1209         switch (cond) {
1210         case 6:  /* neg */
1211         case 14: /* pos */
1212             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1213             goto do_compare_dst_0;
1214
1215         case 7: /* overflow */
1216         case 15: /* !overflow */
1217             goto do_dynamic;
1218
1219         default:
1220             cmp->cond = subcc_cond[cond];
1221             cmp->is_bool = false;
1222 #ifdef TARGET_SPARC64
1223             if (!xcc) {
1224                 /* Note that sign-extension works for unsigned compares as
1225                    long as both operands are sign-extended.  */
1226                 cmp->g1 = cmp->g2 = false;
1227                 cmp->c1 = tcg_temp_new();
1228                 cmp->c2 = tcg_temp_new();
1229                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1230                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1231                 break;
1232             }
1233 #endif
1234             cmp->g1 = cmp->g2 = true;
1235             cmp->c1 = cpu_cc_src;
1236             cmp->c2 = cpu_cc_src2;
1237             break;
1238         }
1239         break;
1240
1241     default:
1242     do_dynamic:
1243         gen_helper_compute_psr(cpu_env);
1244         dc->cc_op = CC_OP_FLAGS;
1245         /* FALLTHRU */
1246
1247     case CC_OP_FLAGS:
1248         /* We're going to generate a boolean result.  */
1249         cmp->cond = TCG_COND_NE;
1250         cmp->is_bool = true;
1251         cmp->g1 = cmp->g2 = false;
1252         cmp->c1 = r_dst = tcg_temp_new();
1253         cmp->c2 = tcg_const_tl(0);
1254
1255         switch (cond) {
1256         case 0x0:
1257             gen_op_eval_bn(r_dst);
1258             break;
1259         case 0x1:
1260             gen_op_eval_be(r_dst, r_src);
1261             break;
1262         case 0x2:
1263             gen_op_eval_ble(r_dst, r_src);
1264             break;
1265         case 0x3:
1266             gen_op_eval_bl(r_dst, r_src);
1267             break;
1268         case 0x4:
1269             gen_op_eval_bleu(r_dst, r_src);
1270             break;
1271         case 0x5:
1272             gen_op_eval_bcs(r_dst, r_src);
1273             break;
1274         case 0x6:
1275             gen_op_eval_bneg(r_dst, r_src);
1276             break;
1277         case 0x7:
1278             gen_op_eval_bvs(r_dst, r_src);
1279             break;
1280         case 0x8:
1281             gen_op_eval_ba(r_dst);
1282             break;
1283         case 0x9:
1284             gen_op_eval_bne(r_dst, r_src);
1285             break;
1286         case 0xa:
1287             gen_op_eval_bg(r_dst, r_src);
1288             break;
1289         case 0xb:
1290             gen_op_eval_bge(r_dst, r_src);
1291             break;
1292         case 0xc:
1293             gen_op_eval_bgu(r_dst, r_src);
1294             break;
1295         case 0xd:
1296             gen_op_eval_bcc(r_dst, r_src);
1297             break;
1298         case 0xe:
1299             gen_op_eval_bpos(r_dst, r_src);
1300             break;
1301         case 0xf:
1302             gen_op_eval_bvc(r_dst, r_src);
1303             break;
1304         }
1305         break;
1306     }
1307 }
1308
/* Fill in *cmp to test FP condition COND on field %fccCC.  Unlike
   gen_compare, the result is always materialized as a 0/1 boolean.  */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Bit offset of the selected fcc field relative to fcc0 (FSR bits
       11:10); fcc1..fcc3 live at bits 33:32, 35:34, 37:36.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* Evaluate the 4-bit FBfcc condition into r_dst.  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1388
1389 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1390                      DisasContext *dc)
1391 {
1392     DisasCompare cmp;
1393     gen_compare(&cmp, cc, cond, dc);
1394
1395     /* The interface is to return a boolean in r_dst.  */
1396     if (cmp.is_bool) {
1397         tcg_gen_mov_tl(r_dst, cmp.c1);
1398     } else {
1399         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1400     }
1401
1402     free_compare(&cmp);
1403 }
1404
1405 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1406 {
1407     DisasCompare cmp;
1408     gen_fcompare(&cmp, cc, cond);
1409
1410     /* The interface is to return a boolean in r_dst.  */
1411     if (cmp.is_bool) {
1412         tcg_gen_mov_tl(r_dst, cmp.c1);
1413     } else {
1414         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1415     }
1416
1417     free_compare(&cmp);
1418 }
1419
1420 #ifdef TARGET_SPARC64
/* Map a BPr/MOVr/FMOVr rcond field to the *inverse* of the desired TCG
   condition; gen_compare_reg applies tcg_invert_cond() to recover the
   direct test.  -1 marks the two reserved encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1432
1433 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1434 {
1435     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1436     cmp->is_bool = false;
1437     cmp->g1 = true;
1438     cmp->g2 = false;
1439     cmp->c1 = r_src;
1440     cmp->c2 = tcg_const_tl(0);
1441 }
1442
1443 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1444 {
1445     DisasCompare cmp;
1446     gen_compare_reg(&cmp, cond, r_src);
1447
1448     /* The interface is to return a boolean in r_dst.  */
1449     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1450
1451     free_compare(&cmp);
1452 }
1453 #endif
1454
/* Decode and emit an integer conditional branch (Bicc/BPcc).  OFFSET is
   the displacement relative to dc->pc; CC selects %icc vs %xcc; the
   annul bit (A) controls delay-slot execution.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With the address mask active, branch targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: the delay slot is skipped entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* "ba,a": delay slot annulled, jump straight to target.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* dc->npc may have been DYNAMIC_PC, in which case dc->pc
               inherits it and cpu_pc must hold the runtime value.  */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Truly conditional: evaluate into cpu_cond, then branch.  */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1494
/* Decode and emit an FP conditional branch (FBfcc/FBPfcc).  Mirrors
   do_branch, but the condition is evaluated on %fccCC.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With the address mask active, branch targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: the delay slot is skipped entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Delay slot annulled, jump straight to target.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* dc->npc may have been DYNAMIC_PC, in which case dc->pc
               inherits it and cpu_pc must hold the runtime value.  */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Truly conditional: evaluate into cpu_cond, then branch.  */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1534
1535 #ifdef TARGET_SPARC64
1536 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1537                           TCGv r_reg)
1538 {
1539     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1540     target_ulong target = dc->pc + offset;
1541
1542     if (unlikely(AM_CHECK(dc))) {
1543         target &= 0xffffffffULL;
1544     }
1545     flush_cond(dc);
1546     gen_cond_reg(cpu_cond, cond, r_reg);
1547     if (a) {
1548         gen_branch_a(dc, target);
1549     } else {
1550         gen_branch_n(dc, target);
1551     }
1552 }
1553
/* FP compare dispatchers (v9): FCCNO selects one of the four FP
   condition-code fields, each with its own helper.  The fcmpe* forms
   correspond to the FCMPE instructions; their behavior lives in the
   helpers.  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* Double-precision compare on %fcc[fccno].  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* Quad-precision compare on %fcc[fccno]; operands come from the QT0/QT1
   env slots.  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}

/* FCMPEs on %fcc[fccno].  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPEd on %fcc[fccno].  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPEq on %fcc[fccno]; operands come from QT0/QT1.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1661
1662 #else
1663
/* Pre-v9 FP compares: there is only one fcc field, so FCCNO is ignored
   (kept for interface symmetry with the v9 variants above).  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

/* Quad compare; operands come from the QT0/QT1 env slots.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1693 #endif
1694
/* Set the FSR ftt field to FSR_FLAGS and raise an FP exception trap.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    /* Replace the old ftt bits with the new trap type.  */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1701
/* If the FPU is disabled, raise TT_NFPU_INSN and return 1 so the caller
   can abandon translating the current insn.  User-mode emulation always
   has the FPU available.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1712
/* Clear the FSR ftt and current-exception (cexc) fields.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1717
/*
 * Templates for FP operations.  The letters after "fop_" give the
 * destination and source register sizes: F = 32-bit (TCGv_i32),
 * D = 64-bit (TCGv_i64), Q = 128-bit (passed via the QT0/QT1 env
 * slots).  The "ne_" variants do not call the IEEE exception check
 * helper after the operation.
 */

/* rd(F) = gen(env, rs(F)), then check IEEE exceptions.  */
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* rd(F) = gen(rs(F)); no env argument, no exception check.  */
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

/* rd(F) = gen(env, rs1(F), rs2(F)), then check IEEE exceptions.  */
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
/* rd(F) = gen(rs1(F), rs2(F)); no exception check.  */
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

/* rd(D) = gen(env, rs(D)), then check IEEE exceptions.  */
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1789
#ifdef TARGET_SPARC64
/* rd(D) = gen(rs(D)); no env argument, no exception check.  */
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

/* rd(D) = gen(env, rs1(D), rs2(D)), then check IEEE exceptions.  */
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
/* rd(D) = gen(rs1(D), rs2(D)); no exception check.  */
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

/* rd(D) = gen(%gsr, rs1(D), rs2(D)); no exception check (VIS ops that
   consult the Graphics Status Register).  */
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

/* rd(D) = gen(rd(D), rs1(D), rs2(D)) -- rd is also an input operand;
   no exception check.  */
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

/* rd(Q) = gen(env) with the source staged in QT1; exception check.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1876
#ifdef TARGET_SPARC64
/* rd(Q) = gen(env) with the source staged in QT1; no exception check.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
#endif

/* rd(Q) = gen(env) with sources staged in QT0/QT1; exception check.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

/* rd(D) = gen(env, rs1(F), rs2(F)), then check IEEE exceptions.  */
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

/* rd(Q) = gen(env, rs1(D), rs2(D)); result comes back via QT0.  */
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

#ifdef TARGET_SPARC64
/* rd(D) = gen(env, rs(F)), then check IEEE exceptions.  */
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

/* rd(D) = gen(env, rs(F)); takes env but skips the exception check.  */
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

/* rd(F) = gen(env, rs(D)), then check IEEE exceptions.  */
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* rd(F) = gen(env) with the source staged in QT1; exception check.  */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* rd(D) = gen(env) with the source staged in QT1; exception check.  */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

/* rd(Q) = gen(env, rs(F)); result via QT0, no exception check.  */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

/* rd(Q) = gen(env, rs(D)); result via QT0, no exception check.  */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2033
/* Atomic exchange for SWAP/SWAPA: dst <- [addr], [addr] <- src.
   gen_address_mask applies 32-bit address masking when required.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, TCGMemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
2040
2041 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
2042 {
2043     TCGv m1 = tcg_const_tl(0xff);
2044     gen_address_mask(dc, addr);
2045     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
2046     tcg_temp_free(m1);
2047 }
2048
/* asi moves */
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How an ASI access is to be implemented by the code generator.  */
typedef enum {
    GET_ASI_HELPER,   /* no inline implementation; fall back to helpers */
    GET_ASI_EXCP,     /* an exception was generated; emit nothing more */
    GET_ASI_DIRECT,   /* plain load/store using mem_idx */
    GET_ASI_DTWINX,   /* twin (quad/twinx ASI) access */
    GET_ASI_BLOCK,    /* block-transfer ASI */
    GET_ASI_SHORT,    /* short FP load/store ASI */
    GET_ASI_BCOPY,    /* sparc32 block copy (ASI_M_BCOPY, sta access) */
    GET_ASI_BFILL,    /* sparc32 block fill (ASI_M_BFILL, stda access) */
} ASIType;

/* Decoded ASI for the current memory access.  */
typedef struct {
    ASIType type;
    int asi;          /* raw ASI number */
    int mem_idx;      /* MMU index to use for the access */
    TCGMemOp memop;   /* size/endianness of the access */
} DisasASI;
2068
2069 static DisasASI get_asi(DisasContext *dc, int insn, TCGMemOp memop)
2070 {
2071     int asi = GET_FIELD(insn, 19, 26);
2072     ASIType type = GET_ASI_HELPER;
2073     int mem_idx = dc->mem_idx;
2074
2075 #ifndef TARGET_SPARC64
2076     /* Before v9, all asis are immediate and privileged.  */
2077     if (IS_IMM) {
2078         gen_exception(dc, TT_ILL_INSN);
2079         type = GET_ASI_EXCP;
2080     } else if (supervisor(dc)
2081                /* Note that LEON accepts ASI_USERDATA in user mode, for
2082                   use with CASA.  Also note that previous versions of
2083                   QEMU allowed (and old versions of gcc emitted) ASI_P
2084                   for LEON, which is incorrect.  */
2085                || (asi == ASI_USERDATA
2086                    && (dc->def->features & CPU_FEATURE_CASA))) {
2087         switch (asi) {
2088         case ASI_USERDATA:   /* User data access */
2089             mem_idx = MMU_USER_IDX;
2090             type = GET_ASI_DIRECT;
2091             break;
2092         case ASI_KERNELDATA: /* Supervisor data access */
2093             mem_idx = MMU_KERNEL_IDX;
2094             type = GET_ASI_DIRECT;
2095             break;
2096         case ASI_M_BYPASS:    /* MMU passthrough */
2097         case ASI_LEON_BYPASS: /* LEON MMU passthrough */
2098             mem_idx = MMU_PHYS_IDX;
2099             type = GET_ASI_DIRECT;
2100             break;
2101         case ASI_M_BCOPY: /* Block copy, sta access */
2102             mem_idx = MMU_KERNEL_IDX;
2103             type = GET_ASI_BCOPY;
2104             break;
2105         case ASI_M_BFILL: /* Block fill, stda access */
2106             mem_idx = MMU_KERNEL_IDX;
2107             type = GET_ASI_BFILL;
2108             break;
2109         }
2110     } else {
2111         gen_exception(dc, TT_PRIV_INSN);
2112         type = GET_ASI_EXCP;
2113     }
2114 #else
2115     if (IS_IMM) {
2116         asi = dc->asi;
2117     }
2118     /* With v9, all asis below 0x80 are privileged.  */
2119     /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
2120        down that bit into DisasContext.  For the moment that's ok,
2121        since the direct implementations below doesn't have any ASIs
2122        in the restricted [0x30, 0x7f] range, and the check will be
2123        done properly in the helper.  */
2124     if (!supervisor(dc) && asi < 0x80) {
2125         gen_exception(dc, TT_PRIV_ACT);
2126         type = GET_ASI_EXCP;
2127     } else {
2128         switch (asi) {
2129         case ASI_REAL:      /* Bypass */
2130         case ASI_REAL_IO:   /* Bypass, non-cacheable */
2131         case ASI_REAL_L:    /* Bypass LE */
2132         case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
2133         case ASI_TWINX_REAL:   /* Real address, twinx */
2134         case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
2135         case ASI_QUAD_LDD_PHYS:
2136         case ASI_QUAD_LDD_PHYS_L:
2137             mem_idx = MMU_PHYS_IDX;
2138             break;
2139         case ASI_N:  /* Nucleus */
2140         case ASI_NL: /* Nucleus LE */
2141         case ASI_TWINX_N:
2142         case ASI_TWINX_NL:
2143         case ASI_NUCLEUS_QUAD_LDD:
2144         case ASI_NUCLEUS_QUAD_LDD_L:
2145             if (hypervisor(dc)) {
2146                 mem_idx = MMU_PHYS_IDX;
2147             } else {
2148                 mem_idx = MMU_NUCLEUS_IDX;
2149             }
2150             break;
2151         case ASI_AIUP:  /* As if user primary */
2152         case ASI_AIUPL: /* As if user primary LE */
2153         case ASI_TWINX_AIUP:
2154         case ASI_TWINX_AIUP_L:
2155         case ASI_BLK_AIUP_4V:
2156         case ASI_BLK_AIUP_L_4V:
2157         case ASI_BLK_AIUP:
2158         case ASI_BLK_AIUPL:
2159             mem_idx = MMU_USER_IDX;
2160             break;
2161         case ASI_AIUS:  /* As if user secondary */
2162         case ASI_AIUSL: /* As if user secondary LE */
2163         case ASI_TWINX_AIUS:
2164         case ASI_TWINX_AIUS_L:
2165         case ASI_BLK_AIUS_4V:
2166         case ASI_BLK_AIUS_L_4V:
2167         case ASI_BLK_AIUS:
2168         case ASI_BLK_AIUSL:
2169             mem_idx = MMU_USER_SECONDARY_IDX;
2170             break;
2171         case ASI_S:  /* Secondary */
2172         case ASI_SL: /* Secondary LE */
2173         case ASI_TWINX_S:
2174         case ASI_TWINX_SL:
2175         case ASI_BLK_COMMIT_S:
2176         case ASI_BLK_S:
2177         case ASI_BLK_SL:
2178         case ASI_FL8_S:
2179         case ASI_FL8_SL:
2180         case ASI_FL16_S:
2181         case ASI_FL16_SL:
2182             if (mem_idx == MMU_USER_IDX) {
2183                 mem_idx = MMU_USER_SECONDARY_IDX;
2184             } else if (mem_idx == MMU_KERNEL_IDX) {
2185                 mem_idx = MMU_KERNEL_SECONDARY_IDX;
2186             }
2187             break;
2188         case ASI_P:  /* Primary */
2189         case ASI_PL: /* Primary LE */
2190         case ASI_TWINX_P:
2191         case ASI_TWINX_PL:
2192         case ASI_BLK_COMMIT_P:
2193         case ASI_BLK_P:
2194         case ASI_BLK_PL:
2195         case ASI_FL8_P:
2196         case ASI_FL8_PL:
2197         case ASI_FL16_P:
2198         case ASI_FL16_PL:
2199             break;
2200         }
2201         switch (asi) {
2202         case ASI_REAL:
2203         case ASI_REAL_IO:
2204         case ASI_REAL_L:
2205         case ASI_REAL_IO_L:
2206         case ASI_N:
2207         case ASI_NL:
2208         case ASI_AIUP:
2209         case ASI_AIUPL:
2210         case ASI_AIUS:
2211         case ASI_AIUSL:
2212         case ASI_S:
2213         case ASI_SL:
2214         case ASI_P:
2215         case ASI_PL:
2216             type = GET_ASI_DIRECT;
2217             break;
2218         case ASI_TWINX_REAL:
2219         case ASI_TWINX_REAL_L:
2220         case ASI_TWINX_N:
2221         case ASI_TWINX_NL:
2222         case ASI_TWINX_AIUP:
2223         case ASI_TWINX_AIUP_L:
2224         case ASI_TWINX_AIUS:
2225         case ASI_TWINX_AIUS_L:
2226         case ASI_TWINX_P:
2227         case ASI_TWINX_PL:
2228         case ASI_TWINX_S:
2229         case ASI_TWINX_SL:
2230         case ASI_QUAD_LDD_PHYS:
2231         case ASI_QUAD_LDD_PHYS_L:
2232         case ASI_NUCLEUS_QUAD_LDD:
2233         case ASI_NUCLEUS_QUAD_LDD_L:
2234             type = GET_ASI_DTWINX;
2235             break;
2236         case ASI_BLK_COMMIT_P:
2237         case ASI_BLK_COMMIT_S:
2238         case ASI_BLK_AIUP_4V:
2239         case ASI_BLK_AIUP_L_4V:
2240         case ASI_BLK_AIUP:
2241         case ASI_BLK_AIUPL:
2242         case ASI_BLK_AIUS_4V:
2243         case ASI_BLK_AIUS_L_4V:
2244         case ASI_BLK_AIUS:
2245         case ASI_BLK_AIUSL:
2246         case ASI_BLK_S:
2247         case ASI_BLK_SL:
2248         case ASI_BLK_P:
2249         case ASI_BLK_PL:
2250             type = GET_ASI_BLOCK;
2251             break;
2252         case ASI_FL8_S:
2253         case ASI_FL8_SL:
2254         case ASI_FL8_P:
2255         case ASI_FL8_PL:
2256             memop = MO_UB;
2257             type = GET_ASI_SHORT;
2258             break;
2259         case ASI_FL16_S:
2260         case ASI_FL16_SL:
2261         case ASI_FL16_P:
2262         case ASI_FL16_PL:
2263             memop = MO_TEUW;
2264             type = GET_ASI_SHORT;
2265             break;
2266         }
2267         /* The little-endian asis all have bit 3 set.  */
2268         if (asi & 8) {
2269             memop ^= MO_BSWAP;
2270         }
2271     }
2272 #endif
2273
2274     return (DisasASI){ type, asi, mem_idx, memop };
2275 }
2276
/* Generate code for a single integer load via an alternate-space ASI.
   INSN supplies the ASI (immediate field or %asi); MEMOP gives the access
   size and endianness.  The loaded value is written to DST.  */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted a privilege exception; nothing to do.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The ASI maps directly to an MMU index: inline load.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Unusual ASI: fall back to the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always returns 64 bits; truncate the result
                   to the 32-bit target register width.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2314
/* Generate code for a single integer store via an alternate-space ASI.
   INSN supplies the ASI; MEMOP gives the access size and endianness.
   SRC is the value to store at ADDR.  */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted a privilege exception; nothing to do.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The ASI maps directly to an MMU index: inline store.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            /* Align both pointers down to a 4-byte boundary.  */
            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }

            tcg_temp_free(saddr);
            tcg_temp_free(daddr);
            tcg_temp_free(four);
            tcg_temp_free_i32(tmp);
        }
        break;
#endif
    default:
        /* Unusual ASI: fall back to the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* Widen the 32-bit source to the 64-bit helper argument.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2387
/* Generate code for SWAPA: atomically exchange the 32-bit word in SRC
   with the word at ADDR in the ASI space given by INSN; the previous
   memory contents are written to DST.  */
static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
                         TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_TEUL);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Privilege exception already emitted by get_asi.  */
        break;
    case GET_ASI_DIRECT:
        gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
        break;
    default:
        /* ??? Should be DAE_invalid_asi.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2405
/* Generate code for CASA: 32-bit compare-and-swap at ADDR.  If the word
   at ADDR equals CMPV, register RD is stored there; in either case the
   old memory value is written back to RD.  */
static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
                        int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUL);
    TCGv oldv;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Privilege exception already emitted by get_asi.  */
        return;
    case GET_ASI_DIRECT:
        oldv = tcg_temp_new();
        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
                                  da.mem_idx, da.memop);
        gen_store_gpr(dc, rd, oldv);
        tcg_temp_free(oldv);
        break;
    default:
        /* ??? Should be DAE_invalid_asi.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2428
/* Generate code for LDSTUBA: atomic load-store-unsigned-byte in the ASI
   space given by INSN; the old byte at ADDR is written to DST.  */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Privilege exception already emitted by get_asi.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? Should be DAE_invalid_asi.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2445 #endif
2446
2447 #ifdef TARGET_SPARC64
/* Generate code for ldfa/lddfa/ldqfa: floating-point load with ASI.
   SIZE is 4, 8, or 16 bytes (single/double/quad); RD is the FP register
   number as encoded in the instruction.  */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Privilege exception already emitted by get_asi.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load the high half into a temp first so that a fault on the
               second access leaves the register pair unmodified.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            tcg_temp_free_i64(d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            /* 64-byte block load: eight consecutive doubles.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                tcg_temp_free_i64(d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                tcg_temp_free_i64(d64);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2561
/* Generate code for stfa/stdfa/stqfa: floating-point store with ASI.
   SIZE is 4, 8, or 16 bytes (single/double/quad); RD is the FP register
   number as encoded in the instruction.  */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Privilege exception already emitted by get_asi.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            /* 64-byte block store: eight consecutive doubles.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2644
/* Generate code for the 64-bit LDDA instruction: load a 64-bit doubleword
   (or a 128-bit twin for the TWINX asis) into the register pair RD/RD+1.  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Privilege exception already emitted by get_asi.  */
        return;

    case GET_ASI_DTWINX:
        /* Twin load: two adjacent 64-bit words, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i32(r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2710
/* Generate code for the 64-bit STDA instruction: store the register pair
   RD (HI) / RD+1 (LO) as one 64-bit doubleword (or a 128-bit twin for the
   TWINX asis).  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Privilege exception already emitted by get_asi.  */
        break;

    case GET_ASI_DTWINX:
        /* Twin store: two adjacent 64-bit words, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
            tcg_temp_free_i64(t64);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i64(t64);
        }
        break;
    }
}
2770
/* Generate code for CASXA: 64-bit compare-and-swap at ADDR.  If the
   doubleword at ADDR equals CMPV, register RD is stored there; in either
   case the old memory value is written back to RD.  */
static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv oldv;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Privilege exception already emitted by get_asi.  */
        return;
    case GET_ASI_DIRECT:
        oldv = tcg_temp_new();
        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
                                  da.mem_idx, da.memop);
        gen_store_gpr(dc, rd, oldv);
        tcg_temp_free(oldv);
        break;
    default:
        /* ??? Should be DAE_invalid_asi.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2793
2794 #elif !defined(CONFIG_USER_ONLY)
/* Generate code for the 32-bit LDDA instruction: load one 64-bit
   doubleword and split it into the 32-bit register pair RD/RD+1.  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Privilege exception already emitted by get_asi; release the
           temporary before bailing out.  */
        tcg_temp_free_i64(t64);
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Unusual ASI: fall back to the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    /* Split the doubleword: high half to RD, low half to RD+1.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2832
/* Generate code for the 32-bit STDA instruction: store the 32-bit
   register pair RD (HI) / RD+1 (LO) as one 64-bit doubleword.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Assemble the doubleword: HI in the upper half, LO in the lower.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Privilege exception already emitted by get_asi.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }

            tcg_temp_free(d_addr);
            tcg_temp_free(eight);
        }
        break;
    default:
        /* Unusual ASI: fall back to the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_temp_free_i64(t64);
}
2885 #endif
2886
2887 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2888 {
2889     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2890     return gen_load_gpr(dc, rs1);
2891 }
2892
2893 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2894 {
2895     if (IS_IMM) { /* immediate */
2896         target_long simm = GET_FIELDs(insn, 19, 31);
2897         TCGv t = get_temp_tl(dc);
2898         tcg_gen_movi_tl(t, simm);
2899         return t;
2900     } else {      /* register */
2901         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2902         return gen_load_gpr(dc, rs2);
2903     }
2904 }
2905
2906 #ifdef TARGET_SPARC64
/* Generate code for a conditional single-precision FP move (FMOVScc /
   FMOVSr): if CMP holds, copy FP register RS to RD, else leave RD alone.  */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* The comparison value is already a 0/1 boolean; just narrow it.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        /* Evaluate the comparison to a boolean, then narrow.  */
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    /* dst = (c32 != 0) ? s1 : s2, i.e. keep the old value on false.  */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
2935
/* Generate code for a conditional double-precision FP move: if CMP holds,
   copy FP register RS to RD, else keep RD's current value.  */
static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
                        gen_load_fpr_D(dc, rs),
                        gen_load_fpr_D(dc, rd));
    gen_store_fpr_D(dc, rd, dst);
}
2944
/* Generate code for a conditional quad-precision FP move: if CMP holds,
   copy the quad register pair RS to RD via two 64-bit movconds.  */
static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    /* Both halves use the same condition, so the pair moves atomically
       from the guest's point of view.  */
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(dc, qd);
}
2957
2958 #ifndef CONFIG_USER_ONLY
/* Set R_TSPTR to point at the trap state for the current trap level,
   i.e. &env->ts[env->tl & MAXTL_MASK].  */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit byte offset to pointer width before adding.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
2983 #endif
2984
/* Generate code for the VIS EDGE8/16/32 instructions (and their LE/cc
   variants): compute an edge mask of WIDTH-bit elements from addresses
   S1 and S2 into DST.  If CC, also set the condition codes from S1 - S2.
   NOTE: this clobbers S1 and S2 (they are masked in place below).  */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* Record a subcc-style comparison of the two addresses.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* Extract the table indices from the low bits of each address.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    /* Look up both table entries: lo1 from TABL, lo2 from TABR.  */
    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the 8-byte-aligned (and, with AM, 32-bit-masked) addresses.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
3087
3088 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
3089 {
3090     TCGv tmp = tcg_temp_new();
3091
3092     tcg_gen_add_tl(tmp, s1, s2);
3093     tcg_gen_andi_tl(dst, tmp, -8);
3094     if (left) {
3095         tcg_gen_neg_tl(tmp, tmp);
3096     }
3097     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
3098
3099     tcg_temp_free(tmp);
3100 }
3101
3102 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
3103 {
3104     TCGv t1, t2, shift;
3105
3106     t1 = tcg_temp_new();
3107     t2 = tcg_temp_new();
3108     shift = tcg_temp_new();
3109
3110     tcg_gen_andi_tl(shift, gsr, 7);
3111     tcg_gen_shli_tl(shift, shift, 3);
3112     tcg_gen_shl_tl(t1, s1, shift);
3113
3114     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
3115        shift of (up to 63) followed by a constant shift of 1.  */
3116     tcg_gen_xori_tl(shift, shift, 63);
3117     tcg_gen_shr_tl(t2, s2, shift);
3118     tcg_gen_shri_tl(t2, t2, 1);
3119
3120     tcg_gen_or_tl(dst, t1, t2);
3121
3122     tcg_temp_free(t1);
3123     tcg_temp_free(t2);
3124     tcg_temp_free(shift);
3125 }
3126 #endif
3127
/* Bail out of decoding with an illegal-instruction trap when the CPU
   model being emulated lacks the named integer-unit feature.  Only
   usable inside disas_sparc_insn, which defines the illegal_insn label.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Likewise, but jump to the nfpu_insn handler for a missing FPU feature.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3134
3135 /* before an instruction, dc->pc must be static */
3136 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3137 {
3138     unsigned int opc, rs1, rs2, rd;
3139     TCGv cpu_src1, cpu_src2;
3140     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3141     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3142     target_long simm;
3143
3144     opc = GET_FIELD(insn, 0, 1);
3145     rd = GET_FIELD(insn, 2, 6);
3146
3147     switch (opc) {
3148     case 0:                     /* branches/sethi */
3149         {
3150             unsigned int xop = GET_FIELD(insn, 7, 9);
3151             int32_t target;
3152             switch (xop) {
3153 #ifdef TARGET_SPARC64
3154             case 0x1:           /* V9 BPcc */
3155                 {
3156                     int cc;
3157
3158                     target = GET_FIELD_SP(insn, 0, 18);
3159                     target = sign_extend(target, 19);
3160                     target <<= 2;
3161                     cc = GET_FIELD_SP(insn, 20, 21);
3162                     if (cc == 0)
3163                         do_branch(dc, target, insn, 0);
3164                     else if (cc == 2)
3165                         do_branch(dc, target, insn, 1);
3166                     else
3167                         goto illegal_insn;
3168                     goto jmp_insn;
3169                 }
3170             case 0x3:           /* V9 BPr */
3171                 {
3172                     target = GET_FIELD_SP(insn, 0, 13) |
3173                         (GET_FIELD_SP(insn, 20, 21) << 14);
3174                     target = sign_extend(target, 16);
3175                     target <<= 2;
3176                     cpu_src1 = get_src1(dc, insn);
3177                     do_branch_reg(dc, target, insn, cpu_src1);
3178                     goto jmp_insn;
3179                 }
3180             case 0x5:           /* V9 FBPcc */
3181                 {
3182                     int cc = GET_FIELD_SP(insn, 20, 21);
3183                     if (gen_trap_ifnofpu(dc)) {
3184                         goto jmp_insn;
3185                     }
3186                     target = GET_FIELD_SP(insn, 0, 18);
3187                     target = sign_extend(target, 19);
3188                     target <<= 2;
3189                     do_fbranch(dc, target, insn, cc);
3190                     goto jmp_insn;
3191                 }
3192 #else
3193             case 0x7:           /* CBN+x */
3194                 {
3195                     goto ncp_insn;
3196                 }
3197 #endif
3198             case 0x2:           /* BN+x */
3199                 {
3200                     target = GET_FIELD(insn, 10, 31);
3201                     target = sign_extend(target, 22);
3202                     target <<= 2;
3203                     do_branch(dc, target, insn, 0);
3204                     goto jmp_insn;
3205                 }
3206             case 0x6:           /* FBN+x */
3207                 {
3208                     if (gen_trap_ifnofpu(dc)) {
3209                         goto jmp_insn;
3210                     }
3211                     target = GET_FIELD(insn, 10, 31);
3212                     target = sign_extend(target, 22);
3213                     target <<= 2;
3214                     do_fbranch(dc, target, insn, 0);
3215                     goto jmp_insn;
3216                 }
3217             case 0x4:           /* SETHI */
3218                 /* Special-case %g0 because that's the canonical nop.  */
3219                 if (rd) {
3220                     uint32_t value = GET_FIELD(insn, 10, 31);
3221                     TCGv t = gen_dest_gpr(dc, rd);
3222                     tcg_gen_movi_tl(t, value << 10);
3223                     gen_store_gpr(dc, rd, t);
3224                 }
3225                 break;
3226             case 0x0:           /* UNIMPL */
3227             default:
3228                 goto illegal_insn;
3229             }
3230             break;
3231         }
3232         break;
3233     case 1:                     /*CALL*/
3234         {
3235             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3236             TCGv o7 = gen_dest_gpr(dc, 15);
3237
3238             tcg_gen_movi_tl(o7, dc->pc);
3239             gen_store_gpr(dc, 15, o7);
3240             target += dc->pc;
3241             gen_mov_pc_npc(dc);
3242 #ifdef TARGET_SPARC64
3243             if (unlikely(AM_CHECK(dc))) {
3244                 target &= 0xffffffffULL;
3245             }
3246 #endif
3247             dc->npc = target;
3248         }
3249         goto jmp_insn;
3250     case 2:                     /* FPU & Logical Operations */
3251         {
3252             unsigned int xop = GET_FIELD(insn, 7, 12);
3253             TCGv cpu_dst = get_temp_tl(dc);
3254             TCGv cpu_tmp0;
3255
3256             if (xop == 0x3a) {  /* generate trap */
3257                 int cond = GET_FIELD(insn, 3, 6);
3258                 TCGv_i32 trap;
3259                 TCGLabel *l1 = NULL;
3260                 int mask;
3261
3262                 if (cond == 0) {
3263                     /* Trap never.  */
3264                     break;
3265                 }
3266
3267                 save_state(dc);
3268
3269                 if (cond != 8) {
3270                     /* Conditional trap.  */
3271                     DisasCompare cmp;
3272 #ifdef TARGET_SPARC64
3273                     /* V9 icc/xcc */
3274                     int cc = GET_FIELD_SP(insn, 11, 12);
3275                     if (cc == 0) {
3276                         gen_compare(&cmp, 0, cond, dc);
3277                     } else if (cc == 2) {
3278                         gen_compare(&cmp, 1, cond, dc);
3279                     } else {
3280                         goto illegal_insn;
3281                     }
3282 #else
3283                     gen_compare(&cmp, 0, cond, dc);
3284 #endif
3285                     l1 = gen_new_label();
3286                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3287                                       cmp.c1, cmp.c2, l1);
3288                     free_compare(&cmp);
3289                 }
3290
3291                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3292                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3293
3294                 /* Don't use the normal temporaries, as they may well have
3295                    gone out of scope with the branch above.  While we're
3296                    doing that we might as well pre-truncate to 32-bit.  */
3297                 trap = tcg_temp_new_i32();
3298
3299                 rs1 = GET_FIELD_SP(insn, 14, 18);
3300                 if (IS_IMM) {
3301                     rs2 = GET_FIELD_SP(insn, 0, 7);
3302                     if (rs1 == 0) {
3303                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3304                         /* Signal that the trap value is fully constant.  */
3305                         mask = 0;
3306                     } else {
3307                         TCGv t1 = gen_load_gpr(dc, rs1);
3308                         tcg_gen_trunc_tl_i32(trap, t1);
3309                         tcg_gen_addi_i32(trap, trap, rs2);
3310                     }
3311                 } else {
3312                     TCGv t1, t2;
3313                     rs2 = GET_FIELD_SP(insn, 0, 4);
3314                     t1 = gen_load_gpr(dc, rs1);
3315                     t2 = gen_load_gpr(dc, rs2);
3316                     tcg_gen_add_tl(t1, t1, t2);
3317                     tcg_gen_trunc_tl_i32(trap, t1);
3318                 }
3319                 if (mask != 0) {
3320                     tcg_gen_andi_i32(trap, trap, mask);
3321                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3322                 }
3323
3324                 gen_helper_raise_exception(cpu_env, trap);
3325                 tcg_temp_free_i32(trap);
3326
3327                 if (cond == 8) {
3328                     /* An unconditional trap ends the TB.  */
3329                     dc->is_br = 1;
3330                     goto jmp_insn;
3331                 } else {
3332                     /* A conditional trap falls through to the next insn.  */
3333                     gen_set_label(l1);
3334                     break;
3335                 }
3336             } else if (xop == 0x28) {
3337                 rs1 = GET_FIELD(insn, 13, 17);
3338                 switch(rs1) {
3339                 case 0: /* rdy */
3340 #ifndef TARGET_SPARC64
3341                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3342                                        manual, rdy on the microSPARC
3343                                        II */
3344                 case 0x0f:          /* stbar in the SPARCv8 manual,
3345                                        rdy on the microSPARC II */
3346                 case 0x10 ... 0x1f: /* implementation-dependent in the
3347                                        SPARCv8 manual, rdy on the
3348                                        microSPARC II */
3349                     /* Read Asr17 */
3350                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3351                         TCGv t = gen_dest_gpr(dc, rd);
3352                         /* Read Asr17 for a Leon3 monoprocessor */
3353                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3354                         gen_store_gpr(dc, rd, t);
3355                         break;
3356                     }
3357 #endif
3358                     gen_store_gpr(dc, rd, cpu_y);
3359                     break;
3360 #ifdef TARGET_SPARC64
3361                 case 0x2: /* V9 rdccr */
3362                     update_psr(dc);
3363                     gen_helper_rdccr(cpu_dst, cpu_env);
3364                     gen_store_gpr(dc, rd, cpu_dst);
3365                     break;
3366                 case 0x3: /* V9 rdasi */
3367                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3368                     gen_store_gpr(dc, rd, cpu_dst);
3369                     break;
3370                 case 0x4: /* V9 rdtick */
3371                     {
3372                         TCGv_ptr r_tickptr;
3373                         TCGv_i32 r_const;
3374
3375                         r_tickptr = tcg_temp_new_ptr();
3376                         r_const = tcg_const_i32(dc->mem_idx);
3377                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3378                                        offsetof(CPUSPARCState, tick));
3379                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3380                                                   r_const);
3381                         tcg_temp_free_ptr(r_tickptr);
3382                         tcg_temp_free_i32(r_const);
3383                         gen_store_gpr(dc, rd, cpu_dst);
3384                     }
3385                     break;
3386                 case 0x5: /* V9 rdpc */
3387                     {
3388                         TCGv t = gen_dest_gpr(dc, rd);
3389                         if (unlikely(AM_CHECK(dc))) {
3390                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3391                         } else {
3392                             tcg_gen_movi_tl(t, dc->pc);
3393                         }
3394                         gen_store_gpr(dc, rd, t);
3395                     }
3396                     break;
3397                 case 0x6: /* V9 rdfprs */
3398                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3399                     gen_store_gpr(dc, rd, cpu_dst);
3400                     break;
3401                 case 0xf: /* V9 membar */
3402                     break; /* no effect */
3403                 case 0x13: /* Graphics Status */
3404                     if (gen_trap_ifnofpu(dc)) {
3405                         goto jmp_insn;
3406                     }
3407                     gen_store_gpr(dc, rd, cpu_gsr);
3408                     break;
3409                 case 0x16: /* Softint */
3410                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3411                                      offsetof(CPUSPARCState, softint));
3412                     gen_store_gpr(dc, rd, cpu_dst);
3413                     break;
3414                 case 0x17: /* Tick compare */
3415                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3416                     break;
3417                 case 0x18: /* System tick */
3418                     {
3419                         TCGv_ptr r_tickptr;
3420                         TCGv_i32 r_const;
3421
3422                         r_tickptr = tcg_temp_new_ptr();
3423                         r_const = tcg_const_i32(dc->mem_idx);
3424                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3425                                        offsetof(CPUSPARCState, stick));
3426                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3427                                                   r_const);
3428                         tcg_temp_free_ptr(r_tickptr);
3429                         tcg_temp_free_i32(r_const);
3430                         gen_store_gpr(dc, rd, cpu_dst);
3431                     }
3432                     break;
3433                 case 0x19: /* System tick compare */
3434                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3435                     break;
3436                 case 0x1a: /* UltraSPARC-T1 Strand status */
3437                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3438                      * this ASR as impl. dep
3439                      */
3440                     CHECK_IU_FEATURE(dc, HYPV);
3441                     {
3442                         TCGv t = gen_dest_gpr(dc, rd);
3443                         tcg_gen_movi_tl(t, 1UL);
3444                         gen_store_gpr(dc, rd, t);
3445                     }
3446                     break;
3447                 case 0x10: /* Performance Control */
3448                 case 0x11: /* Performance Instrumentation Counter */
3449                 case 0x12: /* Dispatch Control */
3450                 case 0x14: /* Softint set, WO */
3451                 case 0x15: /* Softint clear, WO */
3452 #endif
3453                 default:
3454                     goto illegal_insn;
3455                 }
3456 #if !defined(CONFIG_USER_ONLY)
3457             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3458 #ifndef TARGET_SPARC64
3459                 if (!supervisor(dc)) {
3460                     goto priv_insn;
3461                 }
3462                 update_psr(dc);
3463                 gen_helper_rdpsr(cpu_dst, cpu_env);
3464 #else
3465                 CHECK_IU_FEATURE(dc, HYPV);
3466                 if (!hypervisor(dc))
3467                     goto priv_insn;
3468                 rs1 = GET_FIELD(insn, 13, 17);
3469                 switch (rs1) {
3470                 case 0: // hpstate
3471                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3472                                    offsetof(CPUSPARCState, hpstate));
3473                     break;
3474                 case 1: // htstate
3475                     // gen_op_rdhtstate();
3476                     break;
3477                 case 3: // hintp
3478                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3479                     break;
3480                 case 5: // htba
3481                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3482                     break;
3483                 case 6: // hver
3484                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3485                     break;
3486                 case 31: // hstick_cmpr
3487                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3488                     break;
3489                 default:
3490                     goto illegal_insn;
3491                 }
3492 #endif
3493                 gen_store_gpr(dc, rd, cpu_dst);
3494                 break;
3495             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3496                 if (!supervisor(dc)) {
3497                     goto priv_insn;
3498                 }
3499                 cpu_tmp0 = get_temp_tl(dc);
3500 #ifdef TARGET_SPARC64
3501                 rs1 = GET_FIELD(insn, 13, 17);
3502                 switch (rs1) {
3503                 case 0: // tpc
3504                     {
3505                         TCGv_ptr r_tsptr;
3506
3507                         r_tsptr = tcg_temp_new_ptr();
3508                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3509                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3510                                       offsetof(trap_state, tpc));
3511                         tcg_temp_free_ptr(r_tsptr);
3512                     }
3513                     break;
3514                 case 1: // tnpc
3515                     {
3516                         TCGv_ptr r_tsptr;
3517
3518                         r_tsptr = tcg_temp_new_ptr();
3519                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3520                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3521                                       offsetof(trap_state, tnpc));
3522                         tcg_temp_free_ptr(r_tsptr);
3523                     }
3524                     break;
3525                 case 2: // tstate
3526                     {
3527                         TCGv_ptr r_tsptr;
3528
3529                         r_tsptr = tcg_temp_new_ptr();
3530                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3531                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3532                                       offsetof(trap_state, tstate));
3533                         tcg_temp_free_ptr(r_tsptr);
3534                     }
3535                     break;
3536                 case 3: // tt
3537                     {
3538                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3539
3540                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3541                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3542                                          offsetof(trap_state, tt));
3543                         tcg_temp_free_ptr(r_tsptr);
3544                     }
3545                     break;
3546                 case 4: // tick
3547                     {
3548                         TCGv_ptr r_tickptr;
3549                         TCGv_i32 r_const;
3550
3551                         r_tickptr = tcg_temp_new_ptr();
3552                         r_const = tcg_const_i32(dc->mem_idx);
3553                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3554                                        offsetof(CPUSPARCState, tick));
3555                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3556                                                   r_tickptr, r_const);
3557                         tcg_temp_free_ptr(r_tickptr);
3558                         tcg_temp_free_i32(r_const);
3559                     }
3560                     break;
3561                 case 5: // tba
3562                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3563                     break;
3564                 case 6: // pstate
3565                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3566                                      offsetof(CPUSPARCState, pstate));
3567                     break;
3568                 case 7: // tl
3569                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3570                                      offsetof(CPUSPARCState, tl));
3571                     break;
3572                 case 8: // pil
3573                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3574                                      offsetof(CPUSPARCState, psrpil));
3575                     break;
3576                 case 9: // cwp
3577                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3578                     break;
3579                 case 10: // cansave
3580                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3581                                      offsetof(CPUSPARCState, cansave));
3582                     break;
3583                 case 11: // canrestore
3584                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3585                                      offsetof(CPUSPARCState, canrestore));
3586                     break;
3587                 case 12: // cleanwin
3588                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3589                                      offsetof(CPUSPARCState, cleanwin));
3590                     break;
3591                 case 13: // otherwin
3592                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3593                                      offsetof(CPUSPARCState, otherwin));
3594                     break;
3595                 case 14: // wstate
3596                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3597                                      offsetof(CPUSPARCState, wstate));
3598                     break;
3599                 case 16: // UA2005 gl
3600                     CHECK_IU_FEATURE(dc, GL);
3601                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3602                                      offsetof(CPUSPARCState, gl));
3603                     break;
3604                 case 26: // UA2005 strand status
3605                     CHECK_IU_FEATURE(dc, HYPV);
3606                     if (!hypervisor(dc))
3607                         goto priv_insn;
3608                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3609                     break;
3610                 case 31: // ver
3611                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3612                     break;
3613                 case 15: // fq
3614                 default:
3615                     goto illegal_insn;
3616                 }
3617 #else
3618                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3619 #endif
3620                 gen_store_gpr(dc, rd, cpu_tmp0);
3621                 break;
3622             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3623 #ifdef TARGET_SPARC64
3624                 gen_helper_flushw(cpu_env);
3625 #else
3626                 if (!supervisor(dc))
3627                     goto priv_insn;
3628                 gen_store_gpr(dc, rd, cpu_tbr);
3629 #endif
3630                 break;
3631 #endif
3632             } else if (xop == 0x34) {   /* FPU Operations */
3633                 if (gen_trap_ifnofpu(dc)) {
3634                     goto jmp_insn;
3635                 }
3636                 gen_op_clear_ieee_excp_and_FTT();
3637                 rs1 = GET_FIELD(insn, 13, 17);
3638                 rs2 = GET_FIELD(insn, 27, 31);
3639                 xop = GET_FIELD(insn, 18, 26);
3640
3641                 switch (xop) {
3642                 case 0x1: /* fmovs */
3643                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3644                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3645                     break;
3646                 case 0x5: /* fnegs */
3647                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3648                     break;
3649                 case 0x9: /* fabss */
3650                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3651                     break;
3652                 case 0x29: /* fsqrts */
3653                     CHECK_FPU_FEATURE(dc, FSQRT);
3654                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3655                     break;
3656                 case 0x2a: /* fsqrtd */
3657                     CHECK_FPU_FEATURE(dc, FSQRT);
3658                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3659                     break;
3660                 case 0x2b: /* fsqrtq */
3661                     CHECK_FPU_FEATURE(dc, FLOAT128);
3662                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3663                     break;
3664                 case 0x41: /* fadds */
3665                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3666                     break;
3667                 case 0x42: /* faddd */
3668                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3669                     break;
3670                 case 0x43: /* faddq */
3671                     CHECK_FPU_FEATURE(dc, FLOAT128);
3672                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3673                     break;
3674                 case 0x45: /* fsubs */
3675                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3676                     break;
3677                 case 0x46: /* fsubd */
3678                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3679                     break;
3680                 case 0x47: /* fsubq */
3681                     CHECK_FPU_FEATURE(dc, FLOAT128);
3682                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3683                     break;
3684                 case 0x49: /* fmuls */
3685                     CHECK_FPU_FEATURE(dc, FMUL);
3686                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3687                     break;
3688                 case 0x4a: /* fmuld */
3689                     CHECK_FPU_FEATURE(dc, FMUL);
3690                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3691                     break;
3692                 case 0x4b: /* fmulq */
3693                     CHECK_FPU_FEATURE(dc, FLOAT128);
3694                     CHECK_FPU_FEATURE(dc, FMUL);
3695                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3696                     break;
3697                 case 0x4d: /* fdivs */
3698                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3699                     break;
3700                 case 0x4e: /* fdivd */
3701                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3702                     break;
3703                 case 0x4f: /* fdivq */
3704                     CHECK_FPU_FEATURE(dc, FLOAT128);
3705                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3706                     break;
3707                 case 0x69: /* fsmuld */
3708                     CHECK_FPU_FEATURE(dc, FSMULD);
3709                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3710                     break;
3711                 case 0x6e: /* fdmulq */
3712                     CHECK_FPU_FEATURE(dc, FLOAT128);
3713                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3714                     break;
3715                 case 0xc4: /* fitos */
3716                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3717                     break;
3718                 case 0xc6: /* fdtos */
3719                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3720                     break;
3721                 case 0xc7: /* fqtos */
3722                     CHECK_FPU_FEATURE(dc, FLOAT128);
3723                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3724                     break;
3725                 case 0xc8: /* fitod */
3726                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3727                     break;
3728                 case 0xc9: /* fstod */
3729                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3730                     break;
3731                 case 0xcb: /* fqtod */
3732                     CHECK_FPU_FEATURE(dc, FLOAT128);
3733                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3734                     break;
3735                 case 0xcc: /* fitoq */
3736                     CHECK_FPU_FEATURE(dc, FLOAT128);
3737                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3738                     break;
3739                 case 0xcd: /* fstoq */
3740                     CHECK_FPU_FEATURE(dc, FLOAT128);
3741                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3742                     break;
3743                 case 0xce: /* fdtoq */
3744                     CHECK_FPU_FEATURE(dc, FLOAT128);
3745                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3746                     break;
3747                 case 0xd1: /* fstoi */
3748                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3749                     break;
3750                 case 0xd2: /* fdtoi */
3751                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3752                     break;
3753                 case 0xd3: /* fqtoi */
3754                     CHECK_FPU_FEATURE(dc, FLOAT128);
3755                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3756                     break;
3757 #ifdef TARGET_SPARC64
3758                 case 0x2: /* V9 fmovd */
3759                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3760                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3761                     break;
3762                 case 0x3: /* V9 fmovq */
3763                     CHECK_FPU_FEATURE(dc, FLOAT128);
3764                     gen_move_Q(dc, rd, rs2);
3765                     break;
3766                 case 0x6: /* V9 fnegd */
3767                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3768                     break;
3769                 case 0x7: /* V9 fnegq */
3770                     CHECK_FPU_FEATURE(dc, FLOAT128);
3771                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3772                     break;
3773                 case 0xa: /* V9 fabsd */
3774                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3775                     break;
3776                 case 0xb: /* V9 fabsq */
3777                     CHECK_FPU_FEATURE(dc, FLOAT128);
3778                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3779                     break;
3780                 case 0x81: /* V9 fstox */
3781                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3782                     break;
3783                 case 0x82: /* V9 fdtox */
3784                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3785                     break;
3786                 case 0x83: /* V9 fqtox */
3787                     CHECK_FPU_FEATURE(dc, FLOAT128);
3788                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3789                     break;
3790                 case 0x84: /* V9 fxtos */
3791                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3792                     break;
3793                 case 0x88: /* V9 fxtod */
3794                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3795                     break;
3796                 case 0x8c: /* V9 fxtoq */
3797                     CHECK_FPU_FEATURE(dc, FLOAT128);
3798                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3799                     break;
3800 #endif
3801                 default:
3802                     goto illegal_insn;
3803                 }
3804             } else if (xop == 0x35) {   /* FPU Operations */
3805 #ifdef TARGET_SPARC64
3806                 int cond;
3807 #endif
3808                 if (gen_trap_ifnofpu(dc)) {
3809                     goto jmp_insn;
3810                 }
3811                 gen_op_clear_ieee_excp_and_FTT();
3812                 rs1 = GET_FIELD(insn, 13, 17);
3813                 rs2 = GET_FIELD(insn, 27, 31);
3814                 xop = GET_FIELD(insn, 18, 26);
3815
3816 #ifdef TARGET_SPARC64
                /* V9 FMOVr: conditionally move an FP register of size
                 * 'sz' (s = single, d = double, q = quad) based on a test
                 * of integer register rs1.  Decodes the rcond field from
                 * insn bits 10-12, builds the register comparison with
                 * gen_compare_reg(), then emits a conditional move of FP
                 * reg rs2 into rd.  Uses 'cond', 'cpu_src1', 'dc', 'rd',
                 * 'rs2' from the enclosing decoder scope.  #undef'd right
                 * after the fmovsr/fmovdr/fmovqr cases below.
                 */
3817 #define FMOVR(sz)                                                  \
3818                 do {                                               \
3819                     DisasCompare cmp;                              \
3820                     cond = GET_FIELD_SP(insn, 10, 12);             \
3821                     cpu_src1 = get_src1(dc, insn);                 \
3822                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3823                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3824                     free_compare(&cmp);                            \
3825                 } while (0)
3826
3827                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3828                     FMOVR(s);
3829                     break;
3830                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3831                     FMOVR(d);
3832                     break;
3833                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3834                     CHECK_FPU_FEATURE(dc, FLOAT128);
3835                     FMOVR(q);
3836                     break;
3837                 }
3838 #undef FMOVR
3839 #endif
3840                 switch (xop) {
3841 #ifdef TARGET_SPARC64
                    /* V9 FMOVcc (%fccN form): conditionally move an FP
                     * register of size 'sz' based on floating-point
                     * condition-code field number 'fcc' (0-3).  Decodes
                     * the 4-bit cond field from insn bits 14-17, builds
                     * the FP-cc comparison with gen_fcompare(), then
                     * emits the conditional move of rs2 into rd.
                     * Redefined below (after #undef) for the %icc/%xcc
                     * integer-cc variant.
                     */
3842 #define FMOVCC(fcc, sz)                                                 \
3843                     do {                                                \
3844                         DisasCompare cmp;                               \
3845                         cond = GET_FIELD_SP(insn, 14, 17);              \
3846                         gen_fcompare(&cmp, fcc, cond);                  \
3847                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3848                         free_compare(&cmp);                             \
3849                     } while (0)
3850
3851                     case 0x001: /* V9 fmovscc %fcc0 */
3852                         FMOVCC(0, s);
3853                         break;
3854                     case 0x002: /* V9 fmovdcc %fcc0 */
3855                         FMOVCC(0, d);
3856                         break;
3857                     case 0x003: /* V9 fmovqcc %fcc0 */
3858                         CHECK_FPU_FEATURE(dc, FLOAT128);
3859                         FMOVCC(0, q);
3860                         break;
3861                     case 0x041: /* V9 fmovscc %fcc1 */
3862                         FMOVCC(1, s);
3863                         break;
3864                     case 0x042: /* V9 fmovdcc %fcc1 */
3865                         FMOVCC(1, d);
3866                         break;
3867                     case 0x043: /* V9 fmovqcc %fcc1 */
3868                         CHECK_FPU_FEATURE(dc, FLOAT128);
3869                         FMOVCC(1, q);
3870                         break;
3871                     case 0x081: /* V9 fmovscc %fcc2 */
3872                         FMOVCC(2, s);
3873                         break;
3874                     case 0x082: /* V9 fmovdcc %fcc2 */
3875                         FMOVCC(2, d);
3876                         break;
3877                     case 0x083: /* V9 fmovqcc %fcc2 */
3878                         CHECK_FPU_FEATURE(dc, FLOAT128);
3879                         FMOVCC(2, q);
3880                         break;
3881                     case 0x0c1: /* V9 fmovscc %fcc3 */
3882                         FMOVCC(3, s);
3883                         break;
3884                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3885                         FMOVCC(3, d);
3886                         break;
3887                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3888                         CHECK_FPU_FEATURE(dc, FLOAT128);
3889                         FMOVCC(3, q);
3890                         break;
3891 #undef FMOVCC
                    /* V9 FMOVcc (%icc/%xcc form): same shape as the
                     * %fccN variant above, but predicated on the integer
                     * condition codes — 'xcc' selects %icc (0) or %xcc
                     * (1), evaluated via gen_compare().  cond again comes
                     * from insn bits 14-17; the conditional move of rs2
                     * into rd is emitted by gen_fmov##sz.
                     */
3892 #define FMOVCC(xcc, sz)                                                 \
3893                     do {                                                \
3894                         DisasCompare cmp;                               \
3895                         cond = GET_FIELD_SP(insn, 14, 17);              \
3896                         gen_compare(&cmp, xcc, cond, dc);               \
3897                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3898                         free_compare(&cmp);                             \
3899                     } while (0)
3900
3901                     case 0x101: /* V9 fmovscc %icc */
3902                         FMOVCC(0, s);
3903                         break;
3904                     case 0x102: /* V9 fmovdcc %icc */
3905                         FMOVCC(0, d);
3906                         break;
3907                     case 0x103: /* V9 fmovqcc %icc */
3908                         CHECK_FPU_FEATURE(dc, FLOAT128);
3909                         FMOVCC(0, q);
3910                         break;
3911                     case 0x181: /* V9 fmovscc %xcc */
3912                         FMOVCC(1, s);
3913                         break;
3914                     case 0x182: /* V9 fmovdcc %xcc */
3915                         FMOVCC(1, d);
3916                         break;
3917                     case 0x183: /* V9 fmovqcc %xcc */
3918                         CHECK_FPU_FEATURE(dc, FLOAT128);
3919                         FMOVCC(1, q);
3920                         break;
3921 #undef FMOVCC
3922 #endif
3923                     case 0x51: /* fcmps, V9 %fcc */
3924                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3925                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3926                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3927                         break;
3928                     case 0x52: /* fcmpd, V9 %fcc */
3929                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3930                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3931                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3932                         break;
3933                     case 0x53: /* fcmpq, V9 %fcc */
3934                         CHECK_FPU_FEATURE(dc, FLOAT128);
3935                         gen_op_load_fpr_QT0(QFPREG(rs1));
3936                         gen_op_load_fpr_QT1(QFPREG(rs2));
3937                         gen_op_fcmpq(rd & 3);
3938                         break;
3939                     case 0x55: /* fcmpes, V9 %fcc */
3940                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3941                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3942                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3943                         break;
3944                     case 0x56: /* fcmped, V9 %fcc */
3945                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3946                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3947                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3948                         break;
3949                     case 0x57: /* fcmpeq, V9 %fcc */
3950                         CHECK_FPU_FEATURE(dc, FLOAT128);
3951                         gen_op_load_fpr_QT0(QFPREG(rs1));
3952                         gen_op_load_fpr_QT1(QFPREG(rs2));
3953                         gen_op_fcmpeq(rd & 3);
3954                         break;
3955                     default:
3956                         goto illegal_insn;
3957                 }
3958             } else if (xop == 0x2) {
3959                 TCGv dst = gen_dest_gpr(dc, rd);
3960                 rs1 = GET_FIELD(insn, 13, 17);
3961                 if (rs1 == 0) {
3962                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3963                     if (IS_IMM) {       /* immediate */
3964                         simm = GET_FIELDs(insn, 19, 31);
3965                         tcg_gen_movi_tl(dst, simm);
3966                         gen_store_gpr(dc, rd, dst);
3967                     } else {            /* register */
3968                         rs2 = GET_FIELD(insn, 27, 31);
3969                         if (rs2 == 0) {
3970                             tcg_gen_movi_tl(dst, 0);
3971                             gen_store_gpr(dc, rd, dst);
3972                         } else {
3973                             cpu_src2 = gen_load_gpr(dc, rs2);
3974                             gen_store_gpr(dc, rd, cpu_src2);
3975                         }
3976                     }
3977                 } else {
3978                     cpu_src1 = get_src1(dc, insn);
3979                     if (IS_IMM) {       /* immediate */
3980                         simm = GET_FIELDs(insn, 19, 31);
3981                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3982                         gen_store_gpr(dc, rd, dst);
3983                     } else {            /* register */
3984                         rs2 = GET_FIELD(insn, 27, 31);
3985                         if (rs2 == 0) {
3986                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3987                             gen_store_gpr(dc, rd, cpu_src1);
3988                         } else {
3989                             cpu_src2 = gen_load_gpr(dc, rs2);
3990                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3991                             gen_store_gpr(dc, rd, dst);
3992                         }
3993                     }
3994                 }
3995 #ifdef TARGET_SPARC64
3996             } else if (xop == 0x25) { /* sll, V9 sllx */
3997                 cpu_src1 = get_src1(dc, insn);
3998                 if (IS_IMM) {   /* immediate */
3999                     simm = GET_FIELDs(insn, 20, 31);
4000                     if (insn & (1 << 12)) {
4001                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
4002                     } else {
4003                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
4004                     }
4005                 } else {                /* register */
4006                     rs2 = GET_FIELD(insn, 27, 31);
4007                     cpu_src2 = gen_load_gpr(dc, rs2);
4008                     cpu_tmp0 = get_temp_tl(dc);
4009                     if (insn & (1 << 12)) {
4010                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4011                     } else {
4012                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4013                     }
4014                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
4015                 }
4016                 gen_store_gpr(dc, rd, cpu_dst);
4017             } else if (xop == 0x26) { /* srl, V9 srlx */
4018                 cpu_src1 = get_src1(dc, insn);
4019                 if (IS_IMM) {   /* immediate */
4020                     simm = GET_FIELDs(insn, 20, 31);
4021                     if (insn & (1 << 12)) {
4022                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
4023                     } else {
4024                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4025                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
4026                     }
4027                 } else {                /* register */
4028                     rs2 = GET_FIELD(insn, 27, 31);
4029                     cpu_src2 = gen_load_gpr(dc, rs2);
4030                     cpu_tmp0 = get_temp_tl(dc);
4031                     if (insn & (1 << 12)) {
4032                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4033                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
4034                     } else {
4035                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4036                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4037                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
4038                     }
4039                 }
4040                 gen_store_gpr(dc, rd, cpu_dst);
4041             } else if (xop == 0x27) { /* sra, V9 srax */
4042                 cpu_src1 = get_src1(dc, insn);
4043                 if (IS_IMM) {   /* immediate */
4044                     simm = GET_FIELDs(insn, 20, 31);
4045                     if (insn & (1 << 12)) {
4046                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
4047                     } else {
4048                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4049                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
4050                     }
4051                 } else {                /* register */
4052                     rs2 = GET_FIELD(insn, 27, 31);
4053                     cpu_src2 = gen_load_gpr(dc, rs2);
4054                     cpu_tmp0 = get_temp_tl(dc);
4055                     if (insn & (1 << 12)) {
4056                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4057                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
4058                     } else {
4059                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4060                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4061                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
4062                     }
4063                 }
4064                 gen_store_gpr(dc, rd, cpu_dst);
4065 #endif
4066             } else if (xop < 0x36) {
4067                 if (xop < 0x20) {
4068                     cpu_src1 = get_src1(dc, insn);
4069                     cpu_src2 = get_src2(dc, insn);
4070                     switch (xop & ~0x10) {
4071                     case 0x0: /* add */
4072                         if (xop & 0x10) {
4073                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4074                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4075                             dc->cc_op = CC_OP_ADD;
4076                         } else {
4077                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4078                         }
4079                         break;
4080                     case 0x1: /* and */
4081                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
4082                         if (xop & 0x10) {
4083                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4084                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4085                             dc->cc_op = CC_OP_LOGIC;
4086                         }
4087                         break;
4088                     case 0x2: /* or */
4089                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
4090                         if (xop & 0x10) {
4091                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4092                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4093                             dc->cc_op = CC_OP_LOGIC;
4094                         }
4095                         break;
4096                     case 0x3: /* xor */
4097                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
4098                         if (xop & 0x10) {
4099                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4100                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4101                             dc->cc_op = CC_OP_LOGIC;
4102                         }
4103                         break;
4104                     case 0x4: /* sub */
4105                         if (xop & 0x10) {
4106                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4107                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
4108                             dc->cc_op = CC_OP_SUB;
4109                         } else {
4110                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
4111                         }
4112                         break;
4113                     case 0x5: /* andn */
4114                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
4115                         if (xop & 0x10) {
4116                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4117                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4118                             dc->cc_op = CC_OP_LOGIC;
4119                         }
4120                         break;
4121                     case 0x6: /* orn */
4122                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
4123                         if (xop & 0x10) {
4124                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4125                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4126                             dc->cc_op = CC_OP_LOGIC;
4127                         }
4128                         break;
4129                     case 0x7: /* xorn */
4130                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4131                         if (xop & 0x10) {
4132                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4133                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4134                             dc->cc_op = CC_OP_LOGIC;
4135                         }
4136                         break;
4137                     case 0x8: /* addx, V9 addc */
4138                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4139                                         (xop & 0x10));
4140                         break;
4141 #ifdef TARGET_SPARC64
4142                     case 0x9: /* V9 mulx */
4143                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4144                         break;
4145 #endif
4146                     case 0xa: /* umul */
4147                         CHECK_IU_FEATURE(dc, MUL);
4148                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4149                         if (xop & 0x10) {
4150                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4151                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4152                             dc->cc_op = CC_OP_LOGIC;
4153                         }
4154                         break;
4155                     case 0xb: /* smul */
4156                         CHECK_IU_FEATURE(dc, MUL);
4157                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4158                         if (xop & 0x10) {
4159                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4160                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4161                             dc->cc_op = CC_OP_LOGIC;
4162                         }
4163                         break;
4164                     case 0xc: /* subx, V9 subc */
4165                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4166                                         (xop & 0x10));
4167                         break;
4168 #ifdef TARGET_SPARC64
4169                     case 0xd: /* V9 udivx */
4170                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4171                         break;
4172 #endif
4173                     case 0xe: /* udiv */
4174                         CHECK_IU_FEATURE(dc, DIV);
4175                         if (xop & 0x10) {
4176                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
4177                                                cpu_src2);
4178                             dc->cc_op = CC_OP_DIV;
4179                         } else {
4180                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
4181                                             cpu_src2);
4182                         }
4183                         break;
4184                     case 0xf: /* sdiv */
4185                         CHECK_IU_FEATURE(dc, DIV);
4186                         if (xop & 0x10) {
4187                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
4188                                                cpu_src2);
4189                             dc->cc_op = CC_OP_DIV;
4190                         } else {
4191                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
4192                                             cpu_src2);
4193                         }
4194                         break;
4195                     default:
4196                         goto illegal_insn;
4197                     }
4198                     gen_store_gpr(dc, rd, cpu_dst);
4199                 } else {
4200                     cpu_src1 = get_src1(dc, insn);
4201                     cpu_src2 = get_src2(dc, insn);
4202                     switch (xop) {
4203                     case 0x20: /* taddcc */
4204                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4205                         gen_store_gpr(dc, rd, cpu_dst);
4206                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4207                         dc->cc_op = CC_OP_TADD;
4208                         break;
4209                     case 0x21: /* tsubcc */
4210                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4211                         gen_store_gpr(dc, rd, cpu_dst);
4212                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4213                         dc->cc_op = CC_OP_TSUB;
4214                         break;
4215                     case 0x22: /* taddcctv */
4216                         gen_helper_taddcctv(cpu_dst, cpu_env,
4217                                             cpu_src1, cpu_src2);
4218                         gen_store_gpr(dc, rd, cpu_dst);
4219                         dc->cc_op = CC_OP_TADDTV;
4220                         break;
4221                     case 0x23: /* tsubcctv */
4222                         gen_helper_tsubcctv(cpu_dst, cpu_env,
4223                                             cpu_src1, cpu_src2);
4224                         gen_store_gpr(dc, rd, cpu_dst);
4225                         dc->cc_op = CC_OP_TSUBTV;
4226                         break;
4227                     case 0x24: /* mulscc */
4228                         update_psr(dc);
4229                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4230                         gen_store_gpr(dc, rd, cpu_dst);
4231                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4232                         dc->cc_op = CC_OP_ADD;
4233                         break;
4234 #ifndef TARGET_SPARC64
4235                     case 0x25:  /* sll */
4236                         if (IS_IMM) { /* immediate */
4237                             simm = GET_FIELDs(insn, 20, 31);
4238                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4239                         } else { /* register */
4240                             cpu_tmp0 = get_temp_tl(dc);
4241                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4242                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4243                         }
4244                         gen_store_gpr(dc, rd, cpu_dst);
4245                         break;
4246                     case 0x26:  /* srl */
4247                         if (IS_IMM) { /* immediate */
4248                             simm = GET_FIELDs(insn, 20, 31);
4249                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4250                         } else { /* register */
4251                             cpu_tmp0 = get_temp_tl(dc);
4252                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4253                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4254                         }
4255                         gen_store_gpr(dc, rd, cpu_dst);
4256                         break;
4257                     case 0x27:  /* sra */
4258                         if (IS_IMM) { /* immediate */
4259                             simm = GET_FIELDs(insn, 20, 31);
4260                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4261                         } else { /* register */
4262                             cpu_tmp0 = get_temp_tl(dc);
4263                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4264                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4265                         }
4266                         gen_store_gpr(dc, rd, cpu_dst);
4267                         break;
4268 #endif
4269                     case 0x30:
4270                         {
4271                             cpu_tmp0 = get_temp_tl(dc);
4272                             switch(rd) {
4273                             case 0: /* wry */
4274                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4275                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4276                                 break;
4277 #ifndef TARGET_SPARC64
4278                             case 0x01 ... 0x0f: /* undefined in the
4279                                                    SPARCv8 manual, nop
4280                                                    on the microSPARC
4281                                                    II */
4282                             case 0x10 ... 0x1f: /* implementation-dependent
4283                                                    in the SPARCv8
4284                                                    manual, nop on the
4285                                                    microSPARC II */
4286                                 if ((rd == 0x13) && (dc->def->features &
4287                                                      CPU_FEATURE_POWERDOWN)) {
4288                                     /* LEON3 power-down */
4289                                     save_state(dc);
4290                                     gen_helper_power_down(cpu_env);
4291                                 }
4292                                 break;
4293 #else
4294                             case 0x2: /* V9 wrccr */
4295                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4296                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
4297                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4298                                 dc->cc_op = CC_OP_FLAGS;
4299                                 break;
4300                             case 0x3: /* V9 wrasi */
4301                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4302                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4303                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4304                                                 offsetof(CPUSPARCState, asi));
4305                                 /* End TB to notice changed ASI.  */
4306                                 save_state(dc);
4307                                 gen_op_next_insn();
4308                                 tcg_gen_exit_tb(0);
4309                                 dc->is_br = 1;
4310                                 break;
4311                             case 0x6: /* V9 wrfprs */
4312                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4313                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4314                                 dc->fprs_dirty = 0;
4315                                 save_state(dc);
4316                                 gen_op_next_insn();
4317                                 tcg_gen_exit_tb(0);
4318                                 dc->is_br = 1;
4319                                 break;
4320                             case 0xf: /* V9 sir, nop if user */
4321 #if !defined(CONFIG_USER_ONLY)
4322                                 if (supervisor(dc)) {
4323                                     ; // XXX
4324                                 }
4325 #endif
4326                                 break;
4327                             case 0x13: /* Graphics Status */
4328                                 if (gen_trap_ifnofpu(dc)) {
4329                                     goto jmp_insn;
4330                                 }
4331                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4332                                 break;
4333                             case 0x14: /* Softint set */
4334                                 if (!supervisor(dc))
4335                                     goto illegal_insn;
4336                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4337                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
4338                                 break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                /* WRasr writes rs1 ^ (rs2 or simm13); the
                                   helper clears those SOFTINT bits.  */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                /* Replace the SOFTINT register outright.  */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    /* Latch the new TICK_CMPR value and
                                       reprogram the tick timer's limit.  */
                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    /* Writing %stick sets the counter value
                                       itself (set_count), not the compare
                                       register.  */
                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    /* Latch STICK_CMPR and reprogram the
                                       system tick timer's limit.  */
                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                /* Unimplemented or reserved ASR.  */
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            /* On V9 this opcode is SAVED/RESTORED; the rd
                               field selects the window-management op.  */
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrpsr: written value is rs1 ^ operand2.  */
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_tmp0);
                            /* Condition codes now live in the PSR.  */
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            /* End the TB: a PSR write may change state that
                               subsequent translation depends on.  */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* Written value is rs1 ^ (rs2 or simm13).  */
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 wrpr: rd selects the privileged register.
                               tpc/tnpc/tstate/tt live in the trap_state
                               entry for the current trap level, which
                               gen_load_trap_state_at_tl resolves.  */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    /* tt is a 32-bit field in trap_state.  */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    /* Set the tick counter value itself.  */
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                /* PSTATE affects how code is translated, so
                                   force a dynamic npc to end this TB.  */
                                save_state(dc);
                                gen_helper_wrpstate(cpu_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                /* Changing TL switches the active trap
                                   state; likewise end translation here.  */
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(cpu_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrwim: mask off window-invalid bits beyond
                               the number of implemented windows.  */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* V8: write the trap base register.  */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 wrhpr: hyperprivileged registers,
                               selected by rd; requires hypervisor mode.  */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* HPSTATE changes may affect translation:
                                   end the TB after this insn.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    /* Latch HSTICK_CMPR and reprogram the
                                       hstick timer's limit.  */
                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            if (insn & (1 << 18)) {
                                /* Integer condition codes: cc field 0 is
                                   %icc, 2 is %xcc; 1 and 3 are illegal.  */
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                /* Floating-point condition codes %fccN.  */
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* dst keeps its previous value when the
                               condition is false.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            free_compare(&cmp);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* 64-bit signed divide; the helper takes cpu_env so
                           it can raise the division trap itself.  */
                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* Population count of rs2 only; rs1 is not an
                           operand of popc.  */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            /* movr's condition is a register test on rs1
                               (zero, non-zero, sign, ...).  */
                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* dst keeps its previous value when the
                               condition is false.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            free_compare(&cmp);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                /* All VIS ops trap if the FPU is disabled.  */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                /* Edge handling: gen_edge(dc, dst, s1, s2, width, cc, left).
                   The "cc" variants also update the condition codes; the
                   "l"-named variants pass left=1 (little-endian forms per
                   the VIS naming — see gen_edge for the exact semantics). */
                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* array16 is array8 scaled by the 2-byte element
                       size (shift left by 1).  */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* array32 is array8 scaled by the 4-byte element
                       size (shift left by 2).  */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* Last arg selects the normal (0) vs. little-endian
                       (1) form of the address alignment.  */
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* rd = rs1 + rs2; the sum is also deposited into the
                       upper 32 bits of %gsr (the bmask field).  */
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* VIS partitioned compares: operate on 64-bit FP source
                   registers and deliver a per-element bitmask result into
                   an integer destination register.  */
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* Partitioned multiplies and the rest of the VIS ALU ops.
                   gen_ne_fop_* wrap helpers that don't touch %gsr;
                   gen_gsr_fop_* pass %gsr (scale/align state) through.  */
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Packs a 64-bit source (rs2) to a 32-bit result,
                       using the scale factor held in %gsr.  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* DDDD form: rd is also a source operand (pdist
                       accumulates into the destination).  */
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Single 32-bit lane: a plain i32 add suffices, no
                       helper needed.  */
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4991                     break;
4992                 case 0x056: /* VIS I fpsub32 */
4993                     CHECK_FPU_FEATURE(dc, VIS1);
4994                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4995                     break;
4996                 case 0x057: /* VIS I fpsub32s */
4997                     CHECK_FPU_FEATURE(dc, VIS1);
4998                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4999                     break;
5000                 case 0x060: /* VIS I fzero */
5001                     CHECK_FPU_FEATURE(dc, VIS1);
5002                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5003                     tcg_gen_movi_i64(cpu_dst_64, 0);
5004                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5005                     break;
5006                 case 0x061: /* VIS I fzeros */
5007                     CHECK_FPU_FEATURE(dc, VIS1);
5008                     cpu_dst_32 = gen_dest_fpr_F(dc);
5009                     tcg_gen_movi_i32(cpu_dst_32, 0);
5010                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5011                     break;
5012                 case 0x062: /* VIS I fnor */
5013                     CHECK_FPU_FEATURE(dc, VIS1);
5014                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
5015                     break;
5016                 case 0x063: /* VIS I fnors */
5017                     CHECK_FPU_FEATURE(dc, VIS1);
5018                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
5019                     break;
5020                 case 0x064: /* VIS I fandnot2 */
5021                     CHECK_FPU_FEATURE(dc, VIS1);
5022                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
5023                     break;
5024                 case 0x065: /* VIS I fandnot2s */
5025                     CHECK_FPU_FEATURE(dc, VIS1);
5026                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
5027                     break;
5028                 case 0x066: /* VIS I fnot2 */
5029                     CHECK_FPU_FEATURE(dc, VIS1);
5030                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
5031                     break;
5032                 case 0x067: /* VIS I fnot2s */
5033                     CHECK_FPU_FEATURE(dc, VIS1);
5034                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
5035                     break;
5036                 case 0x068: /* VIS I fandnot1 */
5037                     CHECK_FPU_FEATURE(dc, VIS1);
5038                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
5039                     break;
5040                 case 0x069: /* VIS I fandnot1s */
5041                     CHECK_FPU_FEATURE(dc, VIS1);
5042                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
5043                     break;
5044                 case 0x06a: /* VIS I fnot1 */
5045                     CHECK_FPU_FEATURE(dc, VIS1);
5046                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
5047                     break;
5048                 case 0x06b: /* VIS I fnot1s */
5049                     CHECK_FPU_FEATURE(dc, VIS1);
5050                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
5051                     break;
5052                 case 0x06c: /* VIS I fxor */
5053                     CHECK_FPU_FEATURE(dc, VIS1);
5054                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
5055                     break;
5056                 case 0x06d: /* VIS I fxors */
5057                     CHECK_FPU_FEATURE(dc, VIS1);
5058                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
5059                     break;
5060                 case 0x06e: /* VIS I fnand */
5061                     CHECK_FPU_FEATURE(dc, VIS1);
5062                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
5063                     break;
5064                 case 0x06f: /* VIS I fnands */
5065                     CHECK_FPU_FEATURE(dc, VIS1);
5066                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
5067                     break;
5068                 case 0x070: /* VIS I fand */
5069                     CHECK_FPU_FEATURE(dc, VIS1);
5070                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
5071                     break;
5072                 case 0x071: /* VIS I fands */
5073                     CHECK_FPU_FEATURE(dc, VIS1);
5074                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
5075                     break;
5076                 case 0x072: /* VIS I fxnor */
5077                     CHECK_FPU_FEATURE(dc, VIS1);
5078                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5079                     break;
5080                 case 0x073: /* VIS I fxnors */
5081                     CHECK_FPU_FEATURE(dc, VIS1);
5082                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5083                     break;
5084                 case 0x074: /* VIS I fsrc1 */
5085                     CHECK_FPU_FEATURE(dc, VIS1);
5086                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5087                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5088                     break;
5089                 case 0x075: /* VIS I fsrc1s */
5090                     CHECK_FPU_FEATURE(dc, VIS1);
5091                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5092                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5093                     break;
5094                 case 0x076: /* VIS I fornot2 */
5095                     CHECK_FPU_FEATURE(dc, VIS1);
5096                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5097                     break;
5098                 case 0x077: /* VIS I fornot2s */
5099                     CHECK_FPU_FEATURE(dc, VIS1);
5100                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5101                     break;
5102                 case 0x078: /* VIS I fsrc2 */
5103                     CHECK_FPU_FEATURE(dc, VIS1);
5104                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5105                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5106                     break;
5107                 case 0x079: /* VIS I fsrc2s */
5108                     CHECK_FPU_FEATURE(dc, VIS1);
5109                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5110                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5111                     break;
5112                 case 0x07a: /* VIS I fornot1 */
5113                     CHECK_FPU_FEATURE(dc, VIS1);
5114                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5115                     break;
5116                 case 0x07b: /* VIS I fornot1s */
5117                     CHECK_FPU_FEATURE(dc, VIS1);
5118                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5119                     break;
5120                 case 0x07c: /* VIS I for */
5121                     CHECK_FPU_FEATURE(dc, VIS1);
5122                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5123                     break;
5124                 case 0x07d: /* VIS I fors */
5125                     CHECK_FPU_FEATURE(dc, VIS1);
5126                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5127                     break;
5128                 case 0x07e: /* VIS I fone */
5129                     CHECK_FPU_FEATURE(dc, VIS1);
5130                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5131                     tcg_gen_movi_i64(cpu_dst_64, -1);
5132                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5133                     break;
5134                 case 0x07f: /* VIS I fones */
5135                     CHECK_FPU_FEATURE(dc, VIS1);
5136                     cpu_dst_32 = gen_dest_fpr_F(dc);
5137                     tcg_gen_movi_i32(cpu_dst_32, -1);
5138                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5139                     break;
5140                 case 0x080: /* VIS I shutdown */
5141                 case 0x081: /* VIS II siam */
5142                     // XXX
5143                     goto illegal_insn;
5144                 default:
5145                     goto illegal_insn;
5146                 }
5147 #else
5148                 goto ncp_insn;
5149 #endif
5150             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5151 #ifdef TARGET_SPARC64
5152                 goto illegal_insn;
5153 #else
5154                 goto ncp_insn;
5155 #endif
5156 #ifdef TARGET_SPARC64
5157             } else if (xop == 0x39) { /* V9 return */
5158                 save_state(dc);
5159                 cpu_src1 = get_src1(dc, insn);
5160                 cpu_tmp0 = get_temp_tl(dc);
5161                 if (IS_IMM) {   /* immediate */
5162                     simm = GET_FIELDs(insn, 19, 31);
5163                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5164                 } else {                /* register */
5165                     rs2 = GET_FIELD(insn, 27, 31);
5166                     if (rs2) {
5167                         cpu_src2 = gen_load_gpr(dc, rs2);
5168                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5169                     } else {
5170                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5171                     }
5172                 }
5173                 gen_helper_restore(cpu_env);
5174                 gen_mov_pc_npc(dc);
5175                 gen_check_align(cpu_tmp0, 3);
5176                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5177                 dc->npc = DYNAMIC_PC;
5178                 goto jmp_insn;
5179 #endif
5180             } else {
5181                 cpu_src1 = get_src1(dc, insn);
5182                 cpu_tmp0 = get_temp_tl(dc);
5183                 if (IS_IMM) {   /* immediate */
5184                     simm = GET_FIELDs(insn, 19, 31);
5185                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5186                 } else {                /* register */
5187                     rs2 = GET_FIELD(insn, 27, 31);
5188                     if (rs2) {
5189                         cpu_src2 = gen_load_gpr(dc, rs2);
5190                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5191                     } else {
5192                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5193                     }
5194                 }
5195                 switch (xop) {
5196                 case 0x38:      /* jmpl */
5197                     {
5198                         TCGv t = gen_dest_gpr(dc, rd);
5199                         tcg_gen_movi_tl(t, dc->pc);
5200                         gen_store_gpr(dc, rd, t);
5201
5202                         gen_mov_pc_npc(dc);
5203                         gen_check_align(cpu_tmp0, 3);
5204                         gen_address_mask(dc, cpu_tmp0);
5205                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5206                         dc->npc = DYNAMIC_PC;
5207                     }
5208                     goto jmp_insn;
5209 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5210                 case 0x39:      /* rett, V9 return */
5211                     {
5212                         if (!supervisor(dc))
5213                             goto priv_insn;
5214                         gen_mov_pc_npc(dc);
5215                         gen_check_align(cpu_tmp0, 3);
5216                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5217                         dc->npc = DYNAMIC_PC;
5218                         gen_helper_rett(cpu_env);
5219                     }
5220                     goto jmp_insn;
5221 #endif
5222                 case 0x3b: /* flush */
5223                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5224                         goto unimp_flush;
5225                     /* nop */
5226                     break;
5227                 case 0x3c:      /* save */
5228                     gen_helper_save(cpu_env);
5229                     gen_store_gpr(dc, rd, cpu_tmp0);
5230                     break;
5231                 case 0x3d:      /* restore */
5232                     gen_helper_restore(cpu_env);
5233                     gen_store_gpr(dc, rd, cpu_tmp0);
5234                     break;
5235 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5236                 case 0x3e:      /* V9 done/retry */
5237                     {
5238                         switch (rd) {
5239                         case 0:
5240                             if (!supervisor(dc))
5241                                 goto priv_insn;
5242                             dc->npc = DYNAMIC_PC;
5243                             dc->pc = DYNAMIC_PC;
5244                             gen_helper_done(cpu_env);
5245                             goto jmp_insn;
5246                         case 1:
5247                             if (!supervisor(dc))
5248                                 goto priv_insn;
5249                             dc->npc = DYNAMIC_PC;
5250                             dc->pc = DYNAMIC_PC;
5251                             gen_helper_retry(cpu_env);
5252                             goto jmp_insn;
5253                         default:
5254                             goto illegal_insn;
5255                         }
5256                     }
5257                     break;
5258 #endif
5259                 default:
5260                     goto illegal_insn;
5261                 }
5262             }
5263             break;
5264         }
5265         break;
5266     case 3:                     /* load/store instructions */
5267         {
5268             unsigned int xop = GET_FIELD(insn, 7, 12);
5269             /* ??? gen_address_mask prevents us from using a source
5270                register directly.  Always generate a temporary.  */
5271             TCGv cpu_addr = get_temp_tl(dc);
5272
5273             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5274             if (xop == 0x3c || xop == 0x3e) {
5275                 /* V9 casa/casxa : no offset */
5276             } else if (IS_IMM) {     /* immediate */
5277                 simm = GET_FIELDs(insn, 19, 31);
5278                 if (simm != 0) {
5279                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5280                 }
5281             } else {            /* register */
5282                 rs2 = GET_FIELD(insn, 27, 31);
5283                 if (rs2 != 0) {
5284                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5285                 }
5286             }
5287             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5288                 (xop > 0x17 && xop <= 0x1d ) ||
5289                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5290                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5291
5292                 switch (xop) {
5293                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5294                     gen_address_mask(dc, cpu_addr);
5295                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
5296                     break;
5297                 case 0x1:       /* ldub, load unsigned byte */
5298                     gen_address_mask(dc, cpu_addr);
5299                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
5300                     break;
5301                 case 0x2:       /* lduh, load unsigned halfword */
5302                     gen_address_mask(dc, cpu_addr);
5303                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
5304                     break;
5305                 case 0x3:       /* ldd, load double word */
5306                     if (rd & 1)
5307                         goto illegal_insn;
5308                     else {
5309                         TCGv_i64 t64;
5310
5311                         gen_address_mask(dc, cpu_addr);
5312                         t64 = tcg_temp_new_i64();
5313                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
5314                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5315                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5316                         gen_store_gpr(dc, rd + 1, cpu_val);
5317                         tcg_gen_shri_i64(t64, t64, 32);
5318                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5319                         tcg_temp_free_i64(t64);
5320                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5321                     }
5322                     break;
5323                 case 0x9:       /* ldsb, load signed byte */
5324                     gen_address_mask(dc, cpu_addr);
5325                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
5326                     break;
5327                 case 0xa:       /* ldsh, load signed halfword */
5328                     gen_address_mask(dc, cpu_addr);
5329                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
5330                     break;
5331                 case 0xd:       /* ldstub */
5332                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5333                     break;
5334                 case 0x0f:
5335                     /* swap, swap register with memory. Also atomically */
5336                     CHECK_IU_FEATURE(dc, SWAP);
5337                     cpu_src1 = gen_load_gpr(dc, rd);
5338                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5339                              dc->mem_idx, MO_TEUL);
5340                     break;
5341 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5342                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5343                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5344                     break;
5345                 case 0x11:      /* lduba, load unsigned byte alternate */
5346                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5347                     break;
5348                 case 0x12:      /* lduha, load unsigned halfword alternate */
5349                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5350                     break;
5351                 case 0x13:      /* ldda, load double word alternate */
5352                     if (rd & 1) {
5353                         goto illegal_insn;
5354                     }
5355                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5356                     goto skip_move;
5357                 case 0x19:      /* ldsba, load signed byte alternate */
5358                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5359                     break;
5360                 case 0x1a:      /* ldsha, load signed halfword alternate */
5361                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5362                     break;
5363                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5364                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5365                     break;
5366                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5367                                    atomically */
5368                     CHECK_IU_FEATURE(dc, SWAP);
5369                     cpu_src1 = gen_load_gpr(dc, rd);
5370                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5371                     break;
5372
5373 #ifndef TARGET_SPARC64
5374                 case 0x30: /* ldc */
5375                 case 0x31: /* ldcsr */
5376                 case 0x33: /* lddc */
5377                     goto ncp_insn;
5378 #endif
5379 #endif
5380 #ifdef TARGET_SPARC64
5381                 case 0x08: /* V9 ldsw */
5382                     gen_address_mask(dc, cpu_addr);
5383                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5384                     break;
5385                 case 0x0b: /* V9 ldx */
5386                     gen_address_mask(dc, cpu_addr);
5387                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5388                     break;
5389                 case 0x18: /* V9 ldswa */
5390                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5391                     break;
5392                 case 0x1b: /* V9 ldxa */
5393                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5394                     break;
5395                 case 0x2d: /* V9 prefetch, no effect */
5396                     goto skip_move;
5397                 case 0x30: /* V9 ldfa */
5398                     if (gen_trap_ifnofpu(dc)) {
5399                         goto jmp_insn;
5400                     }
5401                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5402                     gen_update_fprs_dirty(dc, rd);
5403                     goto skip_move;
5404                 case 0x33: /* V9 lddfa */
5405                     if (gen_trap_ifnofpu(dc)) {
5406                         goto jmp_insn;
5407                     }
5408                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5409                     gen_update_fprs_dirty(dc, DFPREG(rd));
5410                     goto skip_move;
5411                 case 0x3d: /* V9 prefetcha, no effect */
5412                     goto skip_move;
5413                 case 0x32: /* V9 ldqfa */
5414                     CHECK_FPU_FEATURE(dc, FLOAT128);
5415                     if (gen_trap_ifnofpu(dc)) {
5416                         goto jmp_insn;
5417                     }
5418                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5419                     gen_update_fprs_dirty(dc, QFPREG(rd));
5420                     goto skip_move;
5421 #endif
5422                 default:
5423                     goto illegal_insn;
5424                 }
5425                 gen_store_gpr(dc, rd, cpu_val);
5426 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5427             skip_move: ;
5428 #endif
5429             } else if (xop >= 0x20 && xop < 0x24) {
5430                 if (gen_trap_ifnofpu(dc)) {
5431                     goto jmp_insn;
5432                 }
5433                 switch (xop) {
5434                 case 0x20:      /* ldf, load fpreg */
5435                     gen_address_mask(dc, cpu_addr);
5436                     cpu_dst_32 = gen_dest_fpr_F(dc);
5437                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5438                                         dc->mem_idx, MO_TEUL);
5439                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5440                     break;
5441                 case 0x21:      /* ldfsr, V9 ldxfsr */
5442 #ifdef TARGET_SPARC64
5443                     gen_address_mask(dc, cpu_addr);
5444                     if (rd == 1) {
5445                         TCGv_i64 t64 = tcg_temp_new_i64();
5446                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5447                                             dc->mem_idx, MO_TEQ);
5448                         gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5449                         tcg_temp_free_i64(t64);
5450                         break;
5451                     }
5452 #endif
5453                     cpu_dst_32 = get_temp_i32(dc);
5454                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5455                                         dc->mem_idx, MO_TEUL);
5456                     gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5457                     break;
5458                 case 0x22:      /* ldqf, load quad fpreg */
5459                     CHECK_FPU_FEATURE(dc, FLOAT128);
5460                     gen_address_mask(dc, cpu_addr);
5461                     cpu_src1_64 = tcg_temp_new_i64();
5462                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5463                                         MO_TEQ | MO_ALIGN_4);
5464                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5465                     cpu_src2_64 = tcg_temp_new_i64();
5466                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5467                                         MO_TEQ | MO_ALIGN_4);
5468                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5469                     tcg_temp_free_i64(cpu_src1_64);
5470                     tcg_temp_free_i64(cpu_src2_64);
5471                     break;
5472                 case 0x23:      /* lddf, load double fpreg */
5473                     gen_address_mask(dc, cpu_addr);
5474                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5475                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5476                                         MO_TEQ | MO_ALIGN_4);
5477                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5478                     break;
5479                 default:
5480                     goto illegal_insn;
5481                 }
5482             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5483                        xop == 0xe || xop == 0x1e) {
5484                 TCGv cpu_val = gen_load_gpr(dc, rd);
5485
5486                 switch (xop) {
5487                 case 0x4: /* st, store word */
5488                     gen_address_mask(dc, cpu_addr);
5489                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5490                     break;
5491                 case 0x5: /* stb, store byte */
5492                     gen_address_mask(dc, cpu_addr);
5493                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5494                     break;
5495                 case 0x6: /* sth, store halfword */
5496                     gen_address_mask(dc, cpu_addr);
5497                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5498                     break;
5499                 case 0x7: /* std, store double word */
5500                     if (rd & 1)
5501                         goto illegal_insn;
5502                     else {
5503                         TCGv_i64 t64;
5504                         TCGv lo;
5505
5506                         gen_address_mask(dc, cpu_addr);
5507                         lo = gen_load_gpr(dc, rd + 1);
5508                         t64 = tcg_temp_new_i64();
5509                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5510                         tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
5511                         tcg_temp_free_i64(t64);
5512                     }
5513                     break;
5514 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5515                 case 0x14: /* sta, V9 stwa, store word alternate */
5516                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5517                     break;
5518                 case 0x15: /* stba, store byte alternate */
5519                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5520                     break;
5521                 case 0x16: /* stha, store halfword alternate */
5522                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5523                     break;
5524                 case 0x17: /* stda, store double word alternate */
5525                     if (rd & 1) {
5526                         goto illegal_insn;
5527                     }
5528                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5529                     break;
5530 #endif
5531 #ifdef TARGET_SPARC64
5532                 case 0x0e: /* V9 stx */
5533                     gen_address_mask(dc, cpu_addr);
5534                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5535                     break;
5536                 case 0x1e: /* V9 stxa */
5537                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5538                     break;
5539 #endif
5540                 default:
5541                     goto illegal_insn;
5542                 }
5543             } else if (xop > 0x23 && xop < 0x28) {
5544                 if (gen_trap_ifnofpu(dc)) {
5545                     goto jmp_insn;
5546                 }
5547                 switch (xop) {
5548                 case 0x24: /* stf, store fpreg */
5549                     gen_address_mask(dc, cpu_addr);
5550                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5551                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5552                                         dc->mem_idx, MO_TEUL);
5553                     break;
5554                 case 0x25: /* stfsr, V9 stxfsr */
5555                     {
5556 #ifdef TARGET_SPARC64
5557                         gen_address_mask(dc, cpu_addr);
5558                         if (rd == 1) {
5559                             tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5560                             break;
5561                         }
5562 #endif
5563                         tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5564                     }
5565                     break;
5566                 case 0x26:
5567 #ifdef TARGET_SPARC64
5568                     /* V9 stqf, store quad fpreg */
5569                     CHECK_FPU_FEATURE(dc, FLOAT128);
5570                     gen_address_mask(dc, cpu_addr);
5571                     /* ??? While stqf only requires 4-byte alignment, it is
5572                        legal for the cpu to signal the unaligned exception.
5573                        The OS trap handler is then required to fix it up.
5574                        For qemu, this avoids having to probe the second page
5575                        before performing the first write.  */
5576                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5577                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5578                                         dc->mem_idx, MO_TEQ | MO_ALIGN_16);
5579                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5580                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5581                     tcg_gen_qemu_st_i64(cpu_src2_64, cpu_addr, /* was cpu_src1_64: stored Q0 twice, never Q1 */
5582                                         dc->mem_idx, MO_TEQ);
5583                     break;
5584 #else /* !TARGET_SPARC64 */
5585                     /* stdfq, store floating point queue */
5586 #if defined(CONFIG_USER_ONLY)
5587                     goto illegal_insn;
5588 #else
5589                     if (!supervisor(dc))
5590                         goto priv_insn;
5591                     if (gen_trap_ifnofpu(dc)) {
5592                         goto jmp_insn;
5593                     }
5594                     goto nfq_insn;
5595 #endif
5596 #endif
5597                 case 0x27: /* stdf, store double fpreg */
5598                     gen_address_mask(dc, cpu_addr);
5599                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5600                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5601                                         MO_TEQ | MO_ALIGN_4);
5602                     break;
5603                 default:
5604                     goto illegal_insn;
5605                 }
5606             } else if (xop > 0x33 && xop < 0x3f) {
5607                 switch (xop) {
5608 #ifdef TARGET_SPARC64
5609                 case 0x34: /* V9 stfa */
5610                     if (gen_trap_ifnofpu(dc)) {
5611                         goto jmp_insn;
5612                     }
5613                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5614                     break;
5615                 case 0x36: /* V9 stqfa */
5616                     {
5617                         CHECK_FPU_FEATURE(dc, FLOAT128);
5618                         if (gen_trap_ifnofpu(dc)) {
5619                             goto jmp_insn;
5620                         }
5621                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5622                     }
5623                     break;
5624                 case 0x37: /* V9 stdfa */
5625                     if (gen_trap_ifnofpu(dc)) {
5626                         goto jmp_insn;
5627                     }
5628                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5629                     break;
5630                 case 0x3e: /* V9 casxa */
5631                     rs2 = GET_FIELD(insn, 27, 31);
5632                     cpu_src2 = gen_load_gpr(dc, rs2);
5633                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5634                     break;
5635 #else
5636                 case 0x34: /* stc */
5637                 case 0x35: /* stcsr */
5638                 case 0x36: /* stdcq */
5639                 case 0x37: /* stdc */
5640                     goto ncp_insn;
5641 #endif
5642 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5643                 case 0x3c: /* V9 or LEON3 casa */
5644 #ifndef TARGET_SPARC64
5645                     CHECK_IU_FEATURE(dc, CASA);
5646 #endif
5647                     rs2 = GET_FIELD(insn, 27, 31);
5648                     cpu_src2 = gen_load_gpr(dc, rs2);
5649                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5650                     break;
5651 #endif
5652                 default:
5653                     goto illegal_insn;
5654                 }
5655             } else {
5656                 goto illegal_insn;
5657             }
5658         }
5659         break;
5660     }
5661     /* default case for non jump instructions */
5662     if (dc->npc == DYNAMIC_PC) {
5663         dc->pc = DYNAMIC_PC;
5664         gen_op_next_insn();
5665     } else if (dc->npc == JUMP_PC) {
5666         /* we can do a static jump */
5667         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5668         dc->is_br = 1;
5669     } else {
5670         dc->pc = dc->npc;
5671         dc->npc = dc->npc + 4;
5672     }
5673  jmp_insn:
5674     goto egress;
5675  illegal_insn:
5676     gen_exception(dc, TT_ILL_INSN);
5677     goto egress;
5678  unimp_flush:
5679     gen_exception(dc, TT_UNIMP_FLUSH);
5680     goto egress;
5681 #if !defined(CONFIG_USER_ONLY)
5682  priv_insn:
5683     gen_exception(dc, TT_PRIV_INSN);
5684     goto egress;
5685 #endif
5686  nfpu_insn:
5687     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5688     goto egress;
5689 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5690  nfq_insn:
5691     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5692     goto egress;
5693 #endif
5694 #ifndef TARGET_SPARC64
5695  ncp_insn:
5696     gen_exception(dc, TT_NCP_INSN);
5697     goto egress;
5698 #endif
5699  egress:
5700     if (dc->n_t32 != 0) {
5701         int i;
5702         for (i = dc->n_t32 - 1; i >= 0; --i) {
5703             tcg_temp_free_i32(dc->t32[i]);
5704         }
5705         dc->n_t32 = 0;
5706     }
5707     if (dc->n_ttl != 0) {
5708         int i;
5709         for (i = dc->n_ttl - 1; i >= 0; --i) {
5710             tcg_temp_free(dc->ttl[i]);
5711         }
5712         dc->n_ttl = 0;
5713     }
5714 }
5715
/* Translate a block of guest SPARC code starting at tb->pc into TCG ops.
 *
 * Translation stops when the decoder signals a branch (dc->is_br), when
 * the PC becomes non-sequential or crosses a page boundary, in
 * single-step mode, or when the op buffer / insn budget is exhausted.
 * On exit, fills in tb->size and tb->icount.
 */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    SPARCCPU *cpu = sparc_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    target_ulong pc_start, last_pc;
    DisasContext dc1, *dc = &dc1;
    int num_insns;
    int max_insns;
    unsigned int insn;

    /* Initialize the per-translation decoder state from the TB flags.  */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* SPARC keeps the next PC (delay-slot semantics) in cs_base.  */
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = tb->flags & TB_FLAG_MMU_MASK;
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (cs->singlestep_enabled || singlestep);
#ifndef CONFIG_USER_ONLY
    dc->supervisor = (tb->flags & TB_FLAG_SUPER) != 0;
#endif
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    dc->asi = (tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#ifndef CONFIG_USER_ONLY
    dc->hypervisor = (tb->flags & TB_FLAG_HYPER) != 0;
#endif
#endif

    /* Clamp the instruction budget: cflags may request an exact count
       (icount mode); otherwise use the TCG-imposed maximum.  */
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    gen_tb_start(tb);
    do {
        /* Record (pc, npc) for restore_state_to_opc.  When npc is the
           conditional JUMP_PC pseudo-value, encode the taken target
           (jump_pc[0]) tagged with JUMP_PC; the not-taken target is
           implicitly pc + 4 (asserted below).  */
        if (dc->npc & JUMP_PC) {
            assert(dc->jump_pc[1] == dc->pc + 4);
            tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
        } else {
            tcg_gen_insn_start(dc->pc, dc->npc);
        }
        num_insns++;
        last_pc = dc->pc;

        /* Stop translation at breakpoints and trap into the debugger.  */
        if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
            if (dc->pc != pc_start) {
                save_state(dc);
            }
            gen_helper_debug(cpu_env);
            tcg_gen_exit_tb(0);
            dc->is_br = 1;
            goto exit_gen_loop;
        }

        /* In icount mode, bracket the final (possibly I/O) insn.  */
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        insn = cpu_ldl_code(env, dc->pc);

        disas_sparc_insn(dc, insn);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while (!tcg_op_buf_full() &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    /* If the decoder did not already close the TB, emit the exit:
       chain directly when both PC and NPC are statically known,
       otherwise materialize them and take the slow exit.  */
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    gen_tb_end(tb, num_insns);

    tb->size = last_pc + 4 - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    /* Dump the guest assembly that was translated, if logging asks.  */
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log_lock();
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
        qemu_log_unlock();
    }
#endif
}
5839
5840 void gen_intermediate_code_init(CPUSPARCState *env)
5841 {
5842     static int inited;
5843     static const char gregnames[32][4] = {
5844         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5845         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5846         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5847         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5848     };
5849     static const char fregnames[32][4] = {
5850         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5851         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5852         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5853         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5854     };
5855
5856     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5857 #ifdef TARGET_SPARC64
5858         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5859         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5860 #else
5861         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5862 #endif
5863         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5864         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5865     };
5866
5867     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5868 #ifdef TARGET_SPARC64
5869         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5870         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5871         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5872         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5873           "hstick_cmpr" },
5874         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5875         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5876         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5877         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5878         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5879 #endif
5880         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5881         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5882         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5883         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5884         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5885         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5886         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5887         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5888 #ifndef CONFIG_USER_ONLY
5889         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5890 #endif
5891     };
5892
5893     unsigned int i;
5894
5895     /* init various static tables */
5896     if (inited) {
5897         return;
5898     }
5899     inited = 1;
5900
5901     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5902     tcg_ctx.tcg_env = cpu_env;
5903
5904     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5905                                          offsetof(CPUSPARCState, regwptr),
5906                                          "regwptr");
5907
5908     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5909         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5910     }
5911
5912     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5913         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5914     }
5915
5916     TCGV_UNUSED(cpu_regs[0]);
5917     for (i = 1; i < 8; ++i) {
5918         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5919                                          offsetof(CPUSPARCState, gregs[i]),
5920                                          gregnames[i]);
5921     }
5922
5923     for (i = 8; i < 32; ++i) {
5924         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5925                                          (i - 8) * sizeof(target_ulong),
5926                                          gregnames[i]);
5927     }
5928
5929     for (i = 0; i < TARGET_DPREGS; i++) {
5930         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5931                                             offsetof(CPUSPARCState, fpr[i]),
5932                                             fregnames[i]);
5933     }
5934 }
5935
5936 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
5937                           target_ulong *data)
5938 {
5939     target_ulong pc = data[0];
5940     target_ulong npc = data[1];
5941
5942     env->pc = pc;
5943     if (npc == DYNAMIC_PC) {
5944         /* dynamic NPC: already stored */
5945     } else if (npc & JUMP_PC) {
5946         /* jump PC: use 'cond' and the jump targets of the translation */
5947         if (env->cond) {
5948             env->npc = npc & ~3;
5949         } else {
5950             env->npc = pc + 4;
5951         }
5952     } else {
5953         env->npc = npc;
5954     }
5955 }
This page took 0.348974 seconds and 4 git commands to generate.