[qemu.git] / target-sparc / translate.c
1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <[email protected]>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include "qemu/osdep.h"
22
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg-op.h"
28 #include "exec/cpu_ldst.h"
29
30 #include "exec/helper-gen.h"
31
32 #include "trace-tcg.h"
33 #include "exec/log.h"
34
35
36 #define DEBUG_DISAS
37
38 #define DYNAMIC_PC  1 /* dynamic pc value */
39 #define JUMP_PC     2 /* dynamic pc value which takes only two values
40                          according to jump_pc[T2] */
41
42 /* global register indexes */
43 static TCGv_env cpu_env;
44 static TCGv_ptr cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
46 static TCGv_i32 cpu_cc_op;
47 static TCGv_i32 cpu_psr;
48 static TCGv cpu_fsr, cpu_pc, cpu_npc;
49 static TCGv cpu_regs[32];
50 static TCGv cpu_y;
51 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_tbr;
53 #endif
54 static TCGv cpu_cond;
55 #ifdef TARGET_SPARC64
56 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
57 static TCGv cpu_gsr;
58 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
59 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
60 static TCGv_i32 cpu_softint;
61 #else
62 static TCGv cpu_wim;
63 #endif
64 /* Floating point registers */
65 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
66
67 #include "exec/gen-icount.h"
68
69 typedef struct DisasContext {
70     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
71     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
72     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
73     int is_br;
74     int mem_idx;
75     int fpu_enabled;
76     int address_mask_32bit;
77     int singlestep;
78     uint32_t cc_op;  /* current CC operation */
79     struct TranslationBlock *tb;
80     sparc_def_t *def;
81     TCGv_i32 t32[3];
82     TCGv ttl[5];
83     int n_t32;
84     int n_ttl;
85 } DisasContext;
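/* The t32[] and ttl[] arrays above collect the scratch temporaries handed out
   by get_temp_i32()/get_temp_tl() below, so that they can all be released in
   one go once the current instruction has been translated. */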
86
87 typedef struct {
88     TCGCond cond;
89     bool is_bool;
90     bool g1, g2;   /* when set, c1/c2 are globals (e.g. cpu_cc_dst) that free_compare() must not free */
91     TCGv c1, c2;
92 } DisasCompare;
93
94 // This macro uses non-native bit order: field position 0 is the MSB (bit 31)
95 #define GET_FIELD(X, FROM, TO)                                  \
96     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
97
98 // This macro uses the bit order from the manuals, i.e. bit 0 is 2^0 (the LSB)
99 #define GET_FIELD_SP(X, FROM, TO)               \
100     GET_FIELD(X, 31 - (TO), 31 - (FROM))
101
102 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
103 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
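/* Worked examples (illustrative, not part of the original source):
   GET_FIELD(insn, 3, 6) counts from the MSB and extracts instruction bits
   28..25 (the branch condition field), while GET_FIELD_SP(insn, 25, 27)
   follows the manuals' numbering and extracts bits 27..25. */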
104
105 #ifdef TARGET_SPARC64
106 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
107 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
108 #else
109 #define DFPREG(r) (r & 0x1e)
110 #define QFPREG(r) (r & 0x1c)
111 #endif
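/* Example of the encoding handled above: on sparc64 the low bit of the 5-bit
   register field becomes bit 5 of the double/quad register number, so a field
   value of 1 yields DFPREG(1) == 32; on 32-bit targets the low bit is simply
   masked off. */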
112
113 #define UA2005_HTRAP_MASK 0xff
114 #define V8_TRAP_MASK 0x7f
115
116 static int sign_extend(int x, int len)
117 {
118     len = 32 - len;
119     return (x << len) >> len;
120 }
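/* e.g. sign_extend(0x1fff, 13) == -1: the value is shifted up so that its
   sign bit lands in bit 31 and then arithmetically shifted back down. */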
121
122 #define IS_IMM (insn & (1<<13))
123
124 static inline TCGv_i32 get_temp_i32(DisasContext *dc)
125 {
126     TCGv_i32 t;
127     assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
128     dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
129     return t;
130 }
131
132 static inline TCGv get_temp_tl(DisasContext *dc)
133 {
134     TCGv t;
135     assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
136     dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
137     return t;
138 }
139
140 static inline void gen_update_fprs_dirty(int rd)
141 {
142 #if defined(TARGET_SPARC64)
143     tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
144 #endif
145 }
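/* On V9, FPRS bit 0 (DL) marks the lower half of the FP register file dirty
   and bit 1 (DU) the upper half; on 32-bit targets this is a no-op. */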
146
147 /* floating point register moves */
148 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
149 {
150 #if TCG_TARGET_REG_BITS == 32
151     if (src & 1) {
152         return TCGV_LOW(cpu_fpr[src / 2]);
153     } else {
154         return TCGV_HIGH(cpu_fpr[src / 2]);
155     }
156 #else
157     if (src & 1) {
158         return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
159     } else {
160         TCGv_i32 ret = get_temp_i32(dc);
161         TCGv_i64 t = tcg_temp_new_i64();
162
163         tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
164         tcg_gen_extrl_i64_i32(ret, t);
165         tcg_temp_free_i64(t);
166
167         return ret;
168     }
169 #endif
170 }
171
172 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
173 {
174 #if TCG_TARGET_REG_BITS == 32
175     if (dst & 1) {
176         tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
177     } else {
178         tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
179     }
180 #else
181     TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
182     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
183                         (dst & 1 ? 0 : 32), 32);
184 #endif
185     gen_update_fprs_dirty(dst);
186 }
187
188 static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
189 {
190     return get_temp_i32(dc);
191 }
192
193 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
194 {
195     src = DFPREG(src);
196     return cpu_fpr[src / 2];
197 }
198
199 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
200 {
201     dst = DFPREG(dst);
202     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
203     gen_update_fprs_dirty(dst);
204 }
205
206 static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
207 {
208     return cpu_fpr[DFPREG(dst) / 2];
209 }
210
211 static void gen_op_load_fpr_QT0(unsigned int src)
212 {
213     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
214                    offsetof(CPU_QuadU, ll.upper));
215     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
216                    offsetof(CPU_QuadU, ll.lower));
217 }
218
219 static void gen_op_load_fpr_QT1(unsigned int src)
220 {
221     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
222                    offsetof(CPU_QuadU, ll.upper));
223     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
224                    offsetof(CPU_QuadU, ll.lower));
225 }
226
227 static void gen_op_store_QT0_fpr(unsigned int dst)
228 {
229     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
230                    offsetof(CPU_QuadU, ll.upper));
231     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
232                    offsetof(CPU_QuadU, ll.lower));
233 }
234
235 #ifdef TARGET_SPARC64
236 static void gen_move_Q(unsigned int rd, unsigned int rs)
237 {
238     rd = QFPREG(rd);
239     rs = QFPREG(rs);
240
241     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
242     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
243     gen_update_fprs_dirty(rd);
244 }
245 #endif
246
247 /* moves */
248 #ifdef CONFIG_USER_ONLY
249 #define supervisor(dc) 0
250 #ifdef TARGET_SPARC64
251 #define hypervisor(dc) 0
252 #endif
253 #else
254 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
255 #ifdef TARGET_SPARC64
256 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
257 #else
258 #endif
259 #endif
260
261 #ifdef TARGET_SPARC64
262 #ifndef TARGET_ABI32
263 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
264 #else
265 #define AM_CHECK(dc) (1)
266 #endif
267 #endif
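/* AM_CHECK is true whenever 64-bit addresses must be truncated to 32 bits:
   when PSTATE.AM is set, or unconditionally for the 32-bit user ABI. */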
268
269 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
270 {
271 #ifdef TARGET_SPARC64
272     if (AM_CHECK(dc))
273         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
274 #endif
275 }
276
277 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
278 {
279     if (reg > 0) {
280         assert(reg < 32);
281         return cpu_regs[reg];
282     } else {
283         TCGv t = get_temp_tl(dc);
284         tcg_gen_movi_tl(t, 0);
285         return t;
286     }
287 }
288
289 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
290 {
291     if (reg > 0) {
292         assert(reg < 32);
293         tcg_gen_mov_tl(cpu_regs[reg], v);
294     }
295 }
296
297 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
298 {
299     if (reg > 0) {
300         assert(reg < 32);
301         return cpu_regs[reg];
302     } else {
303         return get_temp_tl(dc);
304     }
305 }
306
307 static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
308                                target_ulong npc)
309 {
310     if (unlikely(s->singlestep)) {
311         return false;
312     }
313
314 #ifndef CONFIG_USER_ONLY
315     return (pc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK) &&
316            (npc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK);
317 #else
318     return true;
319 #endif
320 }
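/* Rationale: direct TB chaining is only used while both pc and npc stay on
   the same guest page as the current TB, so that the link disappears together
   with the TB when that page is flushed; otherwise gen_goto_tb() below falls
   back to exit_tb(0) and a fresh TB lookup. */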
321
322 static inline void gen_goto_tb(DisasContext *s, int tb_num,
323                                target_ulong pc, target_ulong npc)
324 {
325     if (use_goto_tb(s, pc, npc))  {
326         /* jump to same page: we can use a direct jump */
327         tcg_gen_goto_tb(tb_num);
328         tcg_gen_movi_tl(cpu_pc, pc);
329         tcg_gen_movi_tl(cpu_npc, npc);
330         tcg_gen_exit_tb((uintptr_t)s->tb + tb_num);
331     } else {
332         /* jump to another page: currently not optimized */
333         tcg_gen_movi_tl(cpu_pc, pc);
334         tcg_gen_movi_tl(cpu_npc, npc);
335         tcg_gen_exit_tb(0);
336     }
337 }
338
339 // XXX suboptimal
340 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
341 {
342     tcg_gen_extu_i32_tl(reg, src);
343     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
344     tcg_gen_andi_tl(reg, reg, 0x1);
345 }
346
347 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
348 {
349     tcg_gen_extu_i32_tl(reg, src);
350     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
351     tcg_gen_andi_tl(reg, reg, 0x1);
352 }
353
354 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
355 {
356     tcg_gen_extu_i32_tl(reg, src);
357     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
358     tcg_gen_andi_tl(reg, reg, 0x1);
359 }
360
361 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
362 {
363     tcg_gen_extu_i32_tl(reg, src);
364     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
365     tcg_gen_andi_tl(reg, reg, 0x1);
366 }
367
368 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
369 {
370     tcg_gen_mov_tl(cpu_cc_src, src1);
371     tcg_gen_mov_tl(cpu_cc_src2, src2);
372     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
373     tcg_gen_mov_tl(dst, cpu_cc_dst);
374 }
375
376 static TCGv_i32 gen_add32_carry32(void)
377 {
378     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
379
380     /* Carry is computed from a previous add: (dst < src)  */
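    /* Illustration: a 32-bit add of 0xffffffff + 2 wraps around to 1; the
       result is (unsigned) less than either operand, so the setcond below
       recovers the carry-out of that earlier addition. */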
381 #if TARGET_LONG_BITS == 64
382     cc_src1_32 = tcg_temp_new_i32();
383     cc_src2_32 = tcg_temp_new_i32();
384     tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
385     tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
386 #else
387     cc_src1_32 = cpu_cc_dst;
388     cc_src2_32 = cpu_cc_src;
389 #endif
390
391     carry_32 = tcg_temp_new_i32();
392     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
393
394 #if TARGET_LONG_BITS == 64
395     tcg_temp_free_i32(cc_src1_32);
396     tcg_temp_free_i32(cc_src2_32);
397 #endif
398
399     return carry_32;
400 }
401
402 static TCGv_i32 gen_sub32_carry32(void)
403 {
404     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
405
406     /* Carry is computed from a previous borrow: (src1 < src2)  */
407 #if TARGET_LONG_BITS == 64
408     cc_src1_32 = tcg_temp_new_i32();
409     cc_src2_32 = tcg_temp_new_i32();
410     tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
411     tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
412 #else
413     cc_src1_32 = cpu_cc_src;
414     cc_src2_32 = cpu_cc_src2;
415 #endif
416
417     carry_32 = tcg_temp_new_i32();
418     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
419
420 #if TARGET_LONG_BITS == 64
421     tcg_temp_free_i32(cc_src1_32);
422     tcg_temp_free_i32(cc_src2_32);
423 #endif
424
425     return carry_32;
426 }
427
428 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
429                             TCGv src2, int update_cc)
430 {
431     TCGv_i32 carry_32;
432     TCGv carry;
433
434     switch (dc->cc_op) {
435     case CC_OP_DIV:
436     case CC_OP_LOGIC:
437         /* Carry is known to be zero.  Fall back to plain ADD.  */
438         if (update_cc) {
439             gen_op_add_cc(dst, src1, src2);
440         } else {
441             tcg_gen_add_tl(dst, src1, src2);
442         }
443         return;
444
445     case CC_OP_ADD:
446     case CC_OP_TADD:
447     case CC_OP_TADDTV:
448         if (TARGET_LONG_BITS == 32) {
449             /* We can re-use the host's hardware carry generation by using
450                an ADD2 opcode.  We discard the low part of the output.
451                Ideally we'd combine this operation with the add that
452                generated the carry in the first place.  */
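            /* Concretely: the low halves replay cpu_cc_src + cpu_cc_src2 (the
               operands of the flag-setting add) and their carry-out
               propagates into the high-half sum src1 + src2, which is exactly
               what ADDX needs; the recomputed low sum is then discarded. */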
453             carry = tcg_temp_new();
454             tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
455             tcg_temp_free(carry);
456             goto add_done;
457         }
458         carry_32 = gen_add32_carry32();
459         break;
460
461     case CC_OP_SUB:
462     case CC_OP_TSUB:
463     case CC_OP_TSUBTV:
464         carry_32 = gen_sub32_carry32();
465         break;
466
467     default:
468         /* We need external help to produce the carry.  */
469         carry_32 = tcg_temp_new_i32();
470         gen_helper_compute_C_icc(carry_32, cpu_env);
471         break;
472     }
473
474 #if TARGET_LONG_BITS == 64
475     carry = tcg_temp_new();
476     tcg_gen_extu_i32_i64(carry, carry_32);
477 #else
478     carry = carry_32;
479 #endif
480
481     tcg_gen_add_tl(dst, src1, src2);
482     tcg_gen_add_tl(dst, dst, carry);
483
484     tcg_temp_free_i32(carry_32);
485 #if TARGET_LONG_BITS == 64
486     tcg_temp_free(carry);
487 #endif
488
489  add_done:
490     if (update_cc) {
491         tcg_gen_mov_tl(cpu_cc_src, src1);
492         tcg_gen_mov_tl(cpu_cc_src2, src2);
493         tcg_gen_mov_tl(cpu_cc_dst, dst);
494         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
495         dc->cc_op = CC_OP_ADDX;
496     }
497 }
498
499 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
500 {
501     tcg_gen_mov_tl(cpu_cc_src, src1);
502     tcg_gen_mov_tl(cpu_cc_src2, src2);
503     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
504     tcg_gen_mov_tl(dst, cpu_cc_dst);
505 }
506
507 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
508                             TCGv src2, int update_cc)
509 {
510     TCGv_i32 carry_32;
511     TCGv carry;
512
513     switch (dc->cc_op) {
514     case CC_OP_DIV:
515     case CC_OP_LOGIC:
516         /* Carry is known to be zero.  Fall back to plain SUB.  */
517         if (update_cc) {
518             gen_op_sub_cc(dst, src1, src2);
519         } else {
520             tcg_gen_sub_tl(dst, src1, src2);
521         }
522         return;
523
524     case CC_OP_ADD:
525     case CC_OP_TADD:
526     case CC_OP_TADDTV:
527         carry_32 = gen_add32_carry32();
528         break;
529
530     case CC_OP_SUB:
531     case CC_OP_TSUB:
532     case CC_OP_TSUBTV:
533         if (TARGET_LONG_BITS == 32) {
534             /* We can re-use the host's hardware carry generation by using
535                a SUB2 opcode.  We discard the low part of the output.
536                Ideally we'd combine this operation with the subtraction that
537                generated the carry in the first place.  */
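            /* As in the ADDX case above: the low halves replay the
               flag-setting subtraction so that its borrow propagates into the
               high-half difference src1 - src2; the low result is then
               discarded. */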
538             carry = tcg_temp_new();
539             tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
540             tcg_temp_free(carry);
541             goto sub_done;
542         }
543         carry_32 = gen_sub32_carry32();
544         break;
545
546     default:
547         /* We need external help to produce the carry.  */
548         carry_32 = tcg_temp_new_i32();
549         gen_helper_compute_C_icc(carry_32, cpu_env);
550         break;
551     }
552
553 #if TARGET_LONG_BITS == 64
554     carry = tcg_temp_new();
555     tcg_gen_extu_i32_i64(carry, carry_32);
556 #else
557     carry = carry_32;
558 #endif
559
560     tcg_gen_sub_tl(dst, src1, src2);
561     tcg_gen_sub_tl(dst, dst, carry);
562
563     tcg_temp_free_i32(carry_32);
564 #if TARGET_LONG_BITS == 64
565     tcg_temp_free(carry);
566 #endif
567
568  sub_done:
569     if (update_cc) {
570         tcg_gen_mov_tl(cpu_cc_src, src1);
571         tcg_gen_mov_tl(cpu_cc_src2, src2);
572         tcg_gen_mov_tl(cpu_cc_dst, dst);
573         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
574         dc->cc_op = CC_OP_SUBX;
575     }
576 }
577
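/* One step of the V8 MULScc multiply-step instruction: the low bit of %y
   decides whether the rs2 operand is added at all, rs1 is shifted right one
   bit with (N xor V) entering at the top, and the old low bit of rs1 is
   shifted into the top of %y. */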
578 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
579 {
580     TCGv r_temp, zero, t0;
581
582     r_temp = tcg_temp_new();
583     t0 = tcg_temp_new();
584
585     /* old op:
586     if (!(env->y & 1))
587         T1 = 0;
588     */
589     zero = tcg_const_tl(0);
590     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
591     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
592     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
593     tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
594                        zero, cpu_cc_src2);
595     tcg_temp_free(zero);
596
597     // b2 = T0 & 1;
598     // env->y = (b2 << 31) | (env->y >> 1);
599     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
600     tcg_gen_shli_tl(r_temp, r_temp, 31);
601     tcg_gen_shri_tl(t0, cpu_y, 1);
602     tcg_gen_andi_tl(t0, t0, 0x7fffffff);
603     tcg_gen_or_tl(t0, t0, r_temp);
604     tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);
605
606     // b1 = N ^ V;
607     gen_mov_reg_N(t0, cpu_psr);
608     gen_mov_reg_V(r_temp, cpu_psr);
609     tcg_gen_xor_tl(t0, t0, r_temp);
610     tcg_temp_free(r_temp);
611
612     // T0 = (b1 << 31) | (T0 >> 1);
613     // src1 = T0;
614     tcg_gen_shli_tl(t0, t0, 31);
615     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
616     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
617     tcg_temp_free(t0);
618
619     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
620
621     tcg_gen_mov_tl(dst, cpu_cc_dst);
622 }
623
624 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
625 {
626 #if TARGET_LONG_BITS == 32
627     if (sign_ext) {
628         tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
629     } else {
630         tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
631     }
632 #else
633     TCGv t0 = tcg_temp_new_i64();
634     TCGv t1 = tcg_temp_new_i64();
635
636     if (sign_ext) {
637         tcg_gen_ext32s_i64(t0, src1);
638         tcg_gen_ext32s_i64(t1, src2);
639     } else {
640         tcg_gen_ext32u_i64(t0, src1);
641         tcg_gen_ext32u_i64(t1, src2);
642     }
643
644     tcg_gen_mul_i64(dst, t0, t1);
645     tcg_temp_free(t0);
646     tcg_temp_free(t1);
647
648     tcg_gen_shri_i64(cpu_y, dst, 32);
649 #endif
650 }
651
652 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
653 {
654     /* zero-extend truncated operands before multiplication */
655     gen_op_multiply(dst, src1, src2, 0);
656 }
657
658 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
659 {
660     /* sign-extend truncated operands before multiplication */
661     gen_op_multiply(dst, src1, src2, 1);
662 }
663
664 // 1
665 static inline void gen_op_eval_ba(TCGv dst)
666 {
667     tcg_gen_movi_tl(dst, 1);
668 }
669
670 // Z
671 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
672 {
673     gen_mov_reg_Z(dst, src);
674 }
675
676 // Z | (N ^ V)
677 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
678 {
679     TCGv t0 = tcg_temp_new();
680     gen_mov_reg_N(t0, src);
681     gen_mov_reg_V(dst, src);
682     tcg_gen_xor_tl(dst, dst, t0);
683     gen_mov_reg_Z(t0, src);
684     tcg_gen_or_tl(dst, dst, t0);
685     tcg_temp_free(t0);
686 }
687
688 // N ^ V
689 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
690 {
691     TCGv t0 = tcg_temp_new();
692     gen_mov_reg_V(t0, src);
693     gen_mov_reg_N(dst, src);
694     tcg_gen_xor_tl(dst, dst, t0);
695     tcg_temp_free(t0);
696 }
697
698 // C | Z
699 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
700 {
701     TCGv t0 = tcg_temp_new();
702     gen_mov_reg_Z(t0, src);
703     gen_mov_reg_C(dst, src);
704     tcg_gen_or_tl(dst, dst, t0);
705     tcg_temp_free(t0);
706 }
707
708 // C
709 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
710 {
711     gen_mov_reg_C(dst, src);
712 }
713
714 // V
715 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
716 {
717     gen_mov_reg_V(dst, src);
718 }
719
720 // 0
721 static inline void gen_op_eval_bn(TCGv dst)
722 {
723     tcg_gen_movi_tl(dst, 0);
724 }
725
726 // N
727 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
728 {
729     gen_mov_reg_N(dst, src);
730 }
731
732 // !Z
733 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
734 {
735     gen_mov_reg_Z(dst, src);
736     tcg_gen_xori_tl(dst, dst, 0x1);
737 }
738
739 // !(Z | (N ^ V))
740 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
741 {
742     gen_op_eval_ble(dst, src);
743     tcg_gen_xori_tl(dst, dst, 0x1);
744 }
745
746 // !(N ^ V)
747 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
748 {
749     gen_op_eval_bl(dst, src);
750     tcg_gen_xori_tl(dst, dst, 0x1);
751 }
752
753 // !(C | Z)
754 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
755 {
756     gen_op_eval_bleu(dst, src);
757     tcg_gen_xori_tl(dst, dst, 0x1);
758 }
759
760 // !C
761 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
762 {
763     gen_mov_reg_C(dst, src);
764     tcg_gen_xori_tl(dst, dst, 0x1);
765 }
766
767 // !N
768 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
769 {
770     gen_mov_reg_N(dst, src);
771     tcg_gen_xori_tl(dst, dst, 0x1);
772 }
773
774 // !V
775 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
776 {
777     gen_mov_reg_V(dst, src);
778     tcg_gen_xori_tl(dst, dst, 0x1);
779 }
780
781 /*
782    FPSR bit field FCC1 | FCC0 (result of a floating-point compare):
783     0 : equal
784     1 : less than
785     2 : greater than
786     3 : unordered
787 */
788 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
789                                     unsigned int fcc_offset)
790 {
791     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
792     tcg_gen_andi_tl(reg, reg, 0x1);
793 }
794
795 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
796                                     unsigned int fcc_offset)
797 {
798     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
799     tcg_gen_andi_tl(reg, reg, 0x1);
800 }
801
802 // !0: FCC0 | FCC1
803 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
804                                     unsigned int fcc_offset)
805 {
806     TCGv t0 = tcg_temp_new();
807     gen_mov_reg_FCC0(dst, src, fcc_offset);
808     gen_mov_reg_FCC1(t0, src, fcc_offset);
809     tcg_gen_or_tl(dst, dst, t0);
810     tcg_temp_free(t0);
811 }
812
813 // 1 or 2: FCC0 ^ FCC1
814 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
815                                     unsigned int fcc_offset)
816 {
817     TCGv t0 = tcg_temp_new();
818     gen_mov_reg_FCC0(dst, src, fcc_offset);
819     gen_mov_reg_FCC1(t0, src, fcc_offset);
820     tcg_gen_xor_tl(dst, dst, t0);
821     tcg_temp_free(t0);
822 }
823
824 // 1 or 3: FCC0
825 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
826                                     unsigned int fcc_offset)
827 {
828     gen_mov_reg_FCC0(dst, src, fcc_offset);
829 }
830
831 // 1: FCC0 & !FCC1
832 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
833                                     unsigned int fcc_offset)
834 {
835     TCGv t0 = tcg_temp_new();
836     gen_mov_reg_FCC0(dst, src, fcc_offset);
837     gen_mov_reg_FCC1(t0, src, fcc_offset);
838     tcg_gen_andc_tl(dst, dst, t0);
839     tcg_temp_free(t0);
840 }
841
842 // 2 or 3: FCC1
843 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
844                                     unsigned int fcc_offset)
845 {
846     gen_mov_reg_FCC1(dst, src, fcc_offset);
847 }
848
849 // 2: !FCC0 & FCC1
850 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
851                                     unsigned int fcc_offset)
852 {
853     TCGv t0 = tcg_temp_new();
854     gen_mov_reg_FCC0(dst, src, fcc_offset);
855     gen_mov_reg_FCC1(t0, src, fcc_offset);
856     tcg_gen_andc_tl(dst, t0, dst);
857     tcg_temp_free(t0);
858 }
859
860 // 3: FCC0 & FCC1
861 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
862                                     unsigned int fcc_offset)
863 {
864     TCGv t0 = tcg_temp_new();
865     gen_mov_reg_FCC0(dst, src, fcc_offset);
866     gen_mov_reg_FCC1(t0, src, fcc_offset);
867     tcg_gen_and_tl(dst, dst, t0);
868     tcg_temp_free(t0);
869 }
870
871 // 0: !(FCC0 | FCC1)
872 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
873                                     unsigned int fcc_offset)
874 {
875     TCGv t0 = tcg_temp_new();
876     gen_mov_reg_FCC0(dst, src, fcc_offset);
877     gen_mov_reg_FCC1(t0, src, fcc_offset);
878     tcg_gen_or_tl(dst, dst, t0);
879     tcg_gen_xori_tl(dst, dst, 0x1);
880     tcg_temp_free(t0);
881 }
882
883 // 0 or 3: !(FCC0 ^ FCC1)
884 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
885                                     unsigned int fcc_offset)
886 {
887     TCGv t0 = tcg_temp_new();
888     gen_mov_reg_FCC0(dst, src, fcc_offset);
889     gen_mov_reg_FCC1(t0, src, fcc_offset);
890     tcg_gen_xor_tl(dst, dst, t0);
891     tcg_gen_xori_tl(dst, dst, 0x1);
892     tcg_temp_free(t0);
893 }
894
895 // 0 or 2: !FCC0
896 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
897                                     unsigned int fcc_offset)
898 {
899     gen_mov_reg_FCC0(dst, src, fcc_offset);
900     tcg_gen_xori_tl(dst, dst, 0x1);
901 }
902
903 // !1: !(FCC0 & !FCC1)
904 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
905                                     unsigned int fcc_offset)
906 {
907     TCGv t0 = tcg_temp_new();
908     gen_mov_reg_FCC0(dst, src, fcc_offset);
909     gen_mov_reg_FCC1(t0, src, fcc_offset);
910     tcg_gen_andc_tl(dst, dst, t0);
911     tcg_gen_xori_tl(dst, dst, 0x1);
912     tcg_temp_free(t0);
913 }
914
915 // 0 or 1: !FCC1
916 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
917                                     unsigned int fcc_offset)
918 {
919     gen_mov_reg_FCC1(dst, src, fcc_offset);
920     tcg_gen_xori_tl(dst, dst, 0x1);
921 }
922
923 // !2: !(!FCC0 & FCC1)
924 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
925                                     unsigned int fcc_offset)
926 {
927     TCGv t0 = tcg_temp_new();
928     gen_mov_reg_FCC0(dst, src, fcc_offset);
929     gen_mov_reg_FCC1(t0, src, fcc_offset);
930     tcg_gen_andc_tl(dst, t0, dst);
931     tcg_gen_xori_tl(dst, dst, 0x1);
932     tcg_temp_free(t0);
933 }
934
935 // !3: !(FCC0 & FCC1)
936 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
937                                     unsigned int fcc_offset)
938 {
939     TCGv t0 = tcg_temp_new();
940     gen_mov_reg_FCC0(dst, src, fcc_offset);
941     gen_mov_reg_FCC1(t0, src, fcc_offset);
942     tcg_gen_and_tl(dst, dst, t0);
943     tcg_gen_xori_tl(dst, dst, 0x1);
944     tcg_temp_free(t0);
945 }
946
947 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
948                                target_ulong pc2, TCGv r_cond)
949 {
950     TCGLabel *l1 = gen_new_label();
951
952     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
953
954     gen_goto_tb(dc, 0, pc1, pc1 + 4);
955
956     gen_set_label(l1);
957     gen_goto_tb(dc, 1, pc2, pc2 + 4);
958 }
959
960 static void gen_branch_a(DisasContext *dc, target_ulong pc1)
961 {
962     TCGLabel *l1 = gen_new_label();
963     target_ulong npc = dc->npc;
964
965     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);
966
967     gen_goto_tb(dc, 0, npc, pc1);
968
969     gen_set_label(l1);
970     gen_goto_tb(dc, 1, npc + 4, npc + 8);
971
972     dc->is_br = 1;
973 }
974
975 static void gen_branch_n(DisasContext *dc, target_ulong pc1)
976 {
977     target_ulong npc = dc->npc;
978
979     if (likely(npc != DYNAMIC_PC)) {
980         dc->pc = npc;
981         dc->jump_pc[0] = pc1;
982         dc->jump_pc[1] = npc + 4;
983         dc->npc = JUMP_PC;
984     } else {
985         TCGv t, z;
986
987         tcg_gen_mov_tl(cpu_pc, cpu_npc);
988
989         tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
990         t = tcg_const_tl(pc1);
991         z = tcg_const_tl(0);
992         tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
993         tcg_temp_free(t);
994         tcg_temp_free(z);
995
996         dc->pc = DYNAMIC_PC;
997     }
998 }
999
1000 static inline void gen_generic_branch(DisasContext *dc)
1001 {
1002     TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
1003     TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
1004     TCGv zero = tcg_const_tl(0);
1005
1006     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
1007
1008     tcg_temp_free(npc0);
1009     tcg_temp_free(npc1);
1010     tcg_temp_free(zero);
1011 }
1012
1013 /* Call this function before using the condition register, as it may
1014    still hold the value staged for a pending conditional jump (JUMP_PC) */
1015 static inline void flush_cond(DisasContext *dc)
1016 {
1017     if (dc->npc == JUMP_PC) {
1018         gen_generic_branch(dc);
1019         dc->npc = DYNAMIC_PC;
1020     }
1021 }
1022
1023 static inline void save_npc(DisasContext *dc)
1024 {
1025     if (dc->npc == JUMP_PC) {
1026         gen_generic_branch(dc);
1027         dc->npc = DYNAMIC_PC;
1028     } else if (dc->npc != DYNAMIC_PC) {
1029         tcg_gen_movi_tl(cpu_npc, dc->npc);
1030     }
1031 }
1032
1033 static inline void update_psr(DisasContext *dc)
1034 {
1035     if (dc->cc_op != CC_OP_FLAGS) {
1036         dc->cc_op = CC_OP_FLAGS;
1037         gen_helper_compute_psr(cpu_env);
1038     }
1039 }
1040
1041 static inline void save_state(DisasContext *dc)
1042 {
1043     tcg_gen_movi_tl(cpu_pc, dc->pc);
1044     save_npc(dc);
1045 }
1046
1047 static inline void gen_mov_pc_npc(DisasContext *dc)
1048 {
1049     if (dc->npc == JUMP_PC) {
1050         gen_generic_branch(dc);
1051         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1052         dc->pc = DYNAMIC_PC;
1053     } else if (dc->npc == DYNAMIC_PC) {
1054         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1055         dc->pc = DYNAMIC_PC;
1056     } else {
1057         dc->pc = dc->npc;
1058     }
1059 }
1060
1061 static inline void gen_op_next_insn(void)
1062 {
1063     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1064     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1065 }
1066
1067 static void free_compare(DisasCompare *cmp)
1068 {
1069     if (!cmp->g1) {
1070         tcg_temp_free(cmp->c1);
1071     }
1072     if (!cmp->g2) {
1073         tcg_temp_free(cmp->c2);
1074     }
1075 }
1076
1077 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1078                         DisasContext *dc)
1079 {
1080     static int subcc_cond[16] = {
1081         TCG_COND_NEVER,
1082         TCG_COND_EQ,
1083         TCG_COND_LE,
1084         TCG_COND_LT,
1085         TCG_COND_LEU,
1086         TCG_COND_LTU,
1087         -1, /* neg */
1088         -1, /* overflow */
1089         TCG_COND_ALWAYS,
1090         TCG_COND_NE,
1091         TCG_COND_GT,
1092         TCG_COND_GE,
1093         TCG_COND_GTU,
1094         TCG_COND_GEU,
1095         -1, /* pos */
1096         -1, /* no overflow */
1097     };
1098
1099     static int logic_cond[16] = {
1100         TCG_COND_NEVER,
1101         TCG_COND_EQ,     /* eq:  Z */
1102         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1103         TCG_COND_LT,     /* lt:  N ^ V -> N */
1104         TCG_COND_EQ,     /* leu: C | Z -> Z */
1105         TCG_COND_NEVER,  /* ltu: C -> 0 */
1106         TCG_COND_LT,     /* neg: N */
1107         TCG_COND_NEVER,  /* vs:  V -> 0 */
1108         TCG_COND_ALWAYS,
1109         TCG_COND_NE,     /* ne:  !Z */
1110         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1111         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1112         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1113         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1114         TCG_COND_GE,     /* pos: !N */
1115         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1116     };
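    /* The mappings above rely on the fact that after a logical operation the
       carry and overflow flags are known to be zero, so every condition
       reduces to a signed comparison of cpu_cc_dst against zero. */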
1117
1118     TCGv_i32 r_src;
1119     TCGv r_dst;
1120
1121 #ifdef TARGET_SPARC64
1122     if (xcc) {
1123         r_src = cpu_xcc;
1124     } else {
1125         r_src = cpu_psr;
1126     }
1127 #else
1128     r_src = cpu_psr;
1129 #endif
1130
1131     switch (dc->cc_op) {
1132     case CC_OP_LOGIC:
1133         cmp->cond = logic_cond[cond];
1134     do_compare_dst_0:
1135         cmp->is_bool = false;
1136         cmp->g2 = false;
1137         cmp->c2 = tcg_const_tl(0);
1138 #ifdef TARGET_SPARC64
1139         if (!xcc) {
1140             cmp->g1 = false;
1141             cmp->c1 = tcg_temp_new();
1142             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1143             break;
1144         }
1145 #endif
1146         cmp->g1 = true;
1147         cmp->c1 = cpu_cc_dst;
1148         break;
1149
1150     case CC_OP_SUB:
1151         switch (cond) {
1152         case 6:  /* neg */
1153         case 14: /* pos */
1154             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1155             goto do_compare_dst_0;
1156
1157         case 7: /* overflow */
1158         case 15: /* !overflow */
1159             goto do_dynamic;
1160
1161         default:
1162             cmp->cond = subcc_cond[cond];
1163             cmp->is_bool = false;
1164 #ifdef TARGET_SPARC64
1165             if (!xcc) {
1166                 /* Note that sign-extension works for unsigned compares as
1167                    long as both operands are sign-extended.  */
1168                 cmp->g1 = cmp->g2 = false;
1169                 cmp->c1 = tcg_temp_new();
1170                 cmp->c2 = tcg_temp_new();
1171                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1172                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1173                 break;
1174             }
1175 #endif
1176             cmp->g1 = cmp->g2 = true;
1177             cmp->c1 = cpu_cc_src;
1178             cmp->c2 = cpu_cc_src2;
1179             break;
1180         }
1181         break;
1182
1183     default:
1184     do_dynamic:
1185         gen_helper_compute_psr(cpu_env);
1186         dc->cc_op = CC_OP_FLAGS;
1187         /* FALLTHRU */
1188
1189     case CC_OP_FLAGS:
1190         /* We're going to generate a boolean result.  */
1191         cmp->cond = TCG_COND_NE;
1192         cmp->is_bool = true;
1193         cmp->g1 = cmp->g2 = false;
1194         cmp->c1 = r_dst = tcg_temp_new();
1195         cmp->c2 = tcg_const_tl(0);
1196
1197         switch (cond) {
1198         case 0x0:
1199             gen_op_eval_bn(r_dst);
1200             break;
1201         case 0x1:
1202             gen_op_eval_be(r_dst, r_src);
1203             break;
1204         case 0x2:
1205             gen_op_eval_ble(r_dst, r_src);
1206             break;
1207         case 0x3:
1208             gen_op_eval_bl(r_dst, r_src);
1209             break;
1210         case 0x4:
1211             gen_op_eval_bleu(r_dst, r_src);
1212             break;
1213         case 0x5:
1214             gen_op_eval_bcs(r_dst, r_src);
1215             break;
1216         case 0x6:
1217             gen_op_eval_bneg(r_dst, r_src);
1218             break;
1219         case 0x7:
1220             gen_op_eval_bvs(r_dst, r_src);
1221             break;
1222         case 0x8:
1223             gen_op_eval_ba(r_dst);
1224             break;
1225         case 0x9:
1226             gen_op_eval_bne(r_dst, r_src);
1227             break;
1228         case 0xa:
1229             gen_op_eval_bg(r_dst, r_src);
1230             break;
1231         case 0xb:
1232             gen_op_eval_bge(r_dst, r_src);
1233             break;
1234         case 0xc:
1235             gen_op_eval_bgu(r_dst, r_src);
1236             break;
1237         case 0xd:
1238             gen_op_eval_bcc(r_dst, r_src);
1239             break;
1240         case 0xe:
1241             gen_op_eval_bpos(r_dst, r_src);
1242             break;
1243         case 0xf:
1244             gen_op_eval_bvc(r_dst, r_src);
1245             break;
1246         }
1247         break;
1248     }
1249 }
1250
1251 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1252 {
1253     unsigned int offset;
1254     TCGv r_dst;
1255
1256     /* For now we still generate a straight boolean result.  */
1257     cmp->cond = TCG_COND_NE;
1258     cmp->is_bool = true;
1259     cmp->g1 = cmp->g2 = false;
1260     cmp->c1 = r_dst = tcg_temp_new();
1261     cmp->c2 = tcg_const_tl(0);
1262
1263     switch (cc) {
1264     default:
1265     case 0x0:
1266         offset = 0;
1267         break;
1268     case 0x1:
1269         offset = 32 - 10;
1270         break;
1271     case 0x2:
1272         offset = 34 - 10;
1273         break;
1274     case 0x3:
1275         offset = 36 - 10;
1276         break;
1277     }
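    /* The offsets above are relative to FCC0's position in the FSR: FCC0 sits
       at bits 11:10, while the V9-only FCC1..FCC3 fields sit at bits 33:32,
       35:34 and 37:36, hence the (32 - 10), (34 - 10) and (36 - 10) values
       added to FSR_FCC0_SHIFT/FSR_FCC1_SHIFT by the helpers above. */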
1278
1279     switch (cond) {
1280     case 0x0:
1281         gen_op_eval_bn(r_dst);
1282         break;
1283     case 0x1:
1284         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1285         break;
1286     case 0x2:
1287         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1288         break;
1289     case 0x3:
1290         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1291         break;
1292     case 0x4:
1293         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1294         break;
1295     case 0x5:
1296         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1297         break;
1298     case 0x6:
1299         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1300         break;
1301     case 0x7:
1302         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1303         break;
1304     case 0x8:
1305         gen_op_eval_ba(r_dst);
1306         break;
1307     case 0x9:
1308         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1309         break;
1310     case 0xa:
1311         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1312         break;
1313     case 0xb:
1314         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1315         break;
1316     case 0xc:
1317         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1318         break;
1319     case 0xd:
1320         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1321         break;
1322     case 0xe:
1323         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1324         break;
1325     case 0xf:
1326         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1327         break;
1328     }
1329 }
1330
1331 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1332                      DisasContext *dc)
1333 {
1334     DisasCompare cmp;
1335     gen_compare(&cmp, cc, cond, dc);
1336
1337     /* The interface is to return a boolean in r_dst.  */
1338     if (cmp.is_bool) {
1339         tcg_gen_mov_tl(r_dst, cmp.c1);
1340     } else {
1341         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1342     }
1343
1344     free_compare(&cmp);
1345 }
1346
1347 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1348 {
1349     DisasCompare cmp;
1350     gen_fcompare(&cmp, cc, cond);
1351
1352     /* The interface is to return a boolean in r_dst.  */
1353     if (cmp.is_bool) {
1354         tcg_gen_mov_tl(r_dst, cmp.c1);
1355     } else {
1356         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1357     }
1358
1359     free_compare(&cmp);
1360 }
1361
1362 #ifdef TARGET_SPARC64
1363 // Inverted logic: each entry is the negation of the architectural register condition
1364 static const int gen_tcg_cond_reg[8] = {
1365     -1,
1366     TCG_COND_NE,
1367     TCG_COND_GT,
1368     TCG_COND_GE,
1369     -1,
1370     TCG_COND_EQ,
1371     TCG_COND_LE,
1372     TCG_COND_LT,
1373 };
1374
1375 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1376 {
1377     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1378     cmp->is_bool = false;
1379     cmp->g1 = true;
1380     cmp->g2 = false;
1381     cmp->c1 = r_src;
1382     cmp->c2 = tcg_const_tl(0);
1383 }
1384
1385 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1386 {
1387     DisasCompare cmp;
1388     gen_compare_reg(&cmp, cond, r_src);
1389
1390     /* The interface is to return a boolean in r_dst.  */
1391     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1392
1393     free_compare(&cmp);
1394 }
1395 #endif
1396
1397 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1398 {
1399     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1400     target_ulong target = dc->pc + offset;
1401
1402 #ifdef TARGET_SPARC64
1403     if (unlikely(AM_CHECK(dc))) {
1404         target &= 0xffffffffULL;
1405     }
1406 #endif
1407     if (cond == 0x0) {
1408         /* unconditional not taken */
1409         if (a) {
1410             dc->pc = dc->npc + 4;
1411             dc->npc = dc->pc + 4;
1412         } else {
1413             dc->pc = dc->npc;
1414             dc->npc = dc->pc + 4;
1415         }
1416     } else if (cond == 0x8) {
1417         /* unconditional taken */
1418         if (a) {
1419             dc->pc = target;
1420             dc->npc = dc->pc + 4;
1421         } else {
1422             dc->pc = dc->npc;
1423             dc->npc = target;
1424             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1425         }
1426     } else {
1427         flush_cond(dc);
1428         gen_cond(cpu_cond, cc, cond, dc);
1429         if (a) {
1430             gen_branch_a(dc, target);
1431         } else {
1432             gen_branch_n(dc, target);
1433         }
1434     }
1435 }
1436
1437 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1438 {
1439     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1440     target_ulong target = dc->pc + offset;
1441
1442 #ifdef TARGET_SPARC64
1443     if (unlikely(AM_CHECK(dc))) {
1444         target &= 0xffffffffULL;
1445     }
1446 #endif
1447     if (cond == 0x0) {
1448         /* unconditional not taken */
1449         if (a) {
1450             dc->pc = dc->npc + 4;
1451             dc->npc = dc->pc + 4;
1452         } else {
1453             dc->pc = dc->npc;
1454             dc->npc = dc->pc + 4;
1455         }
1456     } else if (cond == 0x8) {
1457         /* unconditional taken */
1458         if (a) {
1459             dc->pc = target;
1460             dc->npc = dc->pc + 4;
1461         } else {
1462             dc->pc = dc->npc;
1463             dc->npc = target;
1464             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1465         }
1466     } else {
1467         flush_cond(dc);
1468         gen_fcond(cpu_cond, cc, cond);
1469         if (a) {
1470             gen_branch_a(dc, target);
1471         } else {
1472             gen_branch_n(dc, target);
1473         }
1474     }
1475 }
1476
1477 #ifdef TARGET_SPARC64
1478 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1479                           TCGv r_reg)
1480 {
1481     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1482     target_ulong target = dc->pc + offset;
1483
1484     if (unlikely(AM_CHECK(dc))) {
1485         target &= 0xffffffffULL;
1486     }
1487     flush_cond(dc);
1488     gen_cond_reg(cpu_cond, cond, r_reg);
1489     if (a) {
1490         gen_branch_a(dc, target);
1491     } else {
1492         gen_branch_n(dc, target);
1493     }
1494 }
1495
1496 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1497 {
1498     switch (fccno) {
1499     case 0:
1500         gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1501         break;
1502     case 1:
1503         gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1504         break;
1505     case 2:
1506         gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1507         break;
1508     case 3:
1509         gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1510         break;
1511     }
1512 }
1513
1514 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1515 {
1516     switch (fccno) {
1517     case 0:
1518         gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1519         break;
1520     case 1:
1521         gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1522         break;
1523     case 2:
1524         gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1525         break;
1526     case 3:
1527         gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1528         break;
1529     }
1530 }
1531
1532 static inline void gen_op_fcmpq(int fccno)
1533 {
1534     switch (fccno) {
1535     case 0:
1536         gen_helper_fcmpq(cpu_env);
1537         break;
1538     case 1:
1539         gen_helper_fcmpq_fcc1(cpu_env);
1540         break;
1541     case 2:
1542         gen_helper_fcmpq_fcc2(cpu_env);
1543         break;
1544     case 3:
1545         gen_helper_fcmpq_fcc3(cpu_env);
1546         break;
1547     }
1548 }
1549
1550 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1551 {
1552     switch (fccno) {
1553     case 0:
1554         gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1555         break;
1556     case 1:
1557         gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1558         break;
1559     case 2:
1560         gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1561         break;
1562     case 3:
1563         gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1564         break;
1565     }
1566 }
1567
1568 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1569 {
1570     switch (fccno) {
1571     case 0:
1572         gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1573         break;
1574     case 1:
1575         gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1576         break;
1577     case 2:
1578         gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1579         break;
1580     case 3:
1581         gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1582         break;
1583     }
1584 }
1585
1586 static inline void gen_op_fcmpeq(int fccno)
1587 {
1588     switch (fccno) {
1589     case 0:
1590         gen_helper_fcmpeq(cpu_env);
1591         break;
1592     case 1:
1593         gen_helper_fcmpeq_fcc1(cpu_env);
1594         break;
1595     case 2:
1596         gen_helper_fcmpeq_fcc2(cpu_env);
1597         break;
1598     case 3:
1599         gen_helper_fcmpeq_fcc3(cpu_env);
1600         break;
1601     }
1602 }
1603
1604 #else
1605
1606 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1607 {
1608     gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1609 }
1610
1611 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1612 {
1613     gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1614 }
1615
1616 static inline void gen_op_fcmpq(int fccno)
1617 {
1618     gen_helper_fcmpq(cpu_env);
1619 }
1620
1621 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1622 {
1623     gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1624 }
1625
1626 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1627 {
1628     gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1629 }
1630
1631 static inline void gen_op_fcmpeq(int fccno)
1632 {
1633     gen_helper_fcmpeq(cpu_env);
1634 }
1635 #endif
1636
1637 static inline void gen_op_fpexception_im(int fsr_flags)
1638 {
1639     TCGv_i32 r_const;
1640
1641     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1642     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1643     r_const = tcg_const_i32(TT_FP_EXCP);
1644     gen_helper_raise_exception(cpu_env, r_const);
1645     tcg_temp_free_i32(r_const);
1646 }
1647
1648 static int gen_trap_ifnofpu(DisasContext *dc)
1649 {
1650 #if !defined(CONFIG_USER_ONLY)
1651     if (!dc->fpu_enabled) {
1652         TCGv_i32 r_const;
1653
1654         save_state(dc);
1655         r_const = tcg_const_i32(TT_NFPU_INSN);
1656         gen_helper_raise_exception(cpu_env, r_const);
1657         tcg_temp_free_i32(r_const);
1658         dc->is_br = 1;
1659         return 1;
1660     }
1661 #endif
1662     return 0;
1663 }
1664
1665 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1666 {
1667     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1668 }
1669
1670 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1671                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1672 {
1673     TCGv_i32 dst, src;
1674
1675     src = gen_load_fpr_F(dc, rs);
1676     dst = gen_dest_fpr_F(dc);
1677
1678     gen(dst, cpu_env, src);
1679
1680     gen_store_fpr_F(dc, rd, dst);
1681 }
1682
1683 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1684                                  void (*gen)(TCGv_i32, TCGv_i32))
1685 {
1686     TCGv_i32 dst, src;
1687
1688     src = gen_load_fpr_F(dc, rs);
1689     dst = gen_dest_fpr_F(dc);
1690
1691     gen(dst, src);
1692
1693     gen_store_fpr_F(dc, rd, dst);
1694 }
1695
1696 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1697                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1698 {
1699     TCGv_i32 dst, src1, src2;
1700
1701     src1 = gen_load_fpr_F(dc, rs1);
1702     src2 = gen_load_fpr_F(dc, rs2);
1703     dst = gen_dest_fpr_F(dc);
1704
1705     gen(dst, cpu_env, src1, src2);
1706
1707     gen_store_fpr_F(dc, rd, dst);
1708 }
1709
1710 #ifdef TARGET_SPARC64
1711 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1712                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1713 {
1714     TCGv_i32 dst, src1, src2;
1715
1716     src1 = gen_load_fpr_F(dc, rs1);
1717     src2 = gen_load_fpr_F(dc, rs2);
1718     dst = gen_dest_fpr_F(dc);
1719
1720     gen(dst, src1, src2);
1721
1722     gen_store_fpr_F(dc, rd, dst);
1723 }
1724 #endif
1725
1726 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1727                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1728 {
1729     TCGv_i64 dst, src;
1730
1731     src = gen_load_fpr_D(dc, rs);
1732     dst = gen_dest_fpr_D(dc, rd);
1733
1734     gen(dst, cpu_env, src);
1735
1736     gen_store_fpr_D(dc, rd, dst);
1737 }
1738
1739 #ifdef TARGET_SPARC64
1740 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1741                                  void (*gen)(TCGv_i64, TCGv_i64))
1742 {
1743     TCGv_i64 dst, src;
1744
1745     src = gen_load_fpr_D(dc, rs);
1746     dst = gen_dest_fpr_D(dc, rd);
1747
1748     gen(dst, src);
1749
1750     gen_store_fpr_D(dc, rd, dst);
1751 }
1752 #endif
1753
1754 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1755                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1756 {
1757     TCGv_i64 dst, src1, src2;
1758
1759     src1 = gen_load_fpr_D(dc, rs1);
1760     src2 = gen_load_fpr_D(dc, rs2);
1761     dst = gen_dest_fpr_D(dc, rd);
1762
1763     gen(dst, cpu_env, src1, src2);
1764
1765     gen_store_fpr_D(dc, rd, dst);
1766 }
1767
1768 #ifdef TARGET_SPARC64
1769 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1770                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1771 {
1772     TCGv_i64 dst, src1, src2;
1773
1774     src1 = gen_load_fpr_D(dc, rs1);
1775     src2 = gen_load_fpr_D(dc, rs2);
1776     dst = gen_dest_fpr_D(dc, rd);
1777
1778     gen(dst, src1, src2);
1779
1780     gen_store_fpr_D(dc, rd, dst);
1781 }
1782
1783 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1784                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1785 {
1786     TCGv_i64 dst, src1, src2;
1787
1788     src1 = gen_load_fpr_D(dc, rs1);
1789     src2 = gen_load_fpr_D(dc, rs2);
1790     dst = gen_dest_fpr_D(dc, rd);
1791
1792     gen(dst, cpu_gsr, src1, src2);
1793
1794     gen_store_fpr_D(dc, rd, dst);
1795 }
1796
1797 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1798                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1799 {
1800     TCGv_i64 dst, src0, src1, src2;
1801
1802     src1 = gen_load_fpr_D(dc, rs1);
1803     src2 = gen_load_fpr_D(dc, rs2);
1804     src0 = gen_load_fpr_D(dc, rd);
1805     dst = gen_dest_fpr_D(dc, rd);
1806
1807     gen(dst, src0, src1, src2);
1808
1809     gen_store_fpr_D(dc, rd, dst);
1810 }
1811 #endif
1812
1813 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1814                               void (*gen)(TCGv_ptr))
1815 {
1816     gen_op_load_fpr_QT1(QFPREG(rs));
1817
1818     gen(cpu_env);
1819
1820     gen_op_store_QT0_fpr(QFPREG(rd));
1821     gen_update_fprs_dirty(QFPREG(rd));
1822 }
1823
1824 #ifdef TARGET_SPARC64
1825 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1826                                  void (*gen)(TCGv_ptr))
1827 {
1828     gen_op_load_fpr_QT1(QFPREG(rs));
1829
1830     gen(cpu_env);
1831
1832     gen_op_store_QT0_fpr(QFPREG(rd));
1833     gen_update_fprs_dirty(QFPREG(rd));
1834 }
1835 #endif
1836
1837 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1838                                void (*gen)(TCGv_ptr))
1839 {
1840     gen_op_load_fpr_QT0(QFPREG(rs1));
1841     gen_op_load_fpr_QT1(QFPREG(rs2));
1842
1843     gen(cpu_env);
1844
1845     gen_op_store_QT0_fpr(QFPREG(rd));
1846     gen_update_fprs_dirty(QFPREG(rd));
1847 }
1848
1849 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1850                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1851 {
1852     TCGv_i64 dst;
1853     TCGv_i32 src1, src2;
1854
1855     src1 = gen_load_fpr_F(dc, rs1);
1856     src2 = gen_load_fpr_F(dc, rs2);
1857     dst = gen_dest_fpr_D(dc, rd);
1858
1859     gen(dst, cpu_env, src1, src2);
1860
1861     gen_store_fpr_D(dc, rd, dst);
1862 }
1863
1864 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1865                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1866 {
1867     TCGv_i64 src1, src2;
1868
1869     src1 = gen_load_fpr_D(dc, rs1);
1870     src2 = gen_load_fpr_D(dc, rs2);
1871
1872     gen(cpu_env, src1, src2);
1873
1874     gen_op_store_QT0_fpr(QFPREG(rd));
1875     gen_update_fprs_dirty(QFPREG(rd));
1876 }
1877
1878 #ifdef TARGET_SPARC64
1879 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1880                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1881 {
1882     TCGv_i64 dst;
1883     TCGv_i32 src;
1884
1885     src = gen_load_fpr_F(dc, rs);
1886     dst = gen_dest_fpr_D(dc, rd);
1887
1888     gen(dst, cpu_env, src);
1889
1890     gen_store_fpr_D(dc, rd, dst);
1891 }
1892 #endif
1893
1894 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1895                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1896 {
1897     TCGv_i64 dst;
1898     TCGv_i32 src;
1899
1900     src = gen_load_fpr_F(dc, rs);
1901     dst = gen_dest_fpr_D(dc, rd);
1902
1903     gen(dst, cpu_env, src);
1904
1905     gen_store_fpr_D(dc, rd, dst);
1906 }
1907
1908 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1909                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1910 {
1911     TCGv_i32 dst;
1912     TCGv_i64 src;
1913
1914     src = gen_load_fpr_D(dc, rs);
1915     dst = gen_dest_fpr_F(dc);
1916
1917     gen(dst, cpu_env, src);
1918
1919     gen_store_fpr_F(dc, rd, dst);
1920 }
1921
1922 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1923                               void (*gen)(TCGv_i32, TCGv_ptr))
1924 {
1925     TCGv_i32 dst;
1926
1927     gen_op_load_fpr_QT1(QFPREG(rs));
1928     dst = gen_dest_fpr_F(dc);
1929
1930     gen(dst, cpu_env);
1931
1932     gen_store_fpr_F(dc, rd, dst);
1933 }
1934
1935 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1936                               void (*gen)(TCGv_i64, TCGv_ptr))
1937 {
1938     TCGv_i64 dst;
1939
1940     gen_op_load_fpr_QT1(QFPREG(rs));
1941     dst = gen_dest_fpr_D(dc, rd);
1942
1943     gen(dst, cpu_env);
1944
1945     gen_store_fpr_D(dc, rd, dst);
1946 }
1947
1948 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1949                                  void (*gen)(TCGv_ptr, TCGv_i32))
1950 {
1951     TCGv_i32 src;
1952
1953     src = gen_load_fpr_F(dc, rs);
1954
1955     gen(cpu_env, src);
1956
1957     gen_op_store_QT0_fpr(QFPREG(rd));
1958     gen_update_fprs_dirty(QFPREG(rd));
1959 }
1960
1961 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1962                                  void (*gen)(TCGv_ptr, TCGv_i64))
1963 {
1964     TCGv_i64 src;
1965
1966     src = gen_load_fpr_D(dc, rs);
1967
1968     gen(cpu_env, src);
1969
1970     gen_op_store_QT0_fpr(QFPREG(rd));
1971     gen_update_fprs_dirty(QFPREG(rd));
1972 }
1973
1974 /* asi moves */
1975 #ifdef TARGET_SPARC64
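/* Pick the ASI for a load/store alternate: in the immediate form (IS_IMM)
   the ASI is taken from the %asi register, otherwise from the instruction's
   8-bit ASI field, GET_FIELD(insn, 19, 26). */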
1976 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1977 {
1978     int asi;
1979     TCGv_i32 r_asi;
1980
1981     if (IS_IMM) {
1982         r_asi = tcg_temp_new_i32();
1983         tcg_gen_mov_i32(r_asi, cpu_asi);
1984     } else {
1985         asi = GET_FIELD(insn, 19, 26);
1986         r_asi = tcg_const_i32(asi);
1987     }
1988     return r_asi;
1989 }
1990
1991 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1992                               int sign)
1993 {
1994     TCGv_i32 r_asi, r_size, r_sign;
1995
1996     r_asi = gen_get_asi(insn, addr);
1997     r_size = tcg_const_i32(size);
1998     r_sign = tcg_const_i32(sign);
1999     gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
2000     tcg_temp_free_i32(r_sign);
2001     tcg_temp_free_i32(r_size);
2002     tcg_temp_free_i32(r_asi);
2003 }
2004
2005 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2006 {
2007     TCGv_i32 r_asi, r_size;
2008
2009     r_asi = gen_get_asi(insn, addr);
2010     r_size = tcg_const_i32(size);
2011     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2012     tcg_temp_free_i32(r_size);
2013     tcg_temp_free_i32(r_asi);
2014 }
2015
2016 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
2017 {
2018     TCGv_i32 r_asi, r_size, r_rd;
2019
2020     r_asi = gen_get_asi(insn, addr);
2021     r_size = tcg_const_i32(size);
2022     r_rd = tcg_const_i32(rd);
2023     gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2024     tcg_temp_free_i32(r_rd);
2025     tcg_temp_free_i32(r_size);
2026     tcg_temp_free_i32(r_asi);
2027 }
2028
2029 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
2030 {
2031     TCGv_i32 r_asi, r_size, r_rd;
2032
2033     r_asi = gen_get_asi(insn, addr);
2034     r_size = tcg_const_i32(size);
2035     r_rd = tcg_const_i32(rd);
2036     gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2037     tcg_temp_free_i32(r_rd);
2038     tcg_temp_free_i32(r_size);
2039     tcg_temp_free_i32(r_asi);
2040 }
2041
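/* SWAPA: read the 32-bit word at [addr] in the given ASI into dst and store
   src back to the same location.  The load and store below are separate
   helper calls, so the exchange is not performed atomically here. */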
2042 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2043 {
2044     TCGv_i32 r_asi, r_size, r_sign;
2045     TCGv_i64 t64 = tcg_temp_new_i64();
2046
2047     r_asi = gen_get_asi(insn, addr);
2048     r_size = tcg_const_i32(4);
2049     r_sign = tcg_const_i32(0);
2050     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2051     tcg_temp_free_i32(r_sign);
2052     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2053     tcg_temp_free_i32(r_size);
2054     tcg_temp_free_i32(r_asi);
2055     tcg_gen_trunc_i64_tl(dst, t64);
2056     tcg_temp_free_i64(t64);
2057 }
2058
2059 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2060                                 int insn, int rd)
2061 {
2062     TCGv_i32 r_asi, r_rd;
2063
2064     r_asi = gen_get_asi(insn, addr);
2065     r_rd = tcg_const_i32(rd);
2066     gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
2067     tcg_temp_free_i32(r_rd);
2068     tcg_temp_free_i32(r_asi);
2069 }
2070
2071 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2072                                 int insn, int rd)
2073 {
2074     TCGv_i32 r_asi, r_size;
2075     TCGv lo = gen_load_gpr(dc, rd + 1);
2076     TCGv_i64 t64 = tcg_temp_new_i64();
2077
2078     tcg_gen_concat_tl_i64(t64, lo, hi);
2079     r_asi = gen_get_asi(insn, addr);
2080     r_size = tcg_const_i32(8);
2081     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2082     tcg_temp_free_i32(r_size);
2083     tcg_temp_free_i32(r_asi);
2084     tcg_temp_free_i64(t64);
2085 }
2086
2087 static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
2088                                 TCGv val2, int insn, int rd)
2089 {
2090     TCGv val1 = gen_load_gpr(dc, rd);
2091     TCGv dst = gen_dest_gpr(dc, rd);
2092     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2093
2094     gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
2095     tcg_temp_free_i32(r_asi);
2096     gen_store_gpr(dc, rd, dst);
2097 }
2098
2099 #elif !defined(CONFIG_USER_ONLY)
2100
2101 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2102                               int sign)
2103 {
2104     TCGv_i32 r_asi, r_size, r_sign;
2105     TCGv_i64 t64 = tcg_temp_new_i64();
2106
2107     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2108     r_size = tcg_const_i32(size);
2109     r_sign = tcg_const_i32(sign);
2110     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2111     tcg_temp_free_i32(r_sign);
2112     tcg_temp_free_i32(r_size);
2113     tcg_temp_free_i32(r_asi);
2114     tcg_gen_trunc_i64_tl(dst, t64);
2115     tcg_temp_free_i64(t64);
2116 }
2117
2118 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2119 {
2120     TCGv_i32 r_asi, r_size;
2121     TCGv_i64 t64 = tcg_temp_new_i64();
2122
2123     tcg_gen_extu_tl_i64(t64, src);
2124     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2125     r_size = tcg_const_i32(size);
2126     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2127     tcg_temp_free_i32(r_size);
2128     tcg_temp_free_i32(r_asi);
2129     tcg_temp_free_i64(t64);
2130 }
2131
2132 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2133 {
2134     TCGv_i32 r_asi, r_size, r_sign;
2135     TCGv_i64 r_val, t64;
2136
2137     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2138     r_size = tcg_const_i32(4);
2139     r_sign = tcg_const_i32(0);
2140     t64 = tcg_temp_new_i64();
2141     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2142     tcg_temp_free_i32(r_sign);
2143     r_val = tcg_temp_new_i64();
2144     tcg_gen_extu_tl_i64(r_val, src);
2145     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2146     tcg_temp_free_i64(r_val);
2147     tcg_temp_free_i32(r_size);
2148     tcg_temp_free_i32(r_asi);
2149     tcg_gen_trunc_i64_tl(dst, t64);
2150     tcg_temp_free_i64(t64);
2151 }
2152
2153 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2154                                 int insn, int rd)
2155 {
2156     TCGv_i32 r_asi, r_size, r_sign;
2157     TCGv t;
2158     TCGv_i64 t64;
2159
2160     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2161     r_size = tcg_const_i32(8);
2162     r_sign = tcg_const_i32(0);
2163     t64 = tcg_temp_new_i64();
2164     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2165     tcg_temp_free_i32(r_sign);
2166     tcg_temp_free_i32(r_size);
2167     tcg_temp_free_i32(r_asi);
2168
2169     /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
2170        whereby "rd + 1" elicits "error: array subscript is above array".
2171        Since we have already asserted that rd is even, the semantics
2172        are unchanged.  */
2173     t = gen_dest_gpr(dc, rd | 1);
2174     tcg_gen_trunc_i64_tl(t, t64);
2175     gen_store_gpr(dc, rd | 1, t);
2176
2177     tcg_gen_shri_i64(t64, t64, 32);
2178     tcg_gen_trunc_i64_tl(hi, t64);
2179     tcg_temp_free_i64(t64);
2180     gen_store_gpr(dc, rd, hi);
2181 }
2182
2183 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2184                                 int insn, int rd)
2185 {
2186     TCGv_i32 r_asi, r_size;
2187     TCGv lo = gen_load_gpr(dc, rd + 1);
2188     TCGv_i64 t64 = tcg_temp_new_i64();
2189
2190     tcg_gen_concat_tl_i64(t64, lo, hi);
2191     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2192     r_size = tcg_const_i32(8);
2193     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2194     tcg_temp_free_i32(r_size);
2195     tcg_temp_free_i32(r_asi);
2196     tcg_temp_free_i64(t64);
2197 }
2198 #endif
2199
2200 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2201 static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
2202                                TCGv val2, int insn, int rd)
2203 {
2204     TCGv val1 = gen_load_gpr(dc, rd);
2205     TCGv dst = gen_dest_gpr(dc, rd);
2206 #ifdef TARGET_SPARC64
2207     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2208 #else
2209     TCGv_i32 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2210 #endif
2211
2212     gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
2213     tcg_temp_free_i32(r_asi);
2214     gen_store_gpr(dc, rd, dst);
2215 }
2216
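/* LDSTUBA: load the unsigned byte at [addr] in the given ASI into dst, then
   store 0xff back to the same location.  As with the other ASI accessors,
   the load and the store are separate helper calls, so this is not atomic. */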
2217 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2218 {
2219     TCGv_i64 r_val;
2220     TCGv_i32 r_asi, r_size;
2221
2222     gen_ld_asi(dst, addr, insn, 1, 0);
2223
2224     r_val = tcg_const_i64(0xffULL);
2225     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2226     r_size = tcg_const_i32(1);
2227     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2228     tcg_temp_free_i32(r_size);
2229     tcg_temp_free_i32(r_asi);
2230     tcg_temp_free_i64(r_val);
2231 }
2232 #endif
2233
2234 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2235 {
2236     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2237     return gen_load_gpr(dc, rs1);
2238 }
2239
2240 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2241 {
2242     if (IS_IMM) { /* immediate */
2243         target_long simm = GET_FIELDs(insn, 19, 31);
2244         TCGv t = get_temp_tl(dc);
2245         tcg_gen_movi_tl(t, simm);
2246         return t;
2247     } else {      /* register */
2248         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2249         return gen_load_gpr(dc, rs2);
2250     }
2251 }
2252
2253 #ifdef TARGET_SPARC64
2254 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2255 {
2256     TCGv_i32 c32, zero, dst, s1, s2;
2257
2258     /* We have two choices here: extend the 32-bit data and use movcond_i64,
2259        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2260        the latter.  */
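    /* Folding is presumably preferred because the single-precision operands
       are already TCGv_i32, so only the comparison result needs narrowing,
       rather than widening every operand to 64 bits. */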
2261     c32 = tcg_temp_new_i32();
2262     if (cmp->is_bool) {
2263         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2264     } else {
2265         TCGv_i64 c64 = tcg_temp_new_i64();
2266         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2267         tcg_gen_extrl_i64_i32(c32, c64);
2268         tcg_temp_free_i64(c64);
2269     }
2270
2271     s1 = gen_load_fpr_F(dc, rs);
2272     s2 = gen_load_fpr_F(dc, rd);
2273     dst = gen_dest_fpr_F(dc);
2274     zero = tcg_const_i32(0);
2275
2276     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2277
2278     tcg_temp_free_i32(c32);
2279     tcg_temp_free_i32(zero);
2280     gen_store_fpr_F(dc, rd, dst);
2281 }
2282
2283 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2284 {
2285     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2286     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2287                         gen_load_fpr_D(dc, rs),
2288                         gen_load_fpr_D(dc, rd));
2289     gen_store_fpr_D(dc, rd, dst);
2290 }
2291
2292 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2293 {
2294     int qd = QFPREG(rd);
2295     int qs = QFPREG(rs);
2296
2297     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2298                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2299     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2300                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2301
2302     gen_update_fprs_dirty(qd);
2303 }
2304
2305 #ifndef CONFIG_USER_ONLY
2306 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
2307 {
2308     TCGv_i32 r_tl = tcg_temp_new_i32();
2309
2310     /* load env->tl into r_tl */
2311     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2312
2313     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2314     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2315
2316     /* calculate offset to current trap state from env->ts, reuse r_tl */
2317     tcg_gen_muli_i32(r_tl, r_tl, sizeof(trap_state));
2318     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2319
2320     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2321     {
2322         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2323         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2324         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2325         tcg_temp_free_ptr(r_tl_tmp);
2326     }
2327
2328     tcg_temp_free_i32(r_tl);
2329 }
2330 #endif
2331
2332 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2333                      int width, bool cc, bool left)
2334 {
2335     TCGv lo1, lo2, t1, t2;
2336     uint64_t amask, tabl, tabr;
2337     int shift, imask, omask;
2338
2339     if (cc) {
2340         tcg_gen_mov_tl(cpu_cc_src, s1);
2341         tcg_gen_mov_tl(cpu_cc_src2, s2);
2342         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2343         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2344         dc->cc_op = CC_OP_SUB;
2345     }
2346
2347     /* Theory of operation: there are two tables, left and right (not to
2348        be confused with the left and right versions of the opcode).  These
2349        are indexed by the low 3 bits of the inputs.  To make things "easy",
2350        these tables are loaded into two constants, TABL and TABR below.
2351        The operation index = (input & imask) << shift calculates the index
2352        into the constant, while val = (table >> index) & omask calculates
2353        the value we're looking for.  */
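    /* For example, with width == 8 and left == true: if the low three bits
       of s1 are 3, then index = 3 << 3 = 24 and the value extracted from
       TABL below is (0x80c0e0f0f8fcfeffULL >> 24) & 0xff = 0xf8. */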
2354     switch (width) {
2355     case 8:
2356         imask = 0x7;
2357         shift = 3;
2358         omask = 0xff;
2359         if (left) {
2360             tabl = 0x80c0e0f0f8fcfeffULL;
2361             tabr = 0xff7f3f1f0f070301ULL;
2362         } else {
2363             tabl = 0x0103070f1f3f7fffULL;
2364             tabr = 0xfffefcf8f0e0c080ULL;
2365         }
2366         break;
2367     case 16:
2368         imask = 0x6;
2369         shift = 1;
2370         omask = 0xf;
2371         if (left) {
2372             tabl = 0x8cef;
2373             tabr = 0xf731;
2374         } else {
2375             tabl = 0x137f;
2376             tabr = 0xfec8;
2377         }
2378         break;
2379     case 32:
2380         imask = 0x4;
2381         shift = 0;
2382         omask = 0x3;
2383         if (left) {
2384             tabl = (2 << 2) | 3;
2385             tabr = (3 << 2) | 1;
2386         } else {
2387             tabl = (1 << 2) | 3;
2388             tabr = (3 << 2) | 2;
2389         }
2390         break;
2391     default:
2392         abort();
2393     }
2394
2395     lo1 = tcg_temp_new();
2396     lo2 = tcg_temp_new();
2397     tcg_gen_andi_tl(lo1, s1, imask);
2398     tcg_gen_andi_tl(lo2, s2, imask);
2399     tcg_gen_shli_tl(lo1, lo1, shift);
2400     tcg_gen_shli_tl(lo2, lo2, shift);
2401
2402     t1 = tcg_const_tl(tabl);
2403     t2 = tcg_const_tl(tabr);
2404     tcg_gen_shr_tl(lo1, t1, lo1);
2405     tcg_gen_shr_tl(lo2, t2, lo2);
2406     tcg_gen_andi_tl(dst, lo1, omask);
2407     tcg_gen_andi_tl(lo2, lo2, omask);
2408
2409     amask = -8;
2410     if (AM_CHECK(dc)) {
2411         amask &= 0xffffffffULL;
2412     }
2413     tcg_gen_andi_tl(s1, s1, amask);
2414     tcg_gen_andi_tl(s2, s2, amask);
2415
2416     /* We want to compute
2417         dst = (s1 == s2 ? lo1 : lo1 & lo2).
2418        We've already done dst = lo1, so this reduces to
2419         dst &= (s1 == s2 ? -1 : lo2)
2420        Which we perform by
2421         lo2 |= -(s1 == s2)
2422         dst &= lo2
2423     */
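    /* tcg_gen_setcond_tl leaves 0 or 1 in t1, so the negation below turns it
       into 0 or an all-ones mask, i.e. -(s1 == s2). */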
2424     tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2425     tcg_gen_neg_tl(t1, t1);
2426     tcg_gen_or_tl(lo2, lo2, t1);
2427     tcg_gen_and_tl(dst, dst, lo2);
2428
2429     tcg_temp_free(lo1);
2430     tcg_temp_free(lo2);
2431     tcg_temp_free(t1);
2432     tcg_temp_free(t2);
2433 }
2434
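/* alignaddr: dst = (s1 + s2) & ~7, with the low three bits of the sum
   (negated first when "left" is set) deposited into GSR.align for use by a
   following faligndata. */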
2435 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2436 {
2437     TCGv tmp = tcg_temp_new();
2438
2439     tcg_gen_add_tl(tmp, s1, s2);
2440     tcg_gen_andi_tl(dst, tmp, -8);
2441     if (left) {
2442         tcg_gen_neg_tl(tmp, tmp);
2443     }
2444     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2445
2446     tcg_temp_free(tmp);
2447 }
2448
2449 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2450 {
2451     TCGv t1, t2, shift;
2452
2453     t1 = tcg_temp_new();
2454     t2 = tcg_temp_new();
2455     shift = tcg_temp_new();
2456
2457     tcg_gen_andi_tl(shift, gsr, 7);
2458     tcg_gen_shli_tl(shift, shift, 3);
2459     tcg_gen_shl_tl(t1, s1, shift);
2460
2461     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2462        shift of (up to 63) followed by a constant shift of 1.  */
2463     tcg_gen_xori_tl(shift, shift, 63);
2464     tcg_gen_shr_tl(t2, s2, shift);
2465     tcg_gen_shri_tl(t2, t2, 1);
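    /* E.g. when (gsr & 7) == 0 the desired right shift of s2 is 64 bits:
       shift ^ 63 == 63, so t2 = (s2 >> 63) >> 1 == 0, which is the correct
       result without ever issuing a shift by 64. */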
2466
2467     tcg_gen_or_tl(dst, t1, t2);
2468
2469     tcg_temp_free(t1);
2470     tcg_temp_free(t2);
2471     tcg_temp_free(shift);
2472 }
2473 #endif
2474
2475 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
2476     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2477         goto illegal_insn;
2478 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2479     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2480         goto nfpu_insn;
2481
2482 /* before an instruction, dc->pc must be static */
2483 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2484 {
2485     unsigned int opc, rs1, rs2, rd;
2486     TCGv cpu_src1, cpu_src2;
2487     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2488     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2489     target_long simm;
2490
2491     opc = GET_FIELD(insn, 0, 1);
2492     rd = GET_FIELD(insn, 2, 6);
2493
2494     switch (opc) {
2495     case 0:                     /* branches/sethi */
2496         {
2497             unsigned int xop = GET_FIELD(insn, 7, 9);
2498             int32_t target;
2499             switch (xop) {
2500 #ifdef TARGET_SPARC64
2501             case 0x1:           /* V9 BPcc */
2502                 {
2503                     int cc;
2504
2505                     target = GET_FIELD_SP(insn, 0, 18);
2506                     target = sign_extend(target, 19);
2507                     target <<= 2;
2508                     cc = GET_FIELD_SP(insn, 20, 21);
2509                     if (cc == 0)
2510                         do_branch(dc, target, insn, 0);
2511                     else if (cc == 2)
2512                         do_branch(dc, target, insn, 1);
2513                     else
2514                         goto illegal_insn;
2515                     goto jmp_insn;
2516                 }
2517             case 0x3:           /* V9 BPr */
2518                 {
2519                     target = GET_FIELD_SP(insn, 0, 13) |
2520                         (GET_FIELD_SP(insn, 20, 21) << 14);
2521                     target = sign_extend(target, 16);
2522                     target <<= 2;
2523                     cpu_src1 = get_src1(dc, insn);
2524                     do_branch_reg(dc, target, insn, cpu_src1);
2525                     goto jmp_insn;
2526                 }
2527             case 0x5:           /* V9 FBPcc */
2528                 {
2529                     int cc = GET_FIELD_SP(insn, 20, 21);
2530                     if (gen_trap_ifnofpu(dc)) {
2531                         goto jmp_insn;
2532                     }
2533                     target = GET_FIELD_SP(insn, 0, 18);
2534                     target = sign_extend(target, 19);
2535                     target <<= 2;
2536                     do_fbranch(dc, target, insn, cc);
2537                     goto jmp_insn;
2538                 }
2539 #else
2540             case 0x7:           /* CBN+x */
2541                 {
2542                     goto ncp_insn;
2543                 }
2544 #endif
2545             case 0x2:           /* BN+x */
2546                 {
2547                     target = GET_FIELD(insn, 10, 31);
2548                     target = sign_extend(target, 22);
2549                     target <<= 2;
2550                     do_branch(dc, target, insn, 0);
2551                     goto jmp_insn;
2552                 }
2553             case 0x6:           /* FBN+x */
2554                 {
2555                     if (gen_trap_ifnofpu(dc)) {
2556                         goto jmp_insn;
2557                     }
2558                     target = GET_FIELD(insn, 10, 31);
2559                     target = sign_extend(target, 22);
2560                     target <<= 2;
2561                     do_fbranch(dc, target, insn, 0);
2562                     goto jmp_insn;
2563                 }
2564             case 0x4:           /* SETHI */
2565                 /* Special-case %g0 because that's the canonical nop.  */
2566                 if (rd) {
2567                     uint32_t value = GET_FIELD(insn, 10, 31);
2568                     TCGv t = gen_dest_gpr(dc, rd);
2569                     tcg_gen_movi_tl(t, value << 10);
2570                     gen_store_gpr(dc, rd, t);
2571                 }
2572                 break;
2573             case 0x0:           /* UNIMPL */
2574             default:
2575                 goto illegal_insn;
2576             }
2577             break;
2578         }
2579         break;
2580     case 1:                     /*CALL*/
2581         {
2582             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2583             TCGv o7 = gen_dest_gpr(dc, 15);
2584
2585             tcg_gen_movi_tl(o7, dc->pc);
2586             gen_store_gpr(dc, 15, o7);
2587             target += dc->pc;
2588             gen_mov_pc_npc(dc);
2589 #ifdef TARGET_SPARC64
2590             if (unlikely(AM_CHECK(dc))) {
2591                 target &= 0xffffffffULL;
2592             }
2593 #endif
2594             dc->npc = target;
2595         }
2596         goto jmp_insn;
2597     case 2:                     /* FPU & Logical Operations */
2598         {
2599             unsigned int xop = GET_FIELD(insn, 7, 12);
2600             TCGv cpu_dst = get_temp_tl(dc);
2601             TCGv cpu_tmp0;
2602
2603             if (xop == 0x3a) {  /* generate trap */
2604                 int cond = GET_FIELD(insn, 3, 6);
2605                 TCGv_i32 trap;
2606                 TCGLabel *l1 = NULL;
2607                 int mask;
2608
2609                 if (cond == 0) {
2610                     /* Trap never.  */
2611                     break;
2612                 }
2613
2614                 save_state(dc);
2615
2616                 if (cond != 8) {
2617                     /* Conditional trap.  */
2618                     DisasCompare cmp;
2619 #ifdef TARGET_SPARC64
2620                     /* V9 icc/xcc */
2621                     int cc = GET_FIELD_SP(insn, 11, 12);
2622                     if (cc == 0) {
2623                         gen_compare(&cmp, 0, cond, dc);
2624                     } else if (cc == 2) {
2625                         gen_compare(&cmp, 1, cond, dc);
2626                     } else {
2627                         goto illegal_insn;
2628                     }
2629 #else
2630                     gen_compare(&cmp, 0, cond, dc);
2631 #endif
2632                     l1 = gen_new_label();
2633                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2634                                       cmp.c1, cmp.c2, l1);
2635                     free_compare(&cmp);
2636                 }
2637
2638                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2639                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2640
2641                 /* Don't use the normal temporaries, as they may well have
2642                    gone out of scope with the branch above.  While we're
2643                    doing that we might as well pre-truncate to 32-bit.  */
2644                 trap = tcg_temp_new_i32();
2645
2646                 rs1 = GET_FIELD_SP(insn, 14, 18);
2647                 if (IS_IMM) {
2648                     rs2 = GET_FIELD_SP(insn, 0, 6);
2649                     if (rs1 == 0) {
2650                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2651                         /* Signal that the trap value is fully constant.  */
2652                         mask = 0;
2653                     } else {
2654                         TCGv t1 = gen_load_gpr(dc, rs1);
2655                         tcg_gen_trunc_tl_i32(trap, t1);
2656                         tcg_gen_addi_i32(trap, trap, rs2);
2657                     }
2658                 } else {
2659                     TCGv t1, t2;
2660                     rs2 = GET_FIELD_SP(insn, 0, 4);
2661                     t1 = gen_load_gpr(dc, rs1);
2662                     t2 = gen_load_gpr(dc, rs2);
2663                     tcg_gen_add_tl(t1, t1, t2);
2664                     tcg_gen_trunc_tl_i32(trap, t1);
2665                 }
2666                 if (mask != 0) {
2667                     tcg_gen_andi_i32(trap, trap, mask);
2668                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
2669                 }
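                /* trap now holds ((r[rs1] + operand) & mask) + TT_TRAP, or
                   (imm & mask) + TT_TRAP for the constant %g0 + imm case
                   handled above; this is the exception number raised below. */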
2670
2671                 gen_helper_raise_exception(cpu_env, trap);
2672                 tcg_temp_free_i32(trap);
2673
2674                 if (cond == 8) {
2675                     /* An unconditional trap ends the TB.  */
2676                     dc->is_br = 1;
2677                     goto jmp_insn;
2678                 } else {
2679                     /* A conditional trap falls through to the next insn.  */
2680                     gen_set_label(l1);
2681                     break;
2682                 }
2683             } else if (xop == 0x28) {
2684                 rs1 = GET_FIELD(insn, 13, 17);
2685                 switch(rs1) {
2686                 case 0: /* rdy */
2687 #ifndef TARGET_SPARC64
2688                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2689                                        manual, rdy on the microSPARC
2690                                        II */
2691                 case 0x0f:          /* stbar in the SPARCv8 manual,
2692                                        rdy on the microSPARC II */
2693                 case 0x10 ... 0x1f: /* implementation-dependent in the
2694                                        SPARCv8 manual, rdy on the
2695                                        microSPARC II */
2696                     /* Read Asr17 */
2697                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2698                         TCGv t = gen_dest_gpr(dc, rd);
2699                         /* Read Asr17 for a Leon3 uniprocessor */
2700                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2701                         gen_store_gpr(dc, rd, t);
2702                         break;
2703                     }
2704 #endif
2705                     gen_store_gpr(dc, rd, cpu_y);
2706                     break;
2707 #ifdef TARGET_SPARC64
2708                 case 0x2: /* V9 rdccr */
2709                     update_psr(dc);
2710                     gen_helper_rdccr(cpu_dst, cpu_env);
2711                     gen_store_gpr(dc, rd, cpu_dst);
2712                     break;
2713                 case 0x3: /* V9 rdasi */
2714                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2715                     gen_store_gpr(dc, rd, cpu_dst);
2716                     break;
2717                 case 0x4: /* V9 rdtick */
2718                     {
2719                         TCGv_ptr r_tickptr;
2720                         TCGv_i32 r_const;
2721
2722                         r_tickptr = tcg_temp_new_ptr();
2723                         r_const = tcg_const_i32(dc->mem_idx);
2724                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2725                                        offsetof(CPUSPARCState, tick));
2726                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
2727                                                   r_const);
2728                         tcg_temp_free_ptr(r_tickptr);
2729                         tcg_temp_free_i32(r_const);
2730                         gen_store_gpr(dc, rd, cpu_dst);
2731                     }
2732                     break;
2733                 case 0x5: /* V9 rdpc */
2734                     {
2735                         TCGv t = gen_dest_gpr(dc, rd);
2736                         if (unlikely(AM_CHECK(dc))) {
2737                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2738                         } else {
2739                             tcg_gen_movi_tl(t, dc->pc);
2740                         }
2741                         gen_store_gpr(dc, rd, t);
2742                     }
2743                     break;
2744                 case 0x6: /* V9 rdfprs */
2745                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2746                     gen_store_gpr(dc, rd, cpu_dst);
2747                     break;
2748                 case 0xf: /* V9 membar */
2749                     break; /* no effect */
2750                 case 0x13: /* Graphics Status */
2751                     if (gen_trap_ifnofpu(dc)) {
2752                         goto jmp_insn;
2753                     }
2754                     gen_store_gpr(dc, rd, cpu_gsr);
2755                     break;
2756                 case 0x16: /* Softint */
2757                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2758                     gen_store_gpr(dc, rd, cpu_dst);
2759                     break;
2760                 case 0x17: /* Tick compare */
2761                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
2762                     break;
2763                 case 0x18: /* System tick */
2764                     {
2765                         TCGv_ptr r_tickptr;
2766                         TCGv_i32 r_const;
2767
2768                         r_tickptr = tcg_temp_new_ptr();
2769                         r_const = tcg_const_i32(dc->mem_idx);
2770                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2771                                        offsetof(CPUSPARCState, stick));
2772                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
2773                                                   r_const);
2774                         tcg_temp_free_ptr(r_tickptr);
2775                         tcg_temp_free_i32(r_const);
2776                         gen_store_gpr(dc, rd, cpu_dst);
2777                     }
2778                     break;
2779                 case 0x19: /* System tick compare */
2780                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
2781                     break;
2782                 case 0x10: /* Performance Control */
2783                 case 0x11: /* Performance Instrumentation Counter */
2784                 case 0x12: /* Dispatch Control */
2785                 case 0x14: /* Softint set, WO */
2786                 case 0x15: /* Softint clear, WO */
2787 #endif
2788                 default:
2789                     goto illegal_insn;
2790                 }
2791 #if !defined(CONFIG_USER_ONLY)
2792             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2793 #ifndef TARGET_SPARC64
2794                 if (!supervisor(dc)) {
2795                     goto priv_insn;
2796                 }
2797                 update_psr(dc);
2798                 gen_helper_rdpsr(cpu_dst, cpu_env);
2799 #else
2800                 CHECK_IU_FEATURE(dc, HYPV);
2801                 if (!hypervisor(dc))
2802                     goto priv_insn;
2803                 rs1 = GET_FIELD(insn, 13, 17);
2804                 switch (rs1) {
2805                 case 0: // hpstate
2806                     // gen_op_rdhpstate();
2807                     break;
2808                 case 1: // htstate
2809                     // gen_op_rdhtstate();
2810                     break;
2811                 case 3: // hintp
2812                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2813                     break;
2814                 case 5: // htba
2815                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2816                     break;
2817                 case 6: // hver
2818                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2819                     break;
2820                 case 31: // hstick_cmpr
2821                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2822                     break;
2823                 default:
2824                     goto illegal_insn;
2825                 }
2826 #endif
2827                 gen_store_gpr(dc, rd, cpu_dst);
2828                 break;
2829             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2830                 if (!supervisor(dc)) {
2831                     goto priv_insn;
2832                 }
2833                 cpu_tmp0 = get_temp_tl(dc);
2834 #ifdef TARGET_SPARC64
2835                 rs1 = GET_FIELD(insn, 13, 17);
2836                 switch (rs1) {
2837                 case 0: // tpc
2838                     {
2839                         TCGv_ptr r_tsptr;
2840
2841                         r_tsptr = tcg_temp_new_ptr();
2842                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2843                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2844                                       offsetof(trap_state, tpc));
2845                         tcg_temp_free_ptr(r_tsptr);
2846                     }
2847                     break;
2848                 case 1: // tnpc
2849                     {
2850                         TCGv_ptr r_tsptr;
2851
2852                         r_tsptr = tcg_temp_new_ptr();
2853                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2854                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2855                                       offsetof(trap_state, tnpc));
2856                         tcg_temp_free_ptr(r_tsptr);
2857                     }
2858                     break;
2859                 case 2: // tstate
2860                     {
2861                         TCGv_ptr r_tsptr;
2862
2863                         r_tsptr = tcg_temp_new_ptr();
2864                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2865                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2866                                       offsetof(trap_state, tstate));
2867                         tcg_temp_free_ptr(r_tsptr);
2868                     }
2869                     break;
2870                 case 3: // tt
2871                     {
2872                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
2873
2874                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2875                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
2876                                          offsetof(trap_state, tt));
2877                         tcg_temp_free_ptr(r_tsptr);
2878                     }
2879                     break;
2880                 case 4: // tick
2881                     {
2882                         TCGv_ptr r_tickptr;
2883                         TCGv_i32 r_const;
2884
2885                         r_tickptr = tcg_temp_new_ptr();
2886                         r_const = tcg_const_i32(dc->mem_idx);
2887                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2888                                        offsetof(CPUSPARCState, tick));
2889                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
2890                                                   r_tickptr, r_const);
2891                         tcg_temp_free_ptr(r_tickptr);
2892                         tcg_temp_free_i32(r_const);
2893                     }
2894                     break;
2895                 case 5: // tba
2896                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2897                     break;
2898                 case 6: // pstate
2899                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2900                                      offsetof(CPUSPARCState, pstate));
2901                     break;
2902                 case 7: // tl
2903                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2904                                      offsetof(CPUSPARCState, tl));
2905                     break;
2906                 case 8: // pil
2907                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2908                                      offsetof(CPUSPARCState, psrpil));
2909                     break;
2910                 case 9: // cwp
2911                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
2912                     break;
2913                 case 10: // cansave
2914                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2915                                      offsetof(CPUSPARCState, cansave));
2916                     break;
2917                 case 11: // canrestore
2918                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2919                                      offsetof(CPUSPARCState, canrestore));
2920                     break;
2921                 case 12: // cleanwin
2922                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2923                                      offsetof(CPUSPARCState, cleanwin));
2924                     break;
2925                 case 13: // otherwin
2926                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2927                                      offsetof(CPUSPARCState, otherwin));
2928                     break;
2929                 case 14: // wstate
2930                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2931                                      offsetof(CPUSPARCState, wstate));
2932                     break;
2933                 case 16: // UA2005 gl
2934                     CHECK_IU_FEATURE(dc, GL);
2935                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2936                                      offsetof(CPUSPARCState, gl));
2937                     break;
2938                 case 26: // UA2005 strand status
2939                     CHECK_IU_FEATURE(dc, HYPV);
2940                     if (!hypervisor(dc))
2941                         goto priv_insn;
2942                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2943                     break;
2944                 case 31: // ver
2945                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2946                     break;
2947                 case 15: // fq
2948                 default:
2949                     goto illegal_insn;
2950                 }
2951 #else
2952                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2953 #endif
2954                 gen_store_gpr(dc, rd, cpu_tmp0);
2955                 break;
2956             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2957 #ifdef TARGET_SPARC64
2958                 save_state(dc);
2959                 gen_helper_flushw(cpu_env);
2960 #else
2961                 if (!supervisor(dc))
2962                     goto priv_insn;
2963                 gen_store_gpr(dc, rd, cpu_tbr);
2964 #endif
2965                 break;
2966 #endif
2967             } else if (xop == 0x34) {   /* FPU Operations */
2968                 if (gen_trap_ifnofpu(dc)) {
2969                     goto jmp_insn;
2970                 }
2971                 gen_op_clear_ieee_excp_and_FTT();
2972                 rs1 = GET_FIELD(insn, 13, 17);
2973                 rs2 = GET_FIELD(insn, 27, 31);
2974                 xop = GET_FIELD(insn, 18, 26);
2975                 save_state(dc);
2976                 switch (xop) {
2977                 case 0x1: /* fmovs */
2978                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2979                     gen_store_fpr_F(dc, rd, cpu_src1_32);
2980                     break;
2981                 case 0x5: /* fnegs */
2982                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2983                     break;
2984                 case 0x9: /* fabss */
2985                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2986                     break;
2987                 case 0x29: /* fsqrts */
2988                     CHECK_FPU_FEATURE(dc, FSQRT);
2989                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2990                     break;
2991                 case 0x2a: /* fsqrtd */
2992                     CHECK_FPU_FEATURE(dc, FSQRT);
2993                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2994                     break;
2995                 case 0x2b: /* fsqrtq */
2996                     CHECK_FPU_FEATURE(dc, FLOAT128);
2997                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2998                     break;
2999                 case 0x41: /* fadds */
3000                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3001                     break;
3002                 case 0x42: /* faddd */
3003                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3004                     break;
3005                 case 0x43: /* faddq */
3006                     CHECK_FPU_FEATURE(dc, FLOAT128);
3007                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3008                     break;
3009                 case 0x45: /* fsubs */
3010                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3011                     break;
3012                 case 0x46: /* fsubd */
3013                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3014                     break;
3015                 case 0x47: /* fsubq */
3016                     CHECK_FPU_FEATURE(dc, FLOAT128);
3017                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3018                     break;
3019                 case 0x49: /* fmuls */
3020                     CHECK_FPU_FEATURE(dc, FMUL);
3021                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3022                     break;
3023                 case 0x4a: /* fmuld */
3024                     CHECK_FPU_FEATURE(dc, FMUL);
3025                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3026                     break;
3027                 case 0x4b: /* fmulq */
3028                     CHECK_FPU_FEATURE(dc, FLOAT128);
3029                     CHECK_FPU_FEATURE(dc, FMUL);
3030                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3031                     break;
3032                 case 0x4d: /* fdivs */
3033                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3034                     break;
3035                 case 0x4e: /* fdivd */
3036                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3037                     break;
3038                 case 0x4f: /* fdivq */
3039                     CHECK_FPU_FEATURE(dc, FLOAT128);
3040                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3041                     break;
3042                 case 0x69: /* fsmuld */
3043                     CHECK_FPU_FEATURE(dc, FSMULD);
3044                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3045                     break;
3046                 case 0x6e: /* fdmulq */
3047                     CHECK_FPU_FEATURE(dc, FLOAT128);
3048                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3049                     break;
3050                 case 0xc4: /* fitos */
3051                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3052                     break;
3053                 case 0xc6: /* fdtos */
3054                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3055                     break;
3056                 case 0xc7: /* fqtos */
3057                     CHECK_FPU_FEATURE(dc, FLOAT128);
3058                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3059                     break;
3060                 case 0xc8: /* fitod */
3061                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3062                     break;
3063                 case 0xc9: /* fstod */
3064                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3065                     break;
3066                 case 0xcb: /* fqtod */
3067                     CHECK_FPU_FEATURE(dc, FLOAT128);
3068                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3069                     break;
3070                 case 0xcc: /* fitoq */
3071                     CHECK_FPU_FEATURE(dc, FLOAT128);
3072                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3073                     break;
3074                 case 0xcd: /* fstoq */
3075                     CHECK_FPU_FEATURE(dc, FLOAT128);
3076                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3077                     break;
3078                 case 0xce: /* fdtoq */
3079                     CHECK_FPU_FEATURE(dc, FLOAT128);
3080                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3081                     break;
3082                 case 0xd1: /* fstoi */
3083                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3084                     break;
3085                 case 0xd2: /* fdtoi */
3086                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3087                     break;
3088                 case 0xd3: /* fqtoi */
3089                     CHECK_FPU_FEATURE(dc, FLOAT128);
3090                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3091                     break;
3092 #ifdef TARGET_SPARC64
3093                 case 0x2: /* V9 fmovd */
3094                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3095                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3096                     break;
3097                 case 0x3: /* V9 fmovq */
3098                     CHECK_FPU_FEATURE(dc, FLOAT128);
3099                     gen_move_Q(rd, rs2);
3100                     break;
3101                 case 0x6: /* V9 fnegd */
3102                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3103                     break;
3104                 case 0x7: /* V9 fnegq */
3105                     CHECK_FPU_FEATURE(dc, FLOAT128);
3106                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3107                     break;
3108                 case 0xa: /* V9 fabsd */
3109                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3110                     break;
3111                 case 0xb: /* V9 fabsq */
3112                     CHECK_FPU_FEATURE(dc, FLOAT128);
3113                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3114                     break;
3115                 case 0x81: /* V9 fstox */
3116                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3117                     break;
3118                 case 0x82: /* V9 fdtox */
3119                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3120                     break;
3121                 case 0x83: /* V9 fqtox */
3122                     CHECK_FPU_FEATURE(dc, FLOAT128);
3123                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3124                     break;
3125                 case 0x84: /* V9 fxtos */
3126                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3127                     break;
3128                 case 0x88: /* V9 fxtod */
3129                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3130                     break;
3131                 case 0x8c: /* V9 fxtoq */
3132                     CHECK_FPU_FEATURE(dc, FLOAT128);
3133                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3134                     break;
3135 #endif
3136                 default:
3137                     goto illegal_insn;
3138                 }
3139             } else if (xop == 0x35) {   /* FPU Operations */
3140 #ifdef TARGET_SPARC64
3141                 int cond;
3142 #endif
3143                 if (gen_trap_ifnofpu(dc)) {
3144                     goto jmp_insn;
3145                 }
3146                 gen_op_clear_ieee_excp_and_FTT();
3147                 rs1 = GET_FIELD(insn, 13, 17);
3148                 rs2 = GET_FIELD(insn, 27, 31);
3149                 xop = GET_FIELD(insn, 18, 26);
3150                 save_state(dc);
3151
3152 #ifdef TARGET_SPARC64
3153 #define FMOVR(sz)                                                  \
3154                 do {                                               \
3155                     DisasCompare cmp;                              \
3156                     cond = GET_FIELD_SP(insn, 10, 12);             \
3157                     cpu_src1 = get_src1(dc, insn);                 \
3158                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3159                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3160                     free_compare(&cmp);                            \
3161                 } while (0)
3162
3163                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3164                     FMOVR(s);
3165                     break;
3166                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3167                     FMOVR(d);
3168                     break;
3169                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3170                     CHECK_FPU_FEATURE(dc, FLOAT128);
3171                     FMOVR(q);
3172                     break;
3173                 }
3174 #undef FMOVR
3175 #endif
3176                 switch (xop) {
3177 #ifdef TARGET_SPARC64
3178 #define FMOVCC(fcc, sz)                                                 \
3179                     do {                                                \
3180                         DisasCompare cmp;                               \
3181                         cond = GET_FIELD_SP(insn, 14, 17);              \
3182                         gen_fcompare(&cmp, fcc, cond);                  \
3183                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3184                         free_compare(&cmp);                             \
3185                     } while (0)
3186
3187                     case 0x001: /* V9 fmovscc %fcc0 */
3188                         FMOVCC(0, s);
3189                         break;
3190                     case 0x002: /* V9 fmovdcc %fcc0 */
3191                         FMOVCC(0, d);
3192                         break;
3193                     case 0x003: /* V9 fmovqcc %fcc0 */
3194                         CHECK_FPU_FEATURE(dc, FLOAT128);
3195                         FMOVCC(0, q);
3196                         break;
3197                     case 0x041: /* V9 fmovscc %fcc1 */
3198                         FMOVCC(1, s);
3199                         break;
3200                     case 0x042: /* V9 fmovdcc %fcc1 */
3201                         FMOVCC(1, d);
3202                         break;
3203                     case 0x043: /* V9 fmovqcc %fcc1 */
3204                         CHECK_FPU_FEATURE(dc, FLOAT128);
3205                         FMOVCC(1, q);
3206                         break;
3207                     case 0x081: /* V9 fmovscc %fcc2 */
3208                         FMOVCC(2, s);
3209                         break;
3210                     case 0x082: /* V9 fmovdcc %fcc2 */
3211                         FMOVCC(2, d);
3212                         break;
3213                     case 0x083: /* V9 fmovqcc %fcc2 */
3214                         CHECK_FPU_FEATURE(dc, FLOAT128);
3215                         FMOVCC(2, q);
3216                         break;
3217                     case 0x0c1: /* V9 fmovscc %fcc3 */
3218                         FMOVCC(3, s);
3219                         break;
3220                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3221                         FMOVCC(3, d);
3222                         break;
3223                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3224                         CHECK_FPU_FEATURE(dc, FLOAT128);
3225                         FMOVCC(3, q);
3226                         break;
3227 #undef FMOVCC
3228 #define FMOVCC(xcc, sz)                                                 \
3229                     do {                                                \
3230                         DisasCompare cmp;                               \
3231                         cond = GET_FIELD_SP(insn, 14, 17);              \
3232                         gen_compare(&cmp, xcc, cond, dc);               \
3233                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3234                         free_compare(&cmp);                             \
3235                     } while (0)
3236
3237                     case 0x101: /* V9 fmovscc %icc */
3238                         FMOVCC(0, s);
3239                         break;
3240                     case 0x102: /* V9 fmovdcc %icc */
3241                         FMOVCC(0, d);
3242                         break;
3243                     case 0x103: /* V9 fmovqcc %icc */
3244                         CHECK_FPU_FEATURE(dc, FLOAT128);
3245                         FMOVCC(0, q);
3246                         break;
3247                     case 0x181: /* V9 fmovscc %xcc */
3248                         FMOVCC(1, s);
3249                         break;
3250                     case 0x182: /* V9 fmovdcc %xcc */
3251                         FMOVCC(1, d);
3252                         break;
3253                     case 0x183: /* V9 fmovqcc %xcc */
3254                         CHECK_FPU_FEATURE(dc, FLOAT128);
3255                         FMOVCC(1, q);
3256                         break;
3257 #undef FMOVCC
3258 #endif
3259                     case 0x51: /* fcmps, V9 %fcc */
3260                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3261                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3262                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3263                         break;
3264                     case 0x52: /* fcmpd, V9 %fcc */
3265                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3266                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3267                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3268                         break;
3269                     case 0x53: /* fcmpq, V9 %fcc */
3270                         CHECK_FPU_FEATURE(dc, FLOAT128);
3271                         gen_op_load_fpr_QT0(QFPREG(rs1));
3272                         gen_op_load_fpr_QT1(QFPREG(rs2));
3273                         gen_op_fcmpq(rd & 3);
3274                         break;
3275                     case 0x55: /* fcmpes, V9 %fcc */
3276                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3277                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3278                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3279                         break;
3280                     case 0x56: /* fcmped, V9 %fcc */
3281                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3282                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3283                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3284                         break;
3285                     case 0x57: /* fcmpeq, V9 %fcc */
3286                         CHECK_FPU_FEATURE(dc, FLOAT128);
3287                         gen_op_load_fpr_QT0(QFPREG(rs1));
3288                         gen_op_load_fpr_QT1(QFPREG(rs2));
3289                         gen_op_fcmpeq(rd & 3);
3290                         break;
3291                     default:
3292                         goto illegal_insn;
3293                 }
3294             } else if (xop == 0x2) {
3295                 TCGv dst = gen_dest_gpr(dc, rd);
3296                 rs1 = GET_FIELD(insn, 13, 17);
3297                 if (rs1 == 0) {
3298                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3299                     if (IS_IMM) {       /* immediate */
3300                         simm = GET_FIELDs(insn, 19, 31);
3301                         tcg_gen_movi_tl(dst, simm);
3302                         gen_store_gpr(dc, rd, dst);
3303                     } else {            /* register */
3304                         rs2 = GET_FIELD(insn, 27, 31);
3305                         if (rs2 == 0) {
3306                             tcg_gen_movi_tl(dst, 0);
3307                             gen_store_gpr(dc, rd, dst);
3308                         } else {
3309                             cpu_src2 = gen_load_gpr(dc, rs2);
3310                             gen_store_gpr(dc, rd, cpu_src2);
3311                         }
3312                     }
3313                 } else {
3314                     cpu_src1 = get_src1(dc, insn);
3315                     if (IS_IMM) {       /* immediate */
3316                         simm = GET_FIELDs(insn, 19, 31);
3317                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3318                         gen_store_gpr(dc, rd, dst);
3319                     } else {            /* register */
3320                         rs2 = GET_FIELD(insn, 27, 31);
3321                         if (rs2 == 0) {
3322                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3323                             gen_store_gpr(dc, rd, cpu_src1);
3324                         } else {
3325                             cpu_src2 = gen_load_gpr(dc, rs2);
3326                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3327                             gen_store_gpr(dc, rd, dst);
3328                         }
3329                     }
3330                 }
3331 #ifdef TARGET_SPARC64
3332             } else if (xop == 0x25) { /* sll, V9 sllx */
3333                 cpu_src1 = get_src1(dc, insn);
3334                 if (IS_IMM) {   /* immediate */
3335                     simm = GET_FIELDs(insn, 20, 31);
3336                     if (insn & (1 << 12)) {
3337                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3338                     } else {
3339                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3340                     }
3341                 } else {                /* register */
3342                     rs2 = GET_FIELD(insn, 27, 31);
3343                     cpu_src2 = gen_load_gpr(dc, rs2);
3344                     cpu_tmp0 = get_temp_tl(dc);
3345                     if (insn & (1 << 12)) {
3346                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3347                     } else {
3348                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3349                     }
3350                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3351                 }
3352                 gen_store_gpr(dc, rd, cpu_dst);
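                     /* Instruction bit 12 is the V9 "X" bit: when set, sllx above
                      * (and srlx/srax below) use the full 6-bit shift count; when
                      * clear, the 32-bit forms mask the count to 5 bits and zero-
                      * or sign-extend the low 32 bits first.  Compare
                      * "sllx %o1, 3, %o2" with "sll %o1, 3, %o2".
                      */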
3353             } else if (xop == 0x26) { /* srl, V9 srlx */
3354                 cpu_src1 = get_src1(dc, insn);
3355                 if (IS_IMM) {   /* immediate */
3356                     simm = GET_FIELDs(insn, 20, 31);
3357                     if (insn & (1 << 12)) {
3358                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3359                     } else {
3360                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3361                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3362                     }
3363                 } else {                /* register */
3364                     rs2 = GET_FIELD(insn, 27, 31);
3365                     cpu_src2 = gen_load_gpr(dc, rs2);
3366                     cpu_tmp0 = get_temp_tl(dc);
3367                     if (insn & (1 << 12)) {
3368                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3369                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3370                     } else {
3371                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3372                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3373                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3374                     }
3375                 }
3376                 gen_store_gpr(dc, rd, cpu_dst);
3377             } else if (xop == 0x27) { /* sra, V9 srax */
3378                 cpu_src1 = get_src1(dc, insn);
3379                 if (IS_IMM) {   /* immediate */
3380                     simm = GET_FIELDs(insn, 20, 31);
3381                     if (insn & (1 << 12)) {
3382                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3383                     } else {
3384                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3385                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3386                     }
3387                 } else {                /* register */
3388                     rs2 = GET_FIELD(insn, 27, 31);
3389                     cpu_src2 = gen_load_gpr(dc, rs2);
3390                     cpu_tmp0 = get_temp_tl(dc);
3391                     if (insn & (1 << 12)) {
3392                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3393                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3394                     } else {
3395                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3396                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3397                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3398                     }
3399                 }
3400                 gen_store_gpr(dc, rd, cpu_dst);
3401 #endif
3402             } else if (xop < 0x36) {
3403                 if (xop < 0x20) {
3404                     cpu_src1 = get_src1(dc, insn);
3405                     cpu_src2 = get_src2(dc, insn);
3406                     switch (xop & ~0x10) {
3407                     case 0x0: /* add */
3408                         if (xop & 0x10) {
3409                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3410                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3411                             dc->cc_op = CC_OP_ADD;
3412                         } else {
3413                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3414                         }
3415                         break;
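                         /* In this switch, xop bit 4 (0x10) selects the cc-setting
                          * variant (addcc, andcc, ...).  Condition codes are tracked
                          * lazily: the result is copied to cpu_cc_dst and cc_op
                          * records how icc/xcc can be recomputed later.
                          */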
3416                     case 0x1: /* and */
3417                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3418                         if (xop & 0x10) {
3419                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3420                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3421                             dc->cc_op = CC_OP_LOGIC;
3422                         }
3423                         break;
3424                     case 0x2: /* or */
3425                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3426                         if (xop & 0x10) {
3427                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3428                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3429                             dc->cc_op = CC_OP_LOGIC;
3430                         }
3431                         break;
3432                     case 0x3: /* xor */
3433                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3434                         if (xop & 0x10) {
3435                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3436                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3437                             dc->cc_op = CC_OP_LOGIC;
3438                         }
3439                         break;
3440                     case 0x4: /* sub */
3441                         if (xop & 0x10) {
3442                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3443                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3444                             dc->cc_op = CC_OP_SUB;
3445                         } else {
3446                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3447                         }
3448                         break;
3449                     case 0x5: /* andn */
3450                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3451                         if (xop & 0x10) {
3452                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3453                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3454                             dc->cc_op = CC_OP_LOGIC;
3455                         }
3456                         break;
3457                     case 0x6: /* orn */
3458                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3459                         if (xop & 0x10) {
3460                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3461                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3462                             dc->cc_op = CC_OP_LOGIC;
3463                         }
3464                         break;
3465                     case 0x7: /* xorn */
3466                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3467                         if (xop & 0x10) {
3468                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3469                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3470                             dc->cc_op = CC_OP_LOGIC;
3471                         }
3472                         break;
3473                     case 0x8: /* addx, V9 addc */
3474                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3475                                         (xop & 0x10));
3476                         break;
3477 #ifdef TARGET_SPARC64
3478                     case 0x9: /* V9 mulx */
3479                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3480                         break;
3481 #endif
3482                     case 0xa: /* umul */
3483                         CHECK_IU_FEATURE(dc, MUL);
3484                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3485                         if (xop & 0x10) {
3486                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3487                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3488                             dc->cc_op = CC_OP_LOGIC;
3489                         }
3490                         break;
3491                     case 0xb: /* smul */
3492                         CHECK_IU_FEATURE(dc, MUL);
3493                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3494                         if (xop & 0x10) {
3495                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3496                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3497                             dc->cc_op = CC_OP_LOGIC;
3498                         }
3499                         break;
3500                     case 0xc: /* subx, V9 subc */
3501                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3502                                         (xop & 0x10));
3503                         break;
3504 #ifdef TARGET_SPARC64
3505                     case 0xd: /* V9 udivx */
3506                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3507                         break;
3508 #endif
3509                     case 0xe: /* udiv */
3510                         CHECK_IU_FEATURE(dc, DIV);
3511                         if (xop & 0x10) {
3512                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3513                                                cpu_src2);
3514                             dc->cc_op = CC_OP_DIV;
3515                         } else {
3516                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3517                                             cpu_src2);
3518                         }
3519                         break;
3520                     case 0xf: /* sdiv */
3521                         CHECK_IU_FEATURE(dc, DIV);
3522                         if (xop & 0x10) {
3523                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3524                                                cpu_src2);
3525                             dc->cc_op = CC_OP_DIV;
3526                         } else {
3527                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3528                                             cpu_src2);
3529                         }
3530                         break;
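                         /* For udiv/sdiv above, the dividend is the 64-bit value
                          * Y:rs1 and the divisor is rs2; overflow handling and the
                          * divide-by-zero trap live in the helpers, which is why
                          * they take cpu_env.
                          */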
3531                     default:
3532                         goto illegal_insn;
3533                     }
3534                     gen_store_gpr(dc, rd, cpu_dst);
3535                 } else {
3536                     cpu_src1 = get_src1(dc, insn);
3537                     cpu_src2 = get_src2(dc, insn);
3538                     switch (xop) {
3539                     case 0x20: /* taddcc */
3540                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3541                         gen_store_gpr(dc, rd, cpu_dst);
3542                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3543                         dc->cc_op = CC_OP_TADD;
3544                         break;
3545                     case 0x21: /* tsubcc */
3546                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3547                         gen_store_gpr(dc, rd, cpu_dst);
3548                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3549                         dc->cc_op = CC_OP_TSUB;
3550                         break;
3551                     case 0x22: /* taddcctv */
3552                         gen_helper_taddcctv(cpu_dst, cpu_env,
3553                                             cpu_src1, cpu_src2);
3554                         gen_store_gpr(dc, rd, cpu_dst);
3555                         dc->cc_op = CC_OP_TADDTV;
3556                         break;
3557                     case 0x23: /* tsubcctv */
3558                         gen_helper_tsubcctv(cpu_dst, cpu_env,
3559                                             cpu_src1, cpu_src2);
3560                         gen_store_gpr(dc, rd, cpu_dst);
3561                         dc->cc_op = CC_OP_TSUBTV;
3562                         break;
3563                     case 0x24: /* mulscc */
3564                         update_psr(dc);
3565                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3566                         gen_store_gpr(dc, rd, cpu_dst);
3567                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3568                         dc->cc_op = CC_OP_ADD;
3569                         break;
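                     /* mulscc above performs one step of the V8 iterative multiply:
                      * it needs up-to-date icc (hence update_psr) because N xor V is
                      * shifted into the partial product, it adds rs2 only when the
                      * LSB of Y is set, and it sets icc like addcc.
                      */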
3570 #ifndef TARGET_SPARC64
3571                     case 0x25:  /* sll */
3572                         if (IS_IMM) { /* immediate */
3573                             simm = GET_FIELDs(insn, 20, 31);
3574                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3575                         } else { /* register */
3576                             cpu_tmp0 = get_temp_tl(dc);
3577                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3578                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3579                         }
3580                         gen_store_gpr(dc, rd, cpu_dst);
3581                         break;
3582                     case 0x26:  /* srl */
3583                         if (IS_IMM) { /* immediate */
3584                             simm = GET_FIELDs(insn, 20, 31);
3585                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3586                         } else { /* register */
3587                             cpu_tmp0 = get_temp_tl(dc);
3588                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3589                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3590                         }
3591                         gen_store_gpr(dc, rd, cpu_dst);
3592                         break;
3593                     case 0x27:  /* sra */
3594                         if (IS_IMM) { /* immediate */
3595                             simm = GET_FIELDs(insn, 20, 31);
3596                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3597                         } else { /* register */
3598                             cpu_tmp0 = get_temp_tl(dc);
3599                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3600                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3601                         }
3602                         gen_store_gpr(dc, rd, cpu_dst);
3603                         break;
3604 #endif
3605                     case 0x30:
3606                         {
3607                             cpu_tmp0 = get_temp_tl(dc);
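                                 /* For every wr%y/wr%asr form, the architected value
                                  * written is r[rs1] XOR (r[rs2] or simm13), not a
                                  * plain move, which is why each case below starts
                                  * with a xor.
                                  */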
3608                             switch (rd) {
3609                             case 0: /* wry */
3610                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3611                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3612                                 break;
3613 #ifndef TARGET_SPARC64
3614                             case 0x01 ... 0x0f: /* undefined in the
3615                                                    SPARCv8 manual, nop
3616                                                    on the microSPARC
3617                                                    II */
3618                             case 0x10 ... 0x1f: /* implementation-dependent
3619                                                    in the SPARCv8
3620                                                    manual, nop on the
3621                                                    microSPARC II */
3622                                 if ((rd == 0x13) && (dc->def->features &
3623                                                      CPU_FEATURE_POWERDOWN)) {
3624                                     /* LEON3 power-down */
3625                                     save_state(dc);
3626                                     gen_helper_power_down(cpu_env);
3627                                 }
3628                                 break;
3629 #else
3630                             case 0x2: /* V9 wrccr */
3631                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3632                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
3633                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3634                                 dc->cc_op = CC_OP_FLAGS;
3635                                 break;
3636                             case 0x3: /* V9 wrasi */
3637                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3638                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
3639                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_tmp0);
3640                                 break;
3641                             case 0x6: /* V9 wrfprs */
3642                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3643                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
3644                                 save_state(dc);
3645                                 gen_op_next_insn();
3646                                 tcg_gen_exit_tb(0);
3647                                 dc->is_br = 1;
3648                                 break;
3649                             case 0xf: /* V9 sir, nop if user */
3650 #if !defined(CONFIG_USER_ONLY)
3651                                 if (supervisor(dc)) {
3652                                     ; // XXX
3653                                 }
3654 #endif
3655                                 break;
3656                             case 0x13: /* Graphics Status */
3657                                 if (gen_trap_ifnofpu(dc)) {
3658                                     goto jmp_insn;
3659                                 }
3660                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3661                                 break;
3662                             case 0x14: /* Softint set */
3663                                 if (!supervisor(dc))
3664                                     goto illegal_insn;
3665                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3666                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
3667                                 break;
3668                             case 0x15: /* Softint clear */
3669                                 if (!supervisor(dc))
3670                                     goto illegal_insn;
3671                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3672                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
3673                                 break;
3674                             case 0x16: /* Softint write */
3675                                 if (!supervisor(dc))
3676                                     goto illegal_insn;
3677                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3678                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
3679                                 break;
3680                             case 0x17: /* Tick compare */
3681 #if !defined(CONFIG_USER_ONLY)
3682                                 if (!supervisor(dc))
3683                                     goto illegal_insn;
3684 #endif
3685                                 {
3686                                     TCGv_ptr r_tickptr;
3687
3688                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3689                                                    cpu_src2);
3690                                     r_tickptr = tcg_temp_new_ptr();
3691                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3692                                                    offsetof(CPUSPARCState, tick));
3693                                     gen_helper_tick_set_limit(r_tickptr,
3694                                                               cpu_tick_cmpr);
3695                                     tcg_temp_free_ptr(r_tickptr);
3696                                 }
3697                                 break;
3698                             case 0x18: /* System tick */
3699 #if !defined(CONFIG_USER_ONLY)
3700                                 if (!supervisor(dc))
3701                                     goto illegal_insn;
3702 #endif
3703                                 {
3704                                     TCGv_ptr r_tickptr;
3705
3706                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3707                                                    cpu_src2);
3708                                     r_tickptr = tcg_temp_new_ptr();
3709                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3710                                                    offsetof(CPUSPARCState, stick));
3711                                     gen_helper_tick_set_count(r_tickptr,
3712                                                               cpu_tmp0);
3713                                     tcg_temp_free_ptr(r_tickptr);
3714                                 }
3715                                 break;
3716                             case 0x19: /* System tick compare */
3717 #if !defined(CONFIG_USER_ONLY)
3718                                 if (!supervisor(dc))
3719                                     goto illegal_insn;
3720 #endif
3721                                 {
3722                                     TCGv_ptr r_tickptr;
3723
3724                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3725                                                    cpu_src2);
3726                                     r_tickptr = tcg_temp_new_ptr();
3727                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3728                                                    offsetof(CPUSPARCState, stick));
3729                                     gen_helper_tick_set_limit(r_tickptr,
3730                                                               cpu_stick_cmpr);
3731                                     tcg_temp_free_ptr(r_tickptr);
3732                                 }
3733                                 break;
3734
3735                             case 0x10: /* Performance Control */
3736                             case 0x11: /* Performance Instrumentation
3737                                           Counter */
3738                             case 0x12: /* Dispatch Control */
3739 #endif
3740                             default:
3741                                 goto illegal_insn;
3742                             }
3743                         }
3744                         break;
3745 #if !defined(CONFIG_USER_ONLY)
3746                     case 0x31: /* wrpsr, V9 saved, restored */
3747                         {
3748                             if (!supervisor(dc))
3749                                 goto priv_insn;
3750 #ifdef TARGET_SPARC64
3751                             switch (rd) {
3752                             case 0:
3753                                 gen_helper_saved(cpu_env);
3754                                 break;
3755                             case 1:
3756                                 gen_helper_restored(cpu_env);
3757                                 break;
3758                             case 2: /* UA2005 allclean */
3759                             case 3: /* UA2005 otherw */
3760                             case 4: /* UA2005 normalw */
3761                             case 5: /* UA2005 invalw */
3762                                 // XXX
3763                             default:
3764                                 goto illegal_insn;
3765                             }
3766 #else
3767                             cpu_tmp0 = get_temp_tl(dc);
3768                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3769                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
3770                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3771                             dc->cc_op = CC_OP_FLAGS;
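                                 /* A PSR write can change ET, PIL and CWP, which
                                  * affects how subsequent code must be translated,
                                  * so save state and end the TB after this insn.
                                  */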
3772                             save_state(dc);
3773                             gen_op_next_insn();
3774                             tcg_gen_exit_tb(0);
3775                             dc->is_br = 1;
3776 #endif
3777                         }
3778                         break;
3779                     case 0x32: /* wrwim, V9 wrpr */
3780                         {
3781                             if (!supervisor(dc))
3782                                 goto priv_insn;
3783                             cpu_tmp0 = get_temp_tl(dc);
3784                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3785 #ifdef TARGET_SPARC64
3786                             switch (rd) {
3787                             case 0: // tpc
3788                                 {
3789                                     TCGv_ptr r_tsptr;
3790
3791                                     r_tsptr = tcg_temp_new_ptr();
3792                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3793                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3794                                                   offsetof(trap_state, tpc));
3795                                     tcg_temp_free_ptr(r_tsptr);
3796                                 }
3797                                 break;
3798                             case 1: // tnpc
3799                                 {
3800                                     TCGv_ptr r_tsptr;
3801
3802                                     r_tsptr = tcg_temp_new_ptr();
3803                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3804                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3805                                                   offsetof(trap_state, tnpc));
3806                                     tcg_temp_free_ptr(r_tsptr);
3807                                 }
3808                                 break;
3809                             case 2: // tstate
3810                                 {
3811                                     TCGv_ptr r_tsptr;
3812
3813                                     r_tsptr = tcg_temp_new_ptr();
3814                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3815                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3816                                                   offsetof(trap_state,
3817                                                            tstate));
3818                                     tcg_temp_free_ptr(r_tsptr);
3819                                 }
3820                                 break;
3821                             case 3: // tt
3822                                 {
3823                                     TCGv_ptr r_tsptr;
3824
3825                                     r_tsptr = tcg_temp_new_ptr();
3826                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3827                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
3828                                                     offsetof(trap_state, tt));
3829                                     tcg_temp_free_ptr(r_tsptr);
3830                                 }
3831                                 break;
3832                             case 4: // tick
3833                                 {
3834                                     TCGv_ptr r_tickptr;
3835
3836                                     r_tickptr = tcg_temp_new_ptr();
3837                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3838                                                    offsetof(CPUSPARCState, tick));
3839                                     gen_helper_tick_set_count(r_tickptr,
3840                                                               cpu_tmp0);
3841                                     tcg_temp_free_ptr(r_tickptr);
3842                                 }
3843                                 break;
3844                             case 5: // tba
3845                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3846                                 break;
3847                             case 6: // pstate
3848                                 save_state(dc);
3849                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3850                                 dc->npc = DYNAMIC_PC;
3851                                 break;
3852                             case 7: // tl
3853                                 save_state(dc);
3854                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3855                                                offsetof(CPUSPARCState, tl));
3856                                 dc->npc = DYNAMIC_PC;
3857                                 break;
3858                             case 8: // pil
3859                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
3860                                 break;
3861                             case 9: // cwp
3862                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3863                                 break;
3864                             case 10: // cansave
3865                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3866                                                 offsetof(CPUSPARCState,
3867                                                          cansave));
3868                                 break;
3869                             case 11: // canrestore
3870                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3871                                                 offsetof(CPUSPARCState,
3872                                                          canrestore));
3873                                 break;
3874                             case 12: // cleanwin
3875                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3876                                                 offsetof(CPUSPARCState,
3877                                                          cleanwin));
3878                                 break;
3879                             case 13: // otherwin
3880                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3881                                                 offsetof(CPUSPARCState,
3882                                                          otherwin));
3883                                 break;
3884                             case 14: // wstate
3885                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3886                                                 offsetof(CPUSPARCState,
3887                                                          wstate));
3888                                 break;
3889                             case 16: // UA2005 gl
3890                                 CHECK_IU_FEATURE(dc, GL);
3891                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3892                                                 offsetof(CPUSPARCState, gl));
3893                                 break;
3894                             case 26: // UA2005 strand status
3895                                 CHECK_IU_FEATURE(dc, HYPV);
3896                                 if (!hypervisor(dc))
3897                                     goto priv_insn;
3898                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3899                                 break;
3900                             default:
3901                                 goto illegal_insn;
3902                             }
3903 #else
3904                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
3905                             if (dc->def->nwindows != 32) {
3906                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
3907                                                 (1 << dc->def->nwindows) - 1);
3908                             }
3909 #endif
3910                         }
3911                         break;
3912                     case 0x33: /* wrtbr, UA2005 wrhpr */
3913                         {
3914 #ifndef TARGET_SPARC64
3915                             if (!supervisor(dc))
3916                                 goto priv_insn;
3917                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3918 #else
3919                             CHECK_IU_FEATURE(dc, HYPV);
3920                             if (!hypervisor(dc))
3921                                 goto priv_insn;
3922                             cpu_tmp0 = get_temp_tl(dc);
3923                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3924                             switch (rd) {
3925                             case 0: // hpstate
3926                                 // XXX gen_op_wrhpstate();
3927                                 save_state(dc);
3928                                 gen_op_next_insn();
3929                                 tcg_gen_exit_tb(0);
3930                                 dc->is_br = 1;
3931                                 break;
3932                             case 1: // htstate
3933                                 // XXX gen_op_wrhtstate();
3934                                 break;
3935                             case 3: // hintp
3936                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3937                                 break;
3938                             case 5: // htba
3939                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3940                                 break;
3941                             case 31: // hstick_cmpr
3942                                 {
3943                                     TCGv_ptr r_tickptr;
3944
3945                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3946                                     r_tickptr = tcg_temp_new_ptr();
3947                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3948                                                    offsetof(CPUSPARCState, hstick));
3949                                     gen_helper_tick_set_limit(r_tickptr,
3950                                                               cpu_hstick_cmpr);
3951                                     tcg_temp_free_ptr(r_tickptr);
3952                                 }
3953                                 break;
3954                             case 6: // hver is read-only; fall through to illegal_insn
3955                             default:
3956                                 goto illegal_insn;
3957                             }
3958 #endif
3959                         }
3960                         break;
3961 #endif
3962 #ifdef TARGET_SPARC64
3963                     case 0x2c: /* V9 movcc */
3964                         {
3965                             int cc = GET_FIELD_SP(insn, 11, 12);
3966                             int cond = GET_FIELD_SP(insn, 14, 17);
3967                             DisasCompare cmp;
3968                             TCGv dst;
3969
3970                             if (insn & (1 << 18)) {
3971                                 if (cc == 0) {
3972                                     gen_compare(&cmp, 0, cond, dc);
3973                                 } else if (cc == 2) {
3974                                     gen_compare(&cmp, 1, cond, dc);
3975                                 } else {
3976                                     goto illegal_insn;
3977                                 }
3978                             } else {
3979                                 gen_fcompare(&cmp, cc, cond);
3980                             }
3981
3982                             /* The get_src2 above loaded the normal 13-bit
3983                                immediate field, not the 11-bit field we have
3984                                in movcc.  But it did handle the reg case.  */
3985                             if (IS_IMM) {
3986                                 simm = GET_FIELD_SPs(insn, 0, 10);
3987                                 tcg_gen_movi_tl(cpu_src2, simm);
3988                             }
3989
3990                             dst = gen_load_gpr(dc, rd);
3991                             tcg_gen_movcond_tl(cmp.cond, dst,
3992                                                cmp.c1, cmp.c2,
3993                                                cpu_src2, dst);
3994                             free_compare(&cmp);
3995                             gen_store_gpr(dc, rd, dst);
3996                             break;
3997                         }
3998                     case 0x2d: /* V9 sdivx */
3999                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4000                         gen_store_gpr(dc, rd, cpu_dst);
4001                         break;
4002                     case 0x2e: /* V9 popc */
4003                         gen_helper_popc(cpu_dst, cpu_src2);
4004                         gen_store_gpr(dc, rd, cpu_dst);
4005                         break;
4006                     case 0x2f: /* V9 movr */
4007                         {
4008                             int cond = GET_FIELD_SP(insn, 10, 12);
4009                             DisasCompare cmp;
4010                             TCGv dst;
4011
4012                             gen_compare_reg(&cmp, cond, cpu_src1);
4013
4014                             /* The get_src2 above loaded the normal 13-bit
4015                                immediate field, not the 10-bit field we have
4016                                in movr.  But it did handle the reg case.  */
4017                             if (IS_IMM) {
4018                                 simm = GET_FIELD_SPs(insn, 0, 9);
4019                                 tcg_gen_movi_tl(cpu_src2, simm);
4020                             }
4021
4022                             dst = gen_load_gpr(dc, rd);
4023                             tcg_gen_movcond_tl(cmp.cond, dst,
4024                                                cmp.c1, cmp.c2,
4025                                                cpu_src2, dst);
4026                             free_compare(&cmp);
4027                             gen_store_gpr(dc, rd, dst);
4028                             break;
4029                         }
4030 #endif
4031                     default:
4032                         goto illegal_insn;
4033                     }
4034                 }
4035             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4036 #ifdef TARGET_SPARC64
4037                 int opf = GET_FIELD_SP(insn, 5, 13);
4038                 rs1 = GET_FIELD(insn, 13, 17);
4039                 rs2 = GET_FIELD(insn, 27, 31);
4040                 if (gen_trap_ifnofpu(dc)) {
4041                     goto jmp_insn;
4042                 }
4043
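                     /* opf (insn bits 5-13) selects the IMPDEP1/VIS sub-opcode.
                      * The edge/array/alignaddr ops below work on integer
                      * registers, the f* ops on FP registers, and the whole group
                      * is gated on the FPU, e.g. "edge8 %o0, %o1, %o2".
                      */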
4044                 switch (opf) {
4045                 case 0x000: /* VIS I edge8cc */
4046                     CHECK_FPU_FEATURE(dc, VIS1);
4047                     cpu_src1 = gen_load_gpr(dc, rs1);
4048                     cpu_src2 = gen_load_gpr(dc, rs2);
4049                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4050                     gen_store_gpr(dc, rd, cpu_dst);
4051                     break;
4052                 case 0x001: /* VIS II edge8n */
4053                     CHECK_FPU_FEATURE(dc, VIS2);
4054                     cpu_src1 = gen_load_gpr(dc, rs1);
4055                     cpu_src2 = gen_load_gpr(dc, rs2);
4056                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4057                     gen_store_gpr(dc, rd, cpu_dst);
4058                     break;
4059                 case 0x002: /* VIS I edge8lcc */
4060                     CHECK_FPU_FEATURE(dc, VIS1);
4061                     cpu_src1 = gen_load_gpr(dc, rs1);
4062                     cpu_src2 = gen_load_gpr(dc, rs2);
4063                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4064                     gen_store_gpr(dc, rd, cpu_dst);
4065                     break;
4066                 case 0x003: /* VIS II edge8ln */
4067                     CHECK_FPU_FEATURE(dc, VIS2);
4068                     cpu_src1 = gen_load_gpr(dc, rs1);
4069                     cpu_src2 = gen_load_gpr(dc, rs2);
4070                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4071                     gen_store_gpr(dc, rd, cpu_dst);
4072                     break;
4073                 case 0x004: /* VIS I edge16cc */
4074                     CHECK_FPU_FEATURE(dc, VIS1);
4075                     cpu_src1 = gen_load_gpr(dc, rs1);
4076                     cpu_src2 = gen_load_gpr(dc, rs2);
4077                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4078                     gen_store_gpr(dc, rd, cpu_dst);
4079                     break;
4080                 case 0x005: /* VIS II edge16n */
4081                     CHECK_FPU_FEATURE(dc, VIS2);
4082                     cpu_src1 = gen_load_gpr(dc, rs1);
4083                     cpu_src2 = gen_load_gpr(dc, rs2);
4084                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4085                     gen_store_gpr(dc, rd, cpu_dst);
4086                     break;
4087                 case 0x006: /* VIS I edge16lcc */
4088                     CHECK_FPU_FEATURE(dc, VIS1);
4089                     cpu_src1 = gen_load_gpr(dc, rs1);
4090                     cpu_src2 = gen_load_gpr(dc, rs2);
4091                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4092                     gen_store_gpr(dc, rd, cpu_dst);
4093                     break;
4094                 case 0x007: /* VIS II edge16ln */
4095                     CHECK_FPU_FEATURE(dc, VIS2);
4096                     cpu_src1 = gen_load_gpr(dc, rs1);
4097                     cpu_src2 = gen_load_gpr(dc, rs2);
4098                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4099                     gen_store_gpr(dc, rd, cpu_dst);
4100                     break;
4101                 case 0x008: /* VIS I edge32cc */
4102                     CHECK_FPU_FEATURE(dc, VIS1);
4103                     cpu_src1 = gen_load_gpr(dc, rs1);
4104                     cpu_src2 = gen_load_gpr(dc, rs2);
4105                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4106                     gen_store_gpr(dc, rd, cpu_dst);
4107                     break;
4108                 case 0x009: /* VIS II edge32n */
4109                     CHECK_FPU_FEATURE(dc, VIS2);
4110                     cpu_src1 = gen_load_gpr(dc, rs1);
4111                     cpu_src2 = gen_load_gpr(dc, rs2);
4112                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4113                     gen_store_gpr(dc, rd, cpu_dst);
4114                     break;
4115                 case 0x00a: /* VIS I edge32lcc */
4116                     CHECK_FPU_FEATURE(dc, VIS1);
4117                     cpu_src1 = gen_load_gpr(dc, rs1);
4118                     cpu_src2 = gen_load_gpr(dc, rs2);
4119                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4120                     gen_store_gpr(dc, rd, cpu_dst);
4121                     break;
4122                 case 0x00b: /* VIS II edge32ln */
4123                     CHECK_FPU_FEATURE(dc, VIS2);
4124                     cpu_src1 = gen_load_gpr(dc, rs1);
4125                     cpu_src2 = gen_load_gpr(dc, rs2);
4126                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4127                     gen_store_gpr(dc, rd, cpu_dst);
4128                     break;
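                     /* The gen_edge arguments above encode the element width
                      * (8, 16 or 32), whether the cc forms update the condition
                      * codes, and whether the "l" (little-endian) ordering is
                      * used; the VIS II "n" forms skip the cc update.
                      */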
4129                 case 0x010: /* VIS I array8 */
4130                     CHECK_FPU_FEATURE(dc, VIS1);
4131                     cpu_src1 = gen_load_gpr(dc, rs1);
4132                     cpu_src2 = gen_load_gpr(dc, rs2);
4133                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4134                     gen_store_gpr(dc, rd, cpu_dst);
4135                     break;
4136                 case 0x012: /* VIS I array16 */
4137                     CHECK_FPU_FEATURE(dc, VIS1);
4138                     cpu_src1 = gen_load_gpr(dc, rs1);
4139                     cpu_src2 = gen_load_gpr(dc, rs2);
4140                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4141                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4142                     gen_store_gpr(dc, rd, cpu_dst);
4143                     break;
4144                 case 0x014: /* VIS I array32 */
4145                     CHECK_FPU_FEATURE(dc, VIS1);
4146                     cpu_src1 = gen_load_gpr(dc, rs1);
4147                     cpu_src2 = gen_load_gpr(dc, rs2);
4148                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4149                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4150                     gen_store_gpr(dc, rd, cpu_dst);
4151                     break;
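                     /* array16 and array32 reuse the array8 helper and shift the
                      * resulting blocked-array address left by 1 or 2 to scale it
                      * for the wider element sizes.
                      */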
4152                 case 0x018: /* VIS I alignaddr */
4153                     CHECK_FPU_FEATURE(dc, VIS1);
4154                     cpu_src1 = gen_load_gpr(dc, rs1);
4155                     cpu_src2 = gen_load_gpr(dc, rs2);
4156                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4157                     gen_store_gpr(dc, rd, cpu_dst);
4158                     break;
4159                 case 0x01a: /* VIS I alignaddrl */
4160                     CHECK_FPU_FEATURE(dc, VIS1);
4161                     cpu_src1 = gen_load_gpr(dc, rs1);
4162                     cpu_src2 = gen_load_gpr(dc, rs2);
4163                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4164                     gen_store_gpr(dc, rd, cpu_dst);
4165                     break;
4166                 case 0x019: /* VIS II bmask */
4167                     CHECK_FPU_FEATURE(dc, VIS2);
4168                     cpu_src1 = gen_load_gpr(dc, rs1);
4169                     cpu_src2 = gen_load_gpr(dc, rs2);
4170                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4171                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4172                     gen_store_gpr(dc, rd, cpu_dst);
4173                     break;
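                     /* bmask stores rs1 + rs2 both in rd and in GSR bits 63:32,
                      * where a later bshuffle reads it as its byte-selection mask.
                      */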
4174                 case 0x020: /* VIS I fcmple16 */
4175                     CHECK_FPU_FEATURE(dc, VIS1);
4176                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4177                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4178                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4179                     gen_store_gpr(dc, rd, cpu_dst);
4180                     break;
4181                 case 0x022: /* VIS I fcmpne16 */
4182                     CHECK_FPU_FEATURE(dc, VIS1);
4183                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4184                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4185                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4186                     gen_store_gpr(dc, rd, cpu_dst);
4187                     break;
4188                 case 0x024: /* VIS I fcmple32 */
4189                     CHECK_FPU_FEATURE(dc, VIS1);
4190                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4191                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4192                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4193                     gen_store_gpr(dc, rd, cpu_dst);
4194                     break;
4195                 case 0x026: /* VIS I fcmpne32 */
4196                     CHECK_FPU_FEATURE(dc, VIS1);
4197                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4198                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4199                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4200                     gen_store_gpr(dc, rd, cpu_dst);
4201                     break;
4202                 case 0x028: /* VIS I fcmpgt16 */
4203                     CHECK_FPU_FEATURE(dc, VIS1);
4204                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4205                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4206                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4207                     gen_store_gpr(dc, rd, cpu_dst);
4208                     break;
4209                 case 0x02a: /* VIS I fcmpeq16 */
4210                     CHECK_FPU_FEATURE(dc, VIS1);
4211                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4212                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4213                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4214                     gen_store_gpr(dc, rd, cpu_dst);
4215                     break;
4216                 case 0x02c: /* VIS I fcmpgt32 */
4217                     CHECK_FPU_FEATURE(dc, VIS1);
4218                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4219                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4220                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4221                     gen_store_gpr(dc, rd, cpu_dst);
4222                     break;
4223                 case 0x02e: /* VIS I fcmpeq32 */
4224                     CHECK_FPU_FEATURE(dc, VIS1);
4225                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4226                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4227                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4228                     gen_store_gpr(dc, rd, cpu_dst);
4229                     break;
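                     /* Unlike the IEEE fcmp* earlier, these VIS comparisons work
                      * on packed 16- or 32-bit elements of a 64-bit FP register
                      * and return a per-element result bitmask in an integer
                      * destination, hence gen_store_gpr.
                      */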
4230                 case 0x031: /* VIS I fmul8x16 */
4231                     CHECK_FPU_FEATURE(dc, VIS1);
4232                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4233                     break;
4234                 case 0x033: /* VIS I fmul8x16au */
4235                     CHECK_FPU_FEATURE(dc, VIS1);
4236                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4237                     break;
4238                 case 0x035: /* VIS I fmul8x16al */
4239                     CHECK_FPU_FEATURE(dc, VIS1);
4240                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4241                     break;
4242                 case 0x036: /* VIS I fmul8sux16 */
4243                     CHECK_FPU_FEATURE(dc, VIS1);
4244                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4245                     break;
4246                 case 0x037: /* VIS I fmul8ulx16 */
4247                     CHECK_FPU_FEATURE(dc, VIS1);
4248                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4249                     break;
4250                 case 0x038: /* VIS I fmuld8sux16 */
4251                     CHECK_FPU_FEATURE(dc, VIS1);
4252                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4253                     break;
4254                 case 0x039: /* VIS I fmuld8ulx16 */
4255                     CHECK_FPU_FEATURE(dc, VIS1);
4256                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4257                     break;
4258                 case 0x03a: /* VIS I fpack32 */
4259                     CHECK_FPU_FEATURE(dc, VIS1);
4260                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4261                     break;
4262                 case 0x03b: /* VIS I fpack16 */
4263                     CHECK_FPU_FEATURE(dc, VIS1);
4264                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4265                     cpu_dst_32 = gen_dest_fpr_F(dc);
4266                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4267                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4268                     break;
4269                 case 0x03d: /* VIS I fpackfix */
4270                     CHECK_FPU_FEATURE(dc, VIS1);
4271                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4272                     cpu_dst_32 = gen_dest_fpr_F(dc);
4273                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4274                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4275                     break;
4276                 case 0x03e: /* VIS I pdist */
4277                     CHECK_FPU_FEATURE(dc, VIS1);
4278                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4279                     break;
4280                 case 0x048: /* VIS I faligndata */
4281                     CHECK_FPU_FEATURE(dc, VIS1);
4282                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4283                     break;
4284                 case 0x04b: /* VIS I fpmerge */
4285                     CHECK_FPU_FEATURE(dc, VIS1);
4286                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4287                     break;
4288                 case 0x04c: /* VIS II bshuffle */
4289                     CHECK_FPU_FEATURE(dc, VIS2);
4290                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4291                     break;
4292                 case 0x04d: /* VIS I fexpand */
4293                     CHECK_FPU_FEATURE(dc, VIS1);
4294                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4295                     break;
4296                 case 0x050: /* VIS I fpadd16 */
4297                     CHECK_FPU_FEATURE(dc, VIS1);
4298                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4299                     break;
4300                 case 0x051: /* VIS I fpadd16s */
4301                     CHECK_FPU_FEATURE(dc, VIS1);
4302                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4303                     break;
4304                 case 0x052: /* VIS I fpadd32 */
4305                     CHECK_FPU_FEATURE(dc, VIS1);
4306                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4307                     break;
4308                 case 0x053: /* VIS I fpadd32s */
4309                     CHECK_FPU_FEATURE(dc, VIS1);
4310                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4311                     break;
4312                 case 0x054: /* VIS I fpsub16 */
4313                     CHECK_FPU_FEATURE(dc, VIS1);
4314                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4315                     break;
4316                 case 0x055: /* VIS I fpsub16s */
4317                     CHECK_FPU_FEATURE(dc, VIS1);
4318                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4319                     break;
4320                 case 0x056: /* VIS I fpsub32 */
4321                     CHECK_FPU_FEATURE(dc, VIS1);
4322                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4323                     break;
4324                 case 0x057: /* VIS I fpsub32s */
4325                     CHECK_FPU_FEATURE(dc, VIS1);
4326                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4327                     break;
4328                 case 0x060: /* VIS I fzero */
4329                     CHECK_FPU_FEATURE(dc, VIS1);
4330                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4331                     tcg_gen_movi_i64(cpu_dst_64, 0);
4332                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4333                     break;
4334                 case 0x061: /* VIS I fzeros */
4335                     CHECK_FPU_FEATURE(dc, VIS1);
4336                     cpu_dst_32 = gen_dest_fpr_F(dc);
4337                     tcg_gen_movi_i32(cpu_dst_32, 0);
4338                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4339                     break;
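                     /* The f-logical cases that follow (fnor ... fxnors, plus the
                      * fzero/fzeros constants above and fsrc/fnot) are plain
                      * bitwise ops on 64-bit ("D") or 32-bit ("s") FP registers;
                      * they map straight onto TCG logical ops through the
                      * gen_ne_* wrappers and do not touch the FSR.
                      */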
4340                 case 0x062: /* VIS I fnor */
4341                     CHECK_FPU_FEATURE(dc, VIS1);
4342                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4343                     break;
4344                 case 0x063: /* VIS I fnors */
4345                     CHECK_FPU_FEATURE(dc, VIS1);
4346                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4347                     break;
4348                 case 0x064: /* VIS I fandnot2 */
4349                     CHECK_FPU_FEATURE(dc, VIS1);
4350                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4351                     break;
4352                 case 0x065: /* VIS I fandnot2s */
4353                     CHECK_FPU_FEATURE(dc, VIS1);
4354                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4355                     break;
4356                 case 0x066: /* VIS I fnot2 */
4357                     CHECK_FPU_FEATURE(dc, VIS1);
4358                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4359                     break;
4360                 case 0x067: /* VIS I fnot2s */
4361                     CHECK_FPU_FEATURE(dc, VIS1);
4362                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4363                     break;
4364                 case 0x068: /* VIS I fandnot1 */
4365                     CHECK_FPU_FEATURE(dc, VIS1);
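                          /* fandnot1 computes ~rs1 & rs2, so the operands are
                             passed to andc in swapped order (likewise for
                             fandnot1s, fornot1 and fornot1s below).  */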
4366                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4367                     break;
4368                 case 0x069: /* VIS I fandnot1s */
4369                     CHECK_FPU_FEATURE(dc, VIS1);
4370                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4371                     break;
4372                 case 0x06a: /* VIS I fnot1 */
4373                     CHECK_FPU_FEATURE(dc, VIS1);
4374                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4375                     break;
4376                 case 0x06b: /* VIS I fnot1s */
4377                     CHECK_FPU_FEATURE(dc, VIS1);
4378                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4379                     break;
4380                 case 0x06c: /* VIS I fxor */
4381                     CHECK_FPU_FEATURE(dc, VIS1);
4382                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4383                     break;
4384                 case 0x06d: /* VIS I fxors */
4385                     CHECK_FPU_FEATURE(dc, VIS1);
4386                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4387                     break;
4388                 case 0x06e: /* VIS I fnand */
4389                     CHECK_FPU_FEATURE(dc, VIS1);
4390                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4391                     break;
4392                 case 0x06f: /* VIS I fnands */
4393                     CHECK_FPU_FEATURE(dc, VIS1);
4394                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4395                     break;
4396                 case 0x070: /* VIS I fand */
4397                     CHECK_FPU_FEATURE(dc, VIS1);
4398                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4399                     break;
4400                 case 0x071: /* VIS I fands */
4401                     CHECK_FPU_FEATURE(dc, VIS1);
4402                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4403                     break;
4404                 case 0x072: /* VIS I fxnor */
4405                     CHECK_FPU_FEATURE(dc, VIS1);
4406                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4407                     break;
4408                 case 0x073: /* VIS I fxnors */
4409                     CHECK_FPU_FEATURE(dc, VIS1);
4410                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4411                     break;
4412                 case 0x074: /* VIS I fsrc1 */
4413                     CHECK_FPU_FEATURE(dc, VIS1);
4414                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4415                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4416                     break;
4417                 case 0x075: /* VIS I fsrc1s */
4418                     CHECK_FPU_FEATURE(dc, VIS1);
4419                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4420                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4421                     break;
4422                 case 0x076: /* VIS I fornot2 */
4423                     CHECK_FPU_FEATURE(dc, VIS1);
4424                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4425                     break;
4426                 case 0x077: /* VIS I fornot2s */
4427                     CHECK_FPU_FEATURE(dc, VIS1);
4428                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4429                     break;
4430                 case 0x078: /* VIS I fsrc2 */
4431                     CHECK_FPU_FEATURE(dc, VIS1);
4432                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4433                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4434                     break;
4435                 case 0x079: /* VIS I fsrc2s */
4436                     CHECK_FPU_FEATURE(dc, VIS1);
4437                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4438                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4439                     break;
4440                 case 0x07a: /* VIS I fornot1 */
4441                     CHECK_FPU_FEATURE(dc, VIS1);
4442                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4443                     break;
4444                 case 0x07b: /* VIS I fornot1s */
4445                     CHECK_FPU_FEATURE(dc, VIS1);
4446                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4447                     break;
4448                 case 0x07c: /* VIS I for */
4449                     CHECK_FPU_FEATURE(dc, VIS1);
4450                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4451                     break;
4452                 case 0x07d: /* VIS I fors */
4453                     CHECK_FPU_FEATURE(dc, VIS1);
4454                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4455                     break;
4456                 case 0x07e: /* VIS I fone */
4457                     CHECK_FPU_FEATURE(dc, VIS1);
4458                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4459                     tcg_gen_movi_i64(cpu_dst_64, -1);
4460                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4461                     break;
4462                 case 0x07f: /* VIS I fones */
4463                     CHECK_FPU_FEATURE(dc, VIS1);
4464                     cpu_dst_32 = gen_dest_fpr_F(dc);
4465                     tcg_gen_movi_i32(cpu_dst_32, -1);
4466                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4467                     break;
4468                 case 0x080: /* VIS I shutdown */
4469                 case 0x081: /* VIS II siam */
4470                     // XXX: not implemented
4471                     goto illegal_insn;
4472                 default:
4473                     goto illegal_insn;
4474                 }
4475 #else
4476                 goto ncp_insn;
4477 #endif
4478             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4479 #ifdef TARGET_SPARC64
4480                 goto illegal_insn;
4481 #else
4482                 goto ncp_insn;
4483 #endif
4484 #ifdef TARGET_SPARC64
4485             } else if (xop == 0x39) { /* V9 return */
4486                 TCGv_i32 r_const;
4487
4488                 save_state(dc);
4489                 cpu_src1 = get_src1(dc, insn);
4490                 cpu_tmp0 = get_temp_tl(dc);
4491                 if (IS_IMM) {   /* immediate */
4492                     simm = GET_FIELDs(insn, 19, 31);
4493                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4494                 } else {                /* register */
4495                     rs2 = GET_FIELD(insn, 27, 31);
4496                     if (rs2) {
4497                         cpu_src2 = gen_load_gpr(dc, rs2);
4498                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4499                     } else {
4500                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4501                     }
4502                 }
4503                 gen_helper_restore(cpu_env);
4504                 gen_mov_pc_npc(dc);
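                     /* check_align traps if any bit of the mask (3) is set in
                        the address, i.e. the jump target must be 4-byte
                        aligned.  */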
4505                 r_const = tcg_const_i32(3);
4506                 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4507                 tcg_temp_free_i32(r_const);
4508                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4509                 dc->npc = DYNAMIC_PC;
4510                 goto jmp_insn;
4511 #endif
4512             } else {
4513                 cpu_src1 = get_src1(dc, insn);
4514                 cpu_tmp0 = get_temp_tl(dc);
4515                 if (IS_IMM) {   /* immediate */
4516                     simm = GET_FIELDs(insn, 19, 31);
4517                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4518                 } else {                /* register */
4519                     rs2 = GET_FIELD(insn, 27, 31);
4520                     if (rs2) {
4521                         cpu_src2 = gen_load_gpr(dc, rs2);
4522                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4523                     } else {
4524                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4525                     }
4526                 }
4527                 switch (xop) {
4528                 case 0x38:      /* jmpl */
4529                     {
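                             /* jmpl: rd receives the address of this
                                instruction, then control transfers to the
                                (4-byte aligned) target computed above.  */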
4530                         TCGv t;
4531                         TCGv_i32 r_const;
4532
4533                         t = gen_dest_gpr(dc, rd);
4534                         tcg_gen_movi_tl(t, dc->pc);
4535                         gen_store_gpr(dc, rd, t);
4536                         gen_mov_pc_npc(dc);
4537                         r_const = tcg_const_i32(3);
4538                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4539                         tcg_temp_free_i32(r_const);
4540                         gen_address_mask(dc, cpu_tmp0);
4541                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4542                         dc->npc = DYNAMIC_PC;
4543                     }
4544                     goto jmp_insn;
4545 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4546                 case 0x39:      /* rett, V9 return */
4547                     {
4548                         TCGv_i32 r_const;
4549
4550                         if (!supervisor(dc))
4551                             goto priv_insn;
4552                         gen_mov_pc_npc(dc);
4553                         r_const = tcg_const_i32(3);
4554                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4555                         tcg_temp_free_i32(r_const);
4556                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4557                         dc->npc = DYNAMIC_PC;
4558                         gen_helper_rett(cpu_env);
4559                     }
4560                     goto jmp_insn;
4561 #endif
4562                 case 0x3b: /* flush */
4563                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4564                         goto unimp_flush;
4565                     /* nop */
4566                     break;
4567                 case 0x3c:      /* save */
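                         /* The sum was computed above using the old register
                            window; the helper switches to the new window, and
                            the result is then written to rd in the new window.
                            restore below is symmetrical.  */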
4568                     save_state(dc);
4569                     gen_helper_save(cpu_env);
4570                     gen_store_gpr(dc, rd, cpu_tmp0);
4571                     break;
4572                 case 0x3d:      /* restore */
4573                     save_state(dc);
4574                     gen_helper_restore(cpu_env);
4575                     gen_store_gpr(dc, rd, cpu_tmp0);
4576                     break;
4577 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4578                 case 0x3e:      /* V9 done/retry */
4579                     {
4580                         switch (rd) {
4581                         case 0:
4582                             if (!supervisor(dc))
4583                                 goto priv_insn;
4584                             dc->npc = DYNAMIC_PC;
4585                             dc->pc = DYNAMIC_PC;
4586                             gen_helper_done(cpu_env);
4587                             goto jmp_insn;
4588                         case 1:
4589                             if (!supervisor(dc))
4590                                 goto priv_insn;
4591                             dc->npc = DYNAMIC_PC;
4592                             dc->pc = DYNAMIC_PC;
4593                             gen_helper_retry(cpu_env);
4594                             goto jmp_insn;
4595                         default:
4596                             goto illegal_insn;
4597                         }
4598                     }
4599                     break;
4600 #endif
4601                 default:
4602                     goto illegal_insn;
4603                 }
4604             }
4605             break;
4606         }
4607         break;
4608     case 3:                     /* load/store instructions */
4609         {
4610             unsigned int xop = GET_FIELD(insn, 7, 12);
4611             /* ??? gen_address_mask prevents us from using a source
4612                register directly.  Always generate a temporary.  */
4613             TCGv cpu_addr = get_temp_tl(dc);
4614
4615             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
4616             if (xop == 0x3c || xop == 0x3e) {
4617                 /* V9 casa/casxa: no offset */
4618             } else if (IS_IMM) {     /* immediate */
4619                 simm = GET_FIELDs(insn, 19, 31);
4620                 if (simm != 0) {
4621                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
4622                 }
4623             } else {            /* register */
4624                 rs2 = GET_FIELD(insn, 27, 31);
4625                 if (rs2 != 0) {
4626                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
4627                 }
4628             }
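                 /* Integer loads (including alternate-space, ldstub and swap
                    variants) are handled here; FP loads, integer stores, FP
                    stores and the cas/casx forms follow in the else-if chains
                    below.  */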
4629             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4630                 (xop > 0x17 && xop <= 0x1d ) ||
4631                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4632                 TCGv cpu_val = gen_dest_gpr(dc, rd);
4633
4634                 switch (xop) {
4635                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4636                     gen_address_mask(dc, cpu_addr);
4637                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4638                     break;
4639                 case 0x1:       /* ldub, load unsigned byte */
4640                     gen_address_mask(dc, cpu_addr);
4641                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4642                     break;
4643                 case 0x2:       /* lduh, load unsigned halfword */
4644                     gen_address_mask(dc, cpu_addr);
4645                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4646                     break;
4647                 case 0x3:       /* ldd, load double word */
4648                     if (rd & 1)
4649                         goto illegal_insn;
4650                     else {
4651                         TCGv_i32 r_const;
4652                         TCGv_i64 t64;
4653
4654                         save_state(dc);
4655                         r_const = tcg_const_i32(7);
4656                         /* XXX remove alignment check */
4657                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4658                         tcg_temp_free_i32(r_const);
4659                         gen_address_mask(dc, cpu_addr);
4660                         t64 = tcg_temp_new_i64();
4661                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
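                             /* Split the loaded doubleword: the low 32 bits go
                                to the odd register rd + 1 here, the high 32
                                bits stay in cpu_val and are stored to rd after
                                the switch.  */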
4662                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4663                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4664                         gen_store_gpr(dc, rd + 1, cpu_val);
4665                         tcg_gen_shri_i64(t64, t64, 32);
4666                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4667                         tcg_temp_free_i64(t64);
4668                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4669                     }
4670                     break;
4671                 case 0x9:       /* ldsb, load signed byte */
4672                     gen_address_mask(dc, cpu_addr);
4673                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4674                     break;
4675                 case 0xa:       /* ldsh, load signed halfword */
4676                     gen_address_mask(dc, cpu_addr);
4677                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4678                     break;
4679                 case 0xd:       /* ldstub -- XXX: should be atomic */
4680                     {
4681                         TCGv r_const;
4682
4683                         gen_address_mask(dc, cpu_addr);
4684                         tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4685                         r_const = tcg_const_tl(0xff);
4686                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4687                         tcg_temp_free(r_const);
4688                     }
4689                     break;
4690                 case 0x0f:
4691                     /* swap, swap register with memory -- XXX: should also be atomic */
4692                     {
4693                         TCGv t0 = get_temp_tl(dc);
4694                         CHECK_IU_FEATURE(dc, SWAP);
4695                         cpu_src1 = gen_load_gpr(dc, rd);
4696                         gen_address_mask(dc, cpu_addr);
4697                         tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4698                         tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4699                         tcg_gen_mov_tl(cpu_val, t0);
4700                     }
4701                     break;
4702 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4703                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4704 #ifndef TARGET_SPARC64
4705                     if (IS_IMM)
4706                         goto illegal_insn;
4707                     if (!supervisor(dc))
4708                         goto priv_insn;
4709 #endif
4710                     save_state(dc);
4711                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4712                     break;
4713                 case 0x11:      /* lduba, load unsigned byte alternate */
4714 #ifndef TARGET_SPARC64
4715                     if (IS_IMM)
4716                         goto illegal_insn;
4717                     if (!supervisor(dc))
4718                         goto priv_insn;
4719 #endif
4720                     save_state(dc);
4721                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4722                     break;
4723                 case 0x12:      /* lduha, load unsigned halfword alternate */
4724 #ifndef TARGET_SPARC64
4725                     if (IS_IMM)
4726                         goto illegal_insn;
4727                     if (!supervisor(dc))
4728                         goto priv_insn;
4729 #endif
4730                     save_state(dc);
4731                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4732                     break;
4733                 case 0x13:      /* ldda, load double word alternate */
4734 #ifndef TARGET_SPARC64
4735                     if (IS_IMM)
4736                         goto illegal_insn;
4737                     if (!supervisor(dc))
4738                         goto priv_insn;
4739 #endif
4740                     if (rd & 1)
4741                         goto illegal_insn;
4742                     save_state(dc);
4743                     gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4744                     goto skip_move;
4745                 case 0x19:      /* ldsba, load signed byte alternate */
4746 #ifndef TARGET_SPARC64
4747                     if (IS_IMM)
4748                         goto illegal_insn;
4749                     if (!supervisor(dc))
4750                         goto priv_insn;
4751 #endif
4752                     save_state(dc);
4753                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4754                     break;
4755                 case 0x1a:      /* ldsha, load signed halfword alternate */
4756 #ifndef TARGET_SPARC64
4757                     if (IS_IMM)
4758                         goto illegal_insn;
4759                     if (!supervisor(dc))
4760                         goto priv_insn;
4761 #endif
4762                     save_state(dc);
4763                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4764                     break;
4765                 case 0x1d:      /* ldstuba -- XXX: should be atomic */
4766 #ifndef TARGET_SPARC64
4767                     if (IS_IMM)
4768                         goto illegal_insn;
4769                     if (!supervisor(dc))
4770                         goto priv_insn;
4771 #endif
4772                     save_state(dc);
4773                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4774                     break;
4775                 case 0x1f:      /* swapa, swap reg with alt. memory --
4776                                    XXX: should also be atomic */
4777                     CHECK_IU_FEATURE(dc, SWAP);
4778 #ifndef TARGET_SPARC64
4779                     if (IS_IMM)
4780                         goto illegal_insn;
4781                     if (!supervisor(dc))
4782                         goto priv_insn;
4783 #endif
4784                     save_state(dc);
4785                     cpu_src1 = gen_load_gpr(dc, rd);
4786                     gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4787                     break;
4788
4789 #ifndef TARGET_SPARC64
4790                 case 0x30: /* ldc */
4791                 case 0x31: /* ldcsr */
4792                 case 0x33: /* lddc */
4793                     goto ncp_insn;
4794 #endif
4795 #endif
4796 #ifdef TARGET_SPARC64
4797                 case 0x08: /* V9 ldsw */
4798                     gen_address_mask(dc, cpu_addr);
4799                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4800                     break;
4801                 case 0x0b: /* V9 ldx */
4802                     gen_address_mask(dc, cpu_addr);
4803                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4804                     break;
4805                 case 0x18: /* V9 ldswa */
4806                     save_state(dc);
4807                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4808                     break;
4809                 case 0x1b: /* V9 ldxa */
4810                     save_state(dc);
4811                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4812                     break;
4813                 case 0x2d: /* V9 prefetch, no effect */
4814                     goto skip_move;
4815                 case 0x30: /* V9 ldfa */
4816                     if (gen_trap_ifnofpu(dc)) {
4817                         goto jmp_insn;
4818                     }
4819                     save_state(dc);
4820                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4821                     gen_update_fprs_dirty(rd);
4822                     goto skip_move;
4823                 case 0x33: /* V9 lddfa */
4824                     if (gen_trap_ifnofpu(dc)) {
4825                         goto jmp_insn;
4826                     }
4827                     save_state(dc);
4828                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4829                     gen_update_fprs_dirty(DFPREG(rd));
4830                     goto skip_move;
4831                 case 0x3d: /* V9 prefetcha, no effect */
4832                     goto skip_move;
4833                 case 0x32: /* V9 ldqfa */
4834                     CHECK_FPU_FEATURE(dc, FLOAT128);
4835                     if (gen_trap_ifnofpu(dc)) {
4836                         goto jmp_insn;
4837                     }
4838                     save_state(dc);
4839                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4840                     gen_update_fprs_dirty(QFPREG(rd));
4841                     goto skip_move;
4842 #endif
4843                 default:
4844                     goto illegal_insn;
4845                 }
4846                 gen_store_gpr(dc, rd, cpu_val);
4847 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4848             skip_move: ;
4849 #endif
4850             } else if (xop >= 0x20 && xop < 0x24) {
4851                 TCGv t0;
4852
4853                 if (gen_trap_ifnofpu(dc)) {
4854                     goto jmp_insn;
4855                 }
4856                 save_state(dc);
4857                 switch (xop) {
4858                 case 0x20:      /* ldf, load fpreg */
4859                     gen_address_mask(dc, cpu_addr);
4860                     t0 = get_temp_tl(dc);
4861                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4862                     cpu_dst_32 = gen_dest_fpr_F(dc);
4863                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4864                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4865                     break;
4866                 case 0x21:      /* ldfsr, V9 ldxfsr */
4867 #ifdef TARGET_SPARC64
4868                     gen_address_mask(dc, cpu_addr);
4869                     if (rd == 1) {
4870                         TCGv_i64 t64 = tcg_temp_new_i64();
4871                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4872                         gen_helper_ldxfsr(cpu_env, t64);
4873                         tcg_temp_free_i64(t64);
4874                         break;
4875                     }
4876 #endif
4877                     cpu_dst_32 = get_temp_i32(dc);
4878                     t0 = get_temp_tl(dc);
4879                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4880                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4881                     gen_helper_ldfsr(cpu_env, cpu_dst_32);
4882                     break;
4883                 case 0x22:      /* ldqf, load quad fpreg */
4884                     {
4885                         TCGv_i32 r_const;
4886
4887                         CHECK_FPU_FEATURE(dc, FLOAT128);
4888                         r_const = tcg_const_i32(dc->mem_idx);
4889                         gen_address_mask(dc, cpu_addr);
4890                         gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4891                         tcg_temp_free_i32(r_const);
4892                         gen_op_store_QT0_fpr(QFPREG(rd));
4893                         gen_update_fprs_dirty(QFPREG(rd));
4894                     }
4895                     break;
4896                 case 0x23:      /* lddf, load double fpreg */
4897                     gen_address_mask(dc, cpu_addr);
4898                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4899                     tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4900                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4901                     break;
4902                 default:
4903                     goto illegal_insn;
4904                 }
4905             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4906                        xop == 0xe || xop == 0x1e) {
4907                 TCGv cpu_val = gen_load_gpr(dc, rd);
4908
4909                 switch (xop) {
4910                 case 0x4: /* st, store word */
4911                     gen_address_mask(dc, cpu_addr);
4912                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4913                     break;
4914                 case 0x5: /* stb, store byte */
4915                     gen_address_mask(dc, cpu_addr);
4916                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4917                     break;
4918                 case 0x6: /* sth, store halfword */
4919                     gen_address_mask(dc, cpu_addr);
4920                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4921                     break;
4922                 case 0x7: /* std, store double word */
4923                     if (rd & 1)
4924                         goto illegal_insn;
4925                     else {
4926                         TCGv_i32 r_const;
4927                         TCGv_i64 t64;
4928                         TCGv lo;
4929
4930                         save_state(dc);
4931                         gen_address_mask(dc, cpu_addr);
4932                         r_const = tcg_const_i32(7);
4933                         /* XXX remove alignment check */
4934                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4935                         tcg_temp_free_i32(r_const);
4936                         lo = gen_load_gpr(dc, rd + 1);
4937
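                             /* rd holds the most significant word and rd + 1
                                the least significant; concatenate them and
                                store with a single 64-bit access.  */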
4938                         t64 = tcg_temp_new_i64();
4939                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
4940                         tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
4941                         tcg_temp_free_i64(t64);
4942                     }
4943                     break;
4944 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4945                 case 0x14: /* sta, V9 stwa, store word alternate */
4946 #ifndef TARGET_SPARC64
4947                     if (IS_IMM)
4948                         goto illegal_insn;
4949                     if (!supervisor(dc))
4950                         goto priv_insn;
4951 #endif
4952                     save_state(dc);
4953                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4954                     dc->npc = DYNAMIC_PC;
4955                     break;
4956                 case 0x15: /* stba, store byte alternate */
4957 #ifndef TARGET_SPARC64
4958                     if (IS_IMM)
4959                         goto illegal_insn;
4960                     if (!supervisor(dc))
4961                         goto priv_insn;
4962 #endif
4963                     save_state(dc);
4964                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4965                     dc->npc = DYNAMIC_PC;
4966                     break;
4967                 case 0x16: /* stha, store halfword alternate */
4968 #ifndef TARGET_SPARC64
4969                     if (IS_IMM)
4970                         goto illegal_insn;
4971                     if (!supervisor(dc))
4972                         goto priv_insn;
4973 #endif
4974                     save_state(dc);
4975                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4976                     dc->npc = DYNAMIC_PC;
4977                     break;
4978                 case 0x17: /* stda, store double word alternate */
4979 #ifndef TARGET_SPARC64
4980                     if (IS_IMM)
4981                         goto illegal_insn;
4982                     if (!supervisor(dc))
4983                         goto priv_insn;
4984 #endif
4985                     if (rd & 1)
4986                         goto illegal_insn;
4987                     else {
4988                         save_state(dc);
4989                         gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
4990                     }
4991                     break;
4992 #endif
4993 #ifdef TARGET_SPARC64
4994                 case 0x0e: /* V9 stx */
4995                     gen_address_mask(dc, cpu_addr);
4996                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4997                     break;
4998                 case 0x1e: /* V9 stxa */
4999                     save_state(dc);
5000                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
5001                     dc->npc = DYNAMIC_PC;
5002                     break;
5003 #endif
5004                 default:
5005                     goto illegal_insn;
5006                 }
5007             } else if (xop > 0x23 && xop < 0x28) {
5008                 if (gen_trap_ifnofpu(dc)) {
5009                     goto jmp_insn;
5010                 }
5011                 save_state(dc);
5012                 switch (xop) {
5013                 case 0x24: /* stf, store fpreg */
5014                     {
5015                         TCGv t = get_temp_tl(dc);
5016                         gen_address_mask(dc, cpu_addr);
5017                         cpu_src1_32 = gen_load_fpr_F(dc, rd);
5018                         tcg_gen_ext_i32_tl(t, cpu_src1_32);
5019                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5020                     }
5021                     break;
5022                 case 0x25: /* stfsr, V9 stxfsr */
5023                     {
5024                         TCGv t = get_temp_tl(dc);
5025
5026                         tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
5027 #ifdef TARGET_SPARC64
5028                         gen_address_mask(dc, cpu_addr);
5029                         if (rd == 1) {
5030                             tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
5031                             break;
5032                         }
5033 #endif
5034                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5035                     }
5036                     break;
5037                 case 0x26:
5038 #ifdef TARGET_SPARC64
5039                     /* V9 stqf, store quad fpreg */
5040                     {
5041                         TCGv_i32 r_const;
5042
5043                         CHECK_FPU_FEATURE(dc, FLOAT128);
5044                         gen_op_load_fpr_QT0(QFPREG(rd));
5045                         r_const = tcg_const_i32(dc->mem_idx);
5046                         gen_address_mask(dc, cpu_addr);
5047                         gen_helper_stqf(cpu_env, cpu_addr, r_const);
5048                         tcg_temp_free_i32(r_const);
5049                     }
5050                     break;
5051 #else /* !TARGET_SPARC64 */
5052                     /* stdfq, store floating point queue */
5053 #if defined(CONFIG_USER_ONLY)
5054                     goto illegal_insn;
5055 #else
5056                     if (!supervisor(dc))
5057                         goto priv_insn;
5058                     if (gen_trap_ifnofpu(dc)) {
5059                         goto jmp_insn;
5060                     }
5061                     goto nfq_insn;
5062 #endif
5063 #endif
5064                 case 0x27: /* stdf, store double fpreg */
5065                     gen_address_mask(dc, cpu_addr);
5066                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5067                     tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5068                     break;
5069                 default:
5070                     goto illegal_insn;
5071                 }
5072             } else if (xop > 0x33 && xop < 0x3f) {
5073                 save_state(dc);
5074                 switch (xop) {
5075 #ifdef TARGET_SPARC64
5076                 case 0x34: /* V9 stfa */
5077                     if (gen_trap_ifnofpu(dc)) {
5078                         goto jmp_insn;
5079                     }
5080                     gen_stf_asi(cpu_addr, insn, 4, rd);
5081                     break;
5082                 case 0x36: /* V9 stqfa */
5083                     {
5084                         TCGv_i32 r_const;
5085
5086                         CHECK_FPU_FEATURE(dc, FLOAT128);
5087                         if (gen_trap_ifnofpu(dc)) {
5088                             goto jmp_insn;
5089                         }
5090                         r_const = tcg_const_i32(7);
5091                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
5092                         tcg_temp_free_i32(r_const);
5093                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5094                     }
5095                     break;
5096                 case 0x37: /* V9 stdfa */
5097                     if (gen_trap_ifnofpu(dc)) {
5098                         goto jmp_insn;
5099                     }
5100                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5101                     break;
5102                 case 0x3e: /* V9 casxa */
5103                     rs2 = GET_FIELD(insn, 27, 31);
5104                     cpu_src2 = gen_load_gpr(dc, rs2);
5105                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5106                     break;
5107 #else
5108                 case 0x34: /* stc */
5109                 case 0x35: /* stcsr */
5110                 case 0x36: /* stdcq */
5111                 case 0x37: /* stdc */
5112                     goto ncp_insn;
5113 #endif
5114 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5115                 case 0x3c: /* V9 or LEON3 casa */
5116 #ifndef TARGET_SPARC64
5117                     CHECK_IU_FEATURE(dc, CASA);
5118                     if (IS_IMM) {
5119                         goto illegal_insn;
5120                     }
5121                     /* LEON3 allows CASA from user space with ASI 0xa */
5122                     if ((GET_FIELD(insn, 19, 26) != 0xa) && !supervisor(dc)) {
5123                         goto priv_insn;
5124                     }
5125 #endif
5126                     rs2 = GET_FIELD(insn, 27, 31);
5127                     cpu_src2 = gen_load_gpr(dc, rs2);
5128                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5129                     break;
5130 #endif
5131                 default:
5132                     goto illegal_insn;
5133                 }
5134             } else {
5135                 goto illegal_insn;
5136             }
5137         }
5138         break;
5139     }
5140     /* default case for non-jump instructions */
5141     if (dc->npc == DYNAMIC_PC) {
5142         dc->pc = DYNAMIC_PC;
5143         gen_op_next_insn();
5144     } else if (dc->npc == JUMP_PC) {
5145         /* we can do a static jump */
5146         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5147         dc->is_br = 1;
5148     } else {
5149         dc->pc = dc->npc;
5150         dc->npc = dc->npc + 4;
5151     }
5152  jmp_insn:
5153     goto egress;
5154  illegal_insn:
5155     {
5156         TCGv_i32 r_const;
5157
5158         save_state(dc);
5159         r_const = tcg_const_i32(TT_ILL_INSN);
5160         gen_helper_raise_exception(cpu_env, r_const);
5161         tcg_temp_free_i32(r_const);
5162         dc->is_br = 1;
5163     }
5164     goto egress;
5165  unimp_flush:
5166     {
5167         TCGv_i32 r_const;
5168
5169         save_state(dc);
5170         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5171         gen_helper_raise_exception(cpu_env, r_const);
5172         tcg_temp_free_i32(r_const);
5173         dc->is_br = 1;
5174     }
5175     goto egress;
5176 #if !defined(CONFIG_USER_ONLY)
5177  priv_insn:
5178     {
5179         TCGv_i32 r_const;
5180
5181         save_state(dc);
5182         r_const = tcg_const_i32(TT_PRIV_INSN);
5183         gen_helper_raise_exception(cpu_env, r_const);
5184         tcg_temp_free_i32(r_const);
5185         dc->is_br = 1;
5186     }
5187     goto egress;
5188 #endif
5189  nfpu_insn:
5190     save_state(dc);
5191     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5192     dc->is_br = 1;
5193     goto egress;
5194 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5195  nfq_insn:
5196     save_state(dc);
5197     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5198     dc->is_br = 1;
5199     goto egress;
5200 #endif
5201 #ifndef TARGET_SPARC64
5202  ncp_insn:
5203     {
5204         TCGv_i32 r_const;
5205
5206         save_state(dc);
5207         r_const = tcg_const_i32(TT_NCP_INSN);
5208         gen_helper_raise_exception(cpu_env, r_const);
5209         tcg_temp_free_i32(r_const);
5210         dc->is_br = 1;
5211     }
5212     goto egress;
5213 #endif
5214  egress:
5215     if (dc->n_t32 != 0) {
5216         int i;
5217         for (i = dc->n_t32 - 1; i >= 0; --i) {
5218             tcg_temp_free_i32(dc->t32[i]);
5219         }
5220         dc->n_t32 = 0;
5221     }
5222     if (dc->n_ttl != 0) {
5223         int i;
5224         for (i = dc->n_ttl - 1; i >= 0; --i) {
5225             tcg_temp_free(dc->ttl[i]);
5226         }
5227         dc->n_ttl = 0;
5228     }
5229 }
5230
5231 void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
5232 {
5233     SPARCCPU *cpu = sparc_env_get_cpu(env);
5234     CPUState *cs = CPU(cpu);
5235     target_ulong pc_start, last_pc;
5236     DisasContext dc1, *dc = &dc1;
5237     int num_insns;
5238     int max_insns;
5239     unsigned int insn;
5240
5241     memset(dc, 0, sizeof(DisasContext));
5242     dc->tb = tb;
5243     pc_start = tb->pc;
5244     dc->pc = pc_start;
5245     last_pc = dc->pc;
5246     dc->npc = (target_ulong) tb->cs_base;
5247     dc->cc_op = CC_OP_DYNAMIC;
5248     dc->mem_idx = cpu_mmu_index(env, false);
5249     dc->def = env->def;
5250     dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5251     dc->address_mask_32bit = tb_am_enabled(tb->flags);
5252     dc->singlestep = (cs->singlestep_enabled || singlestep);
5253
5254     num_insns = 0;
5255     max_insns = tb->cflags & CF_COUNT_MASK;
5256     if (max_insns == 0) {
5257         max_insns = CF_COUNT_MASK;
5258     }
5259     if (max_insns > TCG_MAX_INSNS) {
5260         max_insns = TCG_MAX_INSNS;
5261     }
5262
5263     gen_tb_start(tb);
5264     do {
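             /* Record the pc/npc pair for restore_state_to_opc().  When npc
                is the special JUMP_PC value, the taken branch target is
                encoded with the JUMP_PC flag so that the real npc can be
                recovered from cpu_cond at restore time.  */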
5265         if (dc->npc & JUMP_PC) {
5266             assert(dc->jump_pc[1] == dc->pc + 4);
5267             tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5268         } else {
5269             tcg_gen_insn_start(dc->pc, dc->npc);
5270         }
5271         num_insns++;
5272         last_pc = dc->pc;
5273
5274         if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
5275             if (dc->pc != pc_start) {
5276                 save_state(dc);
5277             }
5278             gen_helper_debug(cpu_env);
5279             tcg_gen_exit_tb(0);
5280             dc->is_br = 1;
5281             goto exit_gen_loop;
5282         }
5283
5284         if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
5285             gen_io_start();
5286         }
5287
5288         insn = cpu_ldl_code(env, dc->pc);
5289
5290         disas_sparc_insn(dc, insn);
5291
5292         if (dc->is_br)
5293             break;
5294         /* if the next PC is not sequential, we stop translation now */
5295         if (dc->pc != (last_pc + 4))
5296             break;
5297         /* if we reach a page boundary, we stop generation so that the
5298            PC of a TT_TFAULT exception is always in the right page */
5299         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5300             break;
5301         /* in single-step mode, only one instruction is generated,
5302            followed by an exception */
5303         if (dc->singlestep) {
5304             break;
5305         }
5306     } while (!tcg_op_buf_full() &&
5307              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5308              num_insns < max_insns);
5309
5310  exit_gen_loop:
5311     if (tb->cflags & CF_LAST_IO) {
5312         gen_io_end();
5313     }
5314     if (!dc->is_br) {
5315         if (dc->pc != DYNAMIC_PC &&
5316             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5317             /* static PC and NPC: we can use direct chaining */
5318             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5319         } else {
5320             if (dc->pc != DYNAMIC_PC) {
5321                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5322             }
5323             save_npc(dc);
5324             tcg_gen_exit_tb(0);
5325         }
5326     }
5327     gen_tb_end(tb, num_insns);
5328
5329     tb->size = last_pc + 4 - pc_start;
5330     tb->icount = num_insns;
5331
5332 #ifdef DEBUG_DISAS
5333     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
5334         && qemu_log_in_addr_range(pc_start)) {
5335         qemu_log("--------------\n");
5336         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5337         log_target_disas(cs, pc_start, last_pc + 4 - pc_start, 0);
5338         qemu_log("\n");
5339     }
5340 #endif
5341 }
5342
5343 void gen_intermediate_code_init(CPUSPARCState *env)
5344 {
5345     static int inited;
5346     static const char gregnames[32][4] = {
5347         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5348         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5349         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5350         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5351     };
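     /* The FP register file is modelled as TARGET_DPREGS 64-bit values, so
        only the even-numbered single-precision register names appear here.  */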
5352     static const char fregnames[32][4] = {
5353         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5354         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5355         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5356         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5357     };
5358
5359     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5360 #ifdef TARGET_SPARC64
5361         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5362         { &cpu_asi, offsetof(CPUSPARCState, asi), "asi" },
5363         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5364         { &cpu_softint, offsetof(CPUSPARCState, softint), "softint" },
5365 #else
5366         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5367 #endif
5368         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5369         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5370     };
5371
5372     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5373 #ifdef TARGET_SPARC64
5374         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5375         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5376         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5377         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5378           "hstick_cmpr" },
5379         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5380         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5381         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5382         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5383         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5384 #endif
5385         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5386         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5387         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5388         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5389         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5390         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5391         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5392         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5393 #ifndef CONFIG_USER_ONLY
5394         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5395 #endif
5396     };
5397
5398     unsigned int i;
5399
5400     /* init various static tables */
5401     if (inited) {
5402         return;
5403     }
5404     inited = 1;
5405
5406     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5407
5408     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5409                                          offsetof(CPUSPARCState, regwptr),
5410                                          "regwptr");
5411
5412     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5413         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5414     }
5415
5416     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5417         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5418     }
5419
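     /* %g0 always reads as zero, so no TCG global is allocated for it.
        %g1-%g7 live directly in env->gregs, while the windowed %o, %l and %i
        registers are reached indirectly through regwptr.  */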
5420     TCGV_UNUSED(cpu_regs[0]);
5421     for (i = 1; i < 8; ++i) {
5422         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5423                                          offsetof(CPUSPARCState, gregs[i]),
5424                                          gregnames[i]);
5425     }
5426
5427     for (i = 8; i < 32; ++i) {
5428         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5429                                          (i - 8) * sizeof(target_ulong),
5430                                          gregnames[i]);
5431     }
5432
5433     for (i = 0; i < TARGET_DPREGS; i++) {
5434         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5435                                             offsetof(CPUSPARCState, fpr[i]),
5436                                             fregnames[i]);
5437     }
5438 }
5439
5440 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
5441                           target_ulong *data)
5442 {
5443     target_ulong pc = data[0];
5444     target_ulong npc = data[1];
5445
5446     env->pc = pc;
5447     if (npc == DYNAMIC_PC) {
5448         /* dynamic NPC: already stored */
5449     } else if (npc & JUMP_PC) {
5450         /* jump PC: use 'cond' and the jump targets of the translation */
5451         if (env->cond) {
5452             env->npc = npc & ~3;
5453         } else {
5454             env->npc = pc + 4;
5455         }
5456     } else {
5457         env->npc = npc;
5458     }
5459 }