1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <[email protected]>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31
32 #define GEN_HELPER 1
33 #include "helper.h"
34
35 #define DEBUG_DISAS
36
37 #define DYNAMIC_PC  1 /* dynamic pc value */
38 #define JUMP_PC     2 /* dynamic pc value which takes only the two values
39                          held in dc->jump_pc[0] and dc->jump_pc[1] */
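/* dc->pc and dc->npc normally hold literal (compile-time known) program
   counter values.  DYNAMIC_PC means the value is only known at run time
   and lives in cpu_pc/cpu_npc; JUMP_PC means cpu_npc will be selected at
   run time between dc->jump_pc[0] and dc->jump_pc[1] depending on
   cpu_cond (see gen_generic_branch below).  */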
40
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
51 static TCGv cpu_cond;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* Floating point registers */
62 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
63
64 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
65 static target_ulong gen_opc_jump_pc[2];
66
67 #include "gen-icount.h"
68
69 typedef struct DisasContext {
70     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
71     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
72     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
73     int is_br;
74     int mem_idx;
75     int fpu_enabled;
76     int address_mask_32bit;
77     int singlestep;
78     uint32_t cc_op;  /* current CC operation */
79     struct TranslationBlock *tb;
80     sparc_def_t *def;
81     TCGv_i32 t32[3];
82     TCGv ttl[5];
83     int n_t32;
84     int n_ttl;
85 } DisasContext;
86
87 typedef struct {
88     TCGCond cond;
89     bool is_bool;
90     bool g1, g2;
91     TCGv c1, c2;
92 } DisasCompare;
93
94 // This macro uses non-native bit order: FROM/TO count down from the MSB (bit 0 is 2^31)
95 #define GET_FIELD(X, FROM, TO)                                  \
96     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
97
98 // This macro uses the bit order of the manuals, i.e. bit 0 is 2^0 (the LSB)
99 #define GET_FIELD_SP(X, FROM, TO)               \
100     GET_FIELD(X, 31 - (TO), 31 - (FROM))
101
102 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
103 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
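/* Example: for a Bicc instruction the condition field occupies bits 28..25
   (manual numbering).  GET_FIELD(insn, 3, 6) and GET_FIELD_SP(insn, 25, 28)
   both extract it as (insn >> 25) & 0xf.  */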
104
105 #ifdef TARGET_SPARC64
106 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
107 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
108 #else
109 #define DFPREG(r) (r & 0x1e)
110 #define QFPREG(r) (r & 0x1c)
111 #endif
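/* SPARC V9 encodes the upper FP registers by folding bit 5 of the register
   number into bit 0 of the 5-bit rd/rs field, e.g. an encoded value of 3
   names double register %f34: DFPREG(3) = (1 << 5) | 2 = 34.  On 32-bit
   SPARC only the even (double) / multiple-of-4 (quad) masking is needed.  */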
112
113 #define UA2005_HTRAP_MASK 0xff
114 #define V8_TRAP_MASK 0x7f
115
116 static int sign_extend(int x, int len)
117 {
118     len = 32 - len;
119     return (x << len) >> len;
120 }
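/* e.g. the 13-bit immediate field: GET_FIELDs(insn, 19, 31) is
   sign_extend(insn & 0x1fff, 13), so an encoded 0x1fff becomes -1.  */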
121
122 #define IS_IMM (insn & (1<<13))
123
124 static inline TCGv_i32 get_temp_i32(DisasContext *dc)
125 {
126     TCGv_i32 t;
127     assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
128     dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
129     return t;
130 }
131
132 static inline TCGv get_temp_tl(DisasContext *dc)
133 {
134     TCGv t;
135     assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
136     dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
137     return t;
138 }
139
140 static inline void gen_update_fprs_dirty(int rd)
141 {
142 #if defined(TARGET_SPARC64)
143     tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
144 #endif
145 }
146
147 /* floating point registers moves */
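/* The double-precision registers are kept as TCGv_i64 globals in cpu_fpr[].
   A single-precision register f[n] is one half of cpu_fpr[n / 2]: even n is
   the upper 32 bits, odd n the lower 32 bits, matching the big-endian
   register pairing of the architecture.  */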
148 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
149 {
150 #if TCG_TARGET_REG_BITS == 32
151     if (src & 1) {
152         return TCGV_LOW(cpu_fpr[src / 2]);
153     } else {
154         return TCGV_HIGH(cpu_fpr[src / 2]);
155     }
156 #else
157     if (src & 1) {
158         return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
159     } else {
160         TCGv_i32 ret = get_temp_i32(dc);
161         TCGv_i64 t = tcg_temp_new_i64();
162
163         tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
164         tcg_gen_trunc_i64_i32(ret, t);
165         tcg_temp_free_i64(t);
166
167         return ret;
168     }
169 #endif
170 }
171
172 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
173 {
174 #if TCG_TARGET_REG_BITS == 32
175     if (dst & 1) {
176         tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
177     } else {
178         tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
179     }
180 #else
181     TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
182     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
183                         (dst & 1 ? 0 : 32), 32);
184 #endif
185     gen_update_fprs_dirty(dst);
186 }
187
188 static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
189 {
190     return get_temp_i32(dc);
191 }
192
193 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
194 {
195     src = DFPREG(src);
196     return cpu_fpr[src / 2];
197 }
198
199 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
200 {
201     dst = DFPREG(dst);
202     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
203     gen_update_fprs_dirty(dst);
204 }
205
206 static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
207 {
208     return cpu_fpr[DFPREG(dst) / 2];
209 }
210
211 static void gen_op_load_fpr_QT0(unsigned int src)
212 {
213     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
214                    offsetof(CPU_QuadU, ll.upper));
215     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
216                    offsetof(CPU_QuadU, ll.lower));
217 }
218
219 static void gen_op_load_fpr_QT1(unsigned int src)
220 {
221     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
222                    offsetof(CPU_QuadU, ll.upper));
223     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
224                    offsetof(CPU_QuadU, ll.lower));
225 }
226
227 static void gen_op_store_QT0_fpr(unsigned int dst)
228 {
229     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
230                    offsetof(CPU_QuadU, ll.upper));
231     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
232                    offsetof(CPU_QuadU, ll.lower));
233 }
234
235 #ifdef TARGET_SPARC64
236 static void gen_move_Q(unsigned int rd, unsigned int rs)
237 {
238     rd = QFPREG(rd);
239     rs = QFPREG(rs);
240
241     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
242     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
243     gen_update_fprs_dirty(rd);
244 }
245 #endif
246
247 /* moves */
248 #ifdef CONFIG_USER_ONLY
249 #define supervisor(dc) 0
250 #ifdef TARGET_SPARC64
251 #define hypervisor(dc) 0
252 #endif
253 #else
254 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
255 #ifdef TARGET_SPARC64
256 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
257 #else
258 #endif
259 #endif
260
261 #ifdef TARGET_SPARC64
262 #ifndef TARGET_ABI32
263 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
264 #else
265 #define AM_CHECK(dc) (1)
266 #endif
267 #endif
268
269 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
270 {
271 #ifdef TARGET_SPARC64
272     if (AM_CHECK(dc))
273         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
274 #endif
275 }
276
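/* General purpose register access: %g0 always reads as zero, %g1..%g7 are
   TCG globals (cpu_gregs[]), and the windowed registers %o, %l and %i
   (regs 8..31) are loaded and stored through cpu_regwptr, which points at
   the current register window in the CPU state.  */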
277 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
278 {
279     if (reg == 0 || reg >= 8) {
280         TCGv t = get_temp_tl(dc);
281         if (reg == 0) {
282             tcg_gen_movi_tl(t, 0);
283         } else {
284             tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
285         }
286         return t;
287     } else {
288         return cpu_gregs[reg];
289     }
290 }
291
292 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
293 {
294     if (reg > 0) {
295         if (reg < 8) {
296             tcg_gen_mov_tl(cpu_gregs[reg], v);
297         } else {
298             tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
299         }
300     }
301 }
302
303 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
304 {
305     if (reg == 0 || reg >= 8) {
306         return get_temp_tl(dc);
307     } else {
308         return cpu_gregs[reg];
309     }
310 }
311
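/* Chain to the next TB directly when both the target pc and npc stay on the
   same guest page as this TB and we are not single-stepping; otherwise fall
   back to exiting to the main loop with cpu_pc/cpu_npc updated.  */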
312 static inline void gen_goto_tb(DisasContext *s, int tb_num,
313                                target_ulong pc, target_ulong npc)
314 {
315     TranslationBlock *tb;
316
317     tb = s->tb;
318     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
319         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
320         !s->singlestep)  {
321         /* jump to same page: we can use a direct jump */
322         tcg_gen_goto_tb(tb_num);
323         tcg_gen_movi_tl(cpu_pc, pc);
324         tcg_gen_movi_tl(cpu_npc, npc);
325         tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
326     } else {
327         /* jump to another page: currently not optimized */
328         tcg_gen_movi_tl(cpu_pc, pc);
329         tcg_gen_movi_tl(cpu_npc, npc);
330         tcg_gen_exit_tb(0);
331     }
332 }
333
334 // XXX suboptimal
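/* The helpers below extract one PSR condition code bit (N, Z, V or C) into
   bit 0 of a target-long, so reg ends up holding 0 or 1.  */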
335 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
336 {
337     tcg_gen_extu_i32_tl(reg, src);
338     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
339     tcg_gen_andi_tl(reg, reg, 0x1);
340 }
341
342 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
343 {
344     tcg_gen_extu_i32_tl(reg, src);
345     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
346     tcg_gen_andi_tl(reg, reg, 0x1);
347 }
348
349 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
350 {
351     tcg_gen_extu_i32_tl(reg, src);
352     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
353     tcg_gen_andi_tl(reg, reg, 0x1);
354 }
355
356 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
357 {
358     tcg_gen_extu_i32_tl(reg, src);
359     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
360     tcg_gen_andi_tl(reg, reg, 0x1);
361 }
362
363 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
364 {
365     tcg_gen_mov_tl(cpu_cc_src, src1);
366     tcg_gen_movi_tl(cpu_cc_src2, src2);
367     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
368     tcg_gen_mov_tl(dst, cpu_cc_dst);
369 }
370
371 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
372 {
373     tcg_gen_mov_tl(cpu_cc_src, src1);
374     tcg_gen_mov_tl(cpu_cc_src2, src2);
375     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
376     tcg_gen_mov_tl(dst, cpu_cc_dst);
377 }
378
379 static TCGv_i32 gen_add32_carry32(void)
380 {
381     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
382
383     /* Carry is computed from a previous add: (dst < src)  */
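    /* e.g. 0xffffffff + 1 wraps to 0, and 0 < 0xffffffff (unsigned),
       so the carry out is 1.  */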
384 #if TARGET_LONG_BITS == 64
385     cc_src1_32 = tcg_temp_new_i32();
386     cc_src2_32 = tcg_temp_new_i32();
387     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
388     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
389 #else
390     cc_src1_32 = cpu_cc_dst;
391     cc_src2_32 = cpu_cc_src;
392 #endif
393
394     carry_32 = tcg_temp_new_i32();
395     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
396
397 #if TARGET_LONG_BITS == 64
398     tcg_temp_free_i32(cc_src1_32);
399     tcg_temp_free_i32(cc_src2_32);
400 #endif
401
402     return carry_32;
403 }
404
405 static TCGv_i32 gen_sub32_carry32(void)
406 {
407     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
408
409     /* Carry is computed from a previous borrow: (src1 < src2)  */
410 #if TARGET_LONG_BITS == 64
411     cc_src1_32 = tcg_temp_new_i32();
412     cc_src2_32 = tcg_temp_new_i32();
413     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
414     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
415 #else
416     cc_src1_32 = cpu_cc_src;
417     cc_src2_32 = cpu_cc_src2;
418 #endif
419
420     carry_32 = tcg_temp_new_i32();
421     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
422
423 #if TARGET_LONG_BITS == 64
424     tcg_temp_free_i32(cc_src1_32);
425     tcg_temp_free_i32(cc_src2_32);
426 #endif
427
428     return carry_32;
429 }
430
431 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
432                             TCGv src2, int update_cc)
433 {
434     TCGv_i32 carry_32;
435     TCGv carry;
436
437     switch (dc->cc_op) {
438     case CC_OP_DIV:
439     case CC_OP_LOGIC:
440         /* Carry is known to be zero.  Fall back to plain ADD.  */
441         if (update_cc) {
442             gen_op_add_cc(dst, src1, src2);
443         } else {
444             tcg_gen_add_tl(dst, src1, src2);
445         }
446         return;
447
448     case CC_OP_ADD:
449     case CC_OP_TADD:
450     case CC_OP_TADDTV:
451 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
452         {
453             /* For 32-bit hosts, we can re-use the host's hardware carry
454                generation by using an ADD2 opcode.  We discard the low
455                part of the output.  Ideally we'd combine this operation
456                with the add that generated the carry in the first place.  */
457             TCGv dst_low = tcg_temp_new();
458             tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
459                             cpu_cc_src, src1, cpu_cc_src2, src2);
460             tcg_temp_free(dst_low);
461             goto add_done;
462         }
463 #endif
464         carry_32 = gen_add32_carry32();
465         break;
466
467     case CC_OP_SUB:
468     case CC_OP_TSUB:
469     case CC_OP_TSUBTV:
470         carry_32 = gen_sub32_carry32();
471         break;
472
473     default:
474         /* We need external help to produce the carry.  */
475         carry_32 = tcg_temp_new_i32();
476         gen_helper_compute_C_icc(carry_32, cpu_env);
477         break;
478     }
479
480 #if TARGET_LONG_BITS == 64
481     carry = tcg_temp_new();
482     tcg_gen_extu_i32_i64(carry, carry_32);
483 #else
484     carry = carry_32;
485 #endif
486
487     tcg_gen_add_tl(dst, src1, src2);
488     tcg_gen_add_tl(dst, dst, carry);
489
490     tcg_temp_free_i32(carry_32);
491 #if TARGET_LONG_BITS == 64
492     tcg_temp_free(carry);
493 #endif
494
495 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
496  add_done:
497 #endif
498     if (update_cc) {
499         tcg_gen_mov_tl(cpu_cc_src, src1);
500         tcg_gen_mov_tl(cpu_cc_src2, src2);
501         tcg_gen_mov_tl(cpu_cc_dst, dst);
502         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
503         dc->cc_op = CC_OP_ADDX;
504     }
505 }
506
507 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
508 {
509     tcg_gen_mov_tl(cpu_cc_src, src1);
510     tcg_gen_movi_tl(cpu_cc_src2, src2);
511     if (src2 == 0) {
512         tcg_gen_mov_tl(cpu_cc_dst, src1);
513         tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
514         dc->cc_op = CC_OP_LOGIC;
515     } else {
516         tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
517         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
518         dc->cc_op = CC_OP_SUB;
519     }
520     tcg_gen_mov_tl(dst, cpu_cc_dst);
521 }
522
523 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
524 {
525     tcg_gen_mov_tl(cpu_cc_src, src1);
526     tcg_gen_mov_tl(cpu_cc_src2, src2);
527     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
528     tcg_gen_mov_tl(dst, cpu_cc_dst);
529 }
530
531 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
532                             TCGv src2, int update_cc)
533 {
534     TCGv_i32 carry_32;
535     TCGv carry;
536
537     switch (dc->cc_op) {
538     case CC_OP_DIV:
539     case CC_OP_LOGIC:
540         /* Carry is known to be zero.  Fall back to plain SUB.  */
541         if (update_cc) {
542             gen_op_sub_cc(dst, src1, src2);
543         } else {
544             tcg_gen_sub_tl(dst, src1, src2);
545         }
546         return;
547
548     case CC_OP_ADD:
549     case CC_OP_TADD:
550     case CC_OP_TADDTV:
551         carry_32 = gen_add32_carry32();
552         break;
553
554     case CC_OP_SUB:
555     case CC_OP_TSUB:
556     case CC_OP_TSUBTV:
557 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
558         {
559             /* For 32-bit hosts, we can re-use the host's hardware carry
560                generation by using a SUB2 opcode.  We discard the low
561                part of the output.  Ideally we'd combine this operation
562                with the subtraction that generated the borrow in the first place.  */
563             TCGv dst_low = tcg_temp_new();
564             tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
565                             cpu_cc_src, src1, cpu_cc_src2, src2);
566             tcg_temp_free(dst_low);
567             goto sub_done;
568         }
569 #endif
570         carry_32 = gen_sub32_carry32();
571         break;
572
573     default:
574         /* We need external help to produce the carry.  */
575         carry_32 = tcg_temp_new_i32();
576         gen_helper_compute_C_icc(carry_32, cpu_env);
577         break;
578     }
579
580 #if TARGET_LONG_BITS == 64
581     carry = tcg_temp_new();
582     tcg_gen_extu_i32_i64(carry, carry_32);
583 #else
584     carry = carry_32;
585 #endif
586
587     tcg_gen_sub_tl(dst, src1, src2);
588     tcg_gen_sub_tl(dst, dst, carry);
589
590     tcg_temp_free_i32(carry_32);
591 #if TARGET_LONG_BITS == 64
592     tcg_temp_free(carry);
593 #endif
594
595 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
596  sub_done:
597 #endif
598     if (update_cc) {
599         tcg_gen_mov_tl(cpu_cc_src, src1);
600         tcg_gen_mov_tl(cpu_cc_src2, src2);
601         tcg_gen_mov_tl(cpu_cc_dst, dst);
602         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
603         dc->cc_op = CC_OP_SUBX;
604     }
605 }
606
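/* MULScc: one step of the iterative 32x32 multiply.  If bit 0 of %y is
   clear, the addend (rs2) is replaced by zero; %y is shifted right one bit
   with bit 0 of rs1 shifted in at the top; rs1 is shifted right one bit
   with (N ^ V) shifted in at the top; the shifted rs1 and the (possibly
   zeroed) rs2 are then added, leaving cc_src/cc_src2/cc_dst set up for the
   subsequent flags computation.  */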
607 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
608 {
609     TCGv r_temp, zero, t0;
610
611     r_temp = tcg_temp_new();
612     t0 = tcg_temp_new();
613
614     /* old op:
615     if (!(env->y & 1))
616         T1 = 0;
617     */
618     zero = tcg_const_tl(0);
619     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
620     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
621     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
622     tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
623                        zero, cpu_cc_src2);
624     tcg_temp_free(zero);
625
626     // b2 = T0 & 1;
627     // env->y = (b2 << 31) | (env->y >> 1);
628     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
629     tcg_gen_shli_tl(r_temp, r_temp, 31);
630     tcg_gen_shri_tl(t0, cpu_y, 1);
631     tcg_gen_andi_tl(t0, t0, 0x7fffffff);
632     tcg_gen_or_tl(t0, t0, r_temp);
633     tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);
634
635     // b1 = N ^ V;
636     gen_mov_reg_N(t0, cpu_psr);
637     gen_mov_reg_V(r_temp, cpu_psr);
638     tcg_gen_xor_tl(t0, t0, r_temp);
639     tcg_temp_free(r_temp);
640
641     // T0 = (b1 << 31) | (T0 >> 1);
642     // src1 = T0;
643     tcg_gen_shli_tl(t0, t0, 31);
644     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
645     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
646     tcg_temp_free(t0);
647
648     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
649
650     tcg_gen_mov_tl(dst, cpu_cc_dst);
651 }
652
653 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
654 {
655     TCGv_i32 r_src1, r_src2;
656     TCGv_i64 r_temp, r_temp2;
657
658     r_src1 = tcg_temp_new_i32();
659     r_src2 = tcg_temp_new_i32();
660
661     tcg_gen_trunc_tl_i32(r_src1, src1);
662     tcg_gen_trunc_tl_i32(r_src2, src2);
663
664     r_temp = tcg_temp_new_i64();
665     r_temp2 = tcg_temp_new_i64();
666
667     if (sign_ext) {
668         tcg_gen_ext_i32_i64(r_temp, r_src2);
669         tcg_gen_ext_i32_i64(r_temp2, r_src1);
670     } else {
671         tcg_gen_extu_i32_i64(r_temp, r_src2);
672         tcg_gen_extu_i32_i64(r_temp2, r_src1);
673     }
674
675     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
676
677     tcg_gen_shri_i64(r_temp, r_temp2, 32);
678     tcg_gen_trunc_i64_tl(cpu_y, r_temp);
679     tcg_temp_free_i64(r_temp);
680     tcg_gen_andi_tl(cpu_y, cpu_y, 0xffffffff);
681
682     tcg_gen_trunc_i64_tl(dst, r_temp2);
683
684     tcg_temp_free_i64(r_temp2);
685
686     tcg_temp_free_i32(r_src1);
687     tcg_temp_free_i32(r_src2);
688 }
689
690 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
691 {
692     /* zero-extend truncated operands before multiplication */
693     gen_op_multiply(dst, src1, src2, 0);
694 }
695
696 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
697 {
698     /* sign-extend truncated operands before multiplication */
699     gen_op_multiply(dst, src1, src2, 1);
700 }
701
702 // 1
703 static inline void gen_op_eval_ba(TCGv dst)
704 {
705     tcg_gen_movi_tl(dst, 1);
706 }
707
708 // Z
709 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
710 {
711     gen_mov_reg_Z(dst, src);
712 }
713
714 // Z | (N ^ V)
715 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
716 {
717     TCGv t0 = tcg_temp_new();
718     gen_mov_reg_N(t0, src);
719     gen_mov_reg_V(dst, src);
720     tcg_gen_xor_tl(dst, dst, t0);
721     gen_mov_reg_Z(t0, src);
722     tcg_gen_or_tl(dst, dst, t0);
723     tcg_temp_free(t0);
724 }
725
726 // N ^ V
727 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
728 {
729     TCGv t0 = tcg_temp_new();
730     gen_mov_reg_V(t0, src);
731     gen_mov_reg_N(dst, src);
732     tcg_gen_xor_tl(dst, dst, t0);
733     tcg_temp_free(t0);
734 }
735
736 // C | Z
737 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
738 {
739     TCGv t0 = tcg_temp_new();
740     gen_mov_reg_Z(t0, src);
741     gen_mov_reg_C(dst, src);
742     tcg_gen_or_tl(dst, dst, t0);
743     tcg_temp_free(t0);
744 }
745
746 // C
747 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
748 {
749     gen_mov_reg_C(dst, src);
750 }
751
752 // V
753 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
754 {
755     gen_mov_reg_V(dst, src);
756 }
757
758 // 0
759 static inline void gen_op_eval_bn(TCGv dst)
760 {
761     tcg_gen_movi_tl(dst, 0);
762 }
763
764 // N
765 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
766 {
767     gen_mov_reg_N(dst, src);
768 }
769
770 // !Z
771 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
772 {
773     gen_mov_reg_Z(dst, src);
774     tcg_gen_xori_tl(dst, dst, 0x1);
775 }
776
777 // !(Z | (N ^ V))
778 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
779 {
780     gen_op_eval_ble(dst, src);
781     tcg_gen_xori_tl(dst, dst, 0x1);
782 }
783
784 // !(N ^ V)
785 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
786 {
787     gen_op_eval_bl(dst, src);
788     tcg_gen_xori_tl(dst, dst, 0x1);
789 }
790
791 // !(C | Z)
792 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
793 {
794     gen_op_eval_bleu(dst, src);
795     tcg_gen_xori_tl(dst, dst, 0x1);
796 }
797
798 // !C
799 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
800 {
801     gen_mov_reg_C(dst, src);
802     tcg_gen_xori_tl(dst, dst, 0x1);
803 }
804
805 // !N
806 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
807 {
808     gen_mov_reg_N(dst, src);
809     tcg_gen_xori_tl(dst, dst, 0x1);
810 }
811
812 // !V
813 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
814 {
815     gen_mov_reg_V(dst, src);
816     tcg_gen_xori_tl(dst, dst, 0x1);
817 }
818
819 /*
820   FPSR condition code field (FCC1:FCC0):
821    0  =  (equal)
822    1  <  (less)
823    2  >  (greater)
824    3  unordered
825 */
826 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
827                                     unsigned int fcc_offset)
828 {
829     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
830     tcg_gen_andi_tl(reg, reg, 0x1);
831 }
832
833 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
834                                     unsigned int fcc_offset)
835 {
836     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
837     tcg_gen_andi_tl(reg, reg, 0x1);
838 }
839
840 // !0: FCC0 | FCC1
841 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
842                                     unsigned int fcc_offset)
843 {
844     TCGv t0 = tcg_temp_new();
845     gen_mov_reg_FCC0(dst, src, fcc_offset);
846     gen_mov_reg_FCC1(t0, src, fcc_offset);
847     tcg_gen_or_tl(dst, dst, t0);
848     tcg_temp_free(t0);
849 }
850
851 // 1 or 2: FCC0 ^ FCC1
852 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
853                                     unsigned int fcc_offset)
854 {
855     TCGv t0 = tcg_temp_new();
856     gen_mov_reg_FCC0(dst, src, fcc_offset);
857     gen_mov_reg_FCC1(t0, src, fcc_offset);
858     tcg_gen_xor_tl(dst, dst, t0);
859     tcg_temp_free(t0);
860 }
861
862 // 1 or 3: FCC0
863 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
864                                     unsigned int fcc_offset)
865 {
866     gen_mov_reg_FCC0(dst, src, fcc_offset);
867 }
868
869 // 1: FCC0 & !FCC1
870 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
871                                     unsigned int fcc_offset)
872 {
873     TCGv t0 = tcg_temp_new();
874     gen_mov_reg_FCC0(dst, src, fcc_offset);
875     gen_mov_reg_FCC1(t0, src, fcc_offset);
876     tcg_gen_andc_tl(dst, dst, t0);
877     tcg_temp_free(t0);
878 }
879
880 // 2 or 3: FCC1
881 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
882                                     unsigned int fcc_offset)
883 {
884     gen_mov_reg_FCC1(dst, src, fcc_offset);
885 }
886
887 // 2: !FCC0 & FCC1
888 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
889                                     unsigned int fcc_offset)
890 {
891     TCGv t0 = tcg_temp_new();
892     gen_mov_reg_FCC0(dst, src, fcc_offset);
893     gen_mov_reg_FCC1(t0, src, fcc_offset);
894     tcg_gen_andc_tl(dst, t0, dst);
895     tcg_temp_free(t0);
896 }
897
898 // 3: FCC0 & FCC1
899 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
900                                     unsigned int fcc_offset)
901 {
902     TCGv t0 = tcg_temp_new();
903     gen_mov_reg_FCC0(dst, src, fcc_offset);
904     gen_mov_reg_FCC1(t0, src, fcc_offset);
905     tcg_gen_and_tl(dst, dst, t0);
906     tcg_temp_free(t0);
907 }
908
909 // 0: !(FCC0 | FCC1)
910 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
911                                     unsigned int fcc_offset)
912 {
913     TCGv t0 = tcg_temp_new();
914     gen_mov_reg_FCC0(dst, src, fcc_offset);
915     gen_mov_reg_FCC1(t0, src, fcc_offset);
916     tcg_gen_or_tl(dst, dst, t0);
917     tcg_gen_xori_tl(dst, dst, 0x1);
918     tcg_temp_free(t0);
919 }
920
921 // 0 or 3: !(FCC0 ^ FCC1)
922 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
923                                     unsigned int fcc_offset)
924 {
925     TCGv t0 = tcg_temp_new();
926     gen_mov_reg_FCC0(dst, src, fcc_offset);
927     gen_mov_reg_FCC1(t0, src, fcc_offset);
928     tcg_gen_xor_tl(dst, dst, t0);
929     tcg_gen_xori_tl(dst, dst, 0x1);
930     tcg_temp_free(t0);
931 }
932
933 // 0 or 2: !FCC0
934 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
935                                     unsigned int fcc_offset)
936 {
937     gen_mov_reg_FCC0(dst, src, fcc_offset);
938     tcg_gen_xori_tl(dst, dst, 0x1);
939 }
940
941 // !1: !(FCC0 & !FCC1)
942 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
943                                     unsigned int fcc_offset)
944 {
945     TCGv t0 = tcg_temp_new();
946     gen_mov_reg_FCC0(dst, src, fcc_offset);
947     gen_mov_reg_FCC1(t0, src, fcc_offset);
948     tcg_gen_andc_tl(dst, dst, t0);
949     tcg_gen_xori_tl(dst, dst, 0x1);
950     tcg_temp_free(t0);
951 }
952
953 // 0 or 1: !FCC1
954 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
955                                     unsigned int fcc_offset)
956 {
957     gen_mov_reg_FCC1(dst, src, fcc_offset);
958     tcg_gen_xori_tl(dst, dst, 0x1);
959 }
960
961 // !2: !(!FCC0 & FCC1)
962 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
963                                     unsigned int fcc_offset)
964 {
965     TCGv t0 = tcg_temp_new();
966     gen_mov_reg_FCC0(dst, src, fcc_offset);
967     gen_mov_reg_FCC1(t0, src, fcc_offset);
968     tcg_gen_andc_tl(dst, t0, dst);
969     tcg_gen_xori_tl(dst, dst, 0x1);
970     tcg_temp_free(t0);
971 }
972
973 // !3: !(FCC0 & FCC1)
974 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
975                                     unsigned int fcc_offset)
976 {
977     TCGv t0 = tcg_temp_new();
978     gen_mov_reg_FCC0(dst, src, fcc_offset);
979     gen_mov_reg_FCC1(t0, src, fcc_offset);
980     tcg_gen_and_tl(dst, dst, t0);
981     tcg_gen_xori_tl(dst, dst, 0x1);
982     tcg_temp_free(t0);
983 }
984
985 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
986                                target_ulong pc2, TCGv r_cond)
987 {
988     int l1;
989
990     l1 = gen_new_label();
991
992     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
993
994     gen_goto_tb(dc, 0, pc1, pc1 + 4);
995
996     gen_set_label(l1);
997     gen_goto_tb(dc, 1, pc2, pc2 + 4);
998 }
999
1000 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1001                                 target_ulong pc2, TCGv r_cond)
1002 {
1003     int l1;
1004
1005     l1 = gen_new_label();
1006
1007     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1008
1009     gen_goto_tb(dc, 0, pc2, pc1);
1010
1011     gen_set_label(l1);
1012     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1013 }
1014
1015 static inline void gen_generic_branch(DisasContext *dc)
1016 {
1017     TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
1018     TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
1019     TCGv zero = tcg_const_tl(0);
1020
1021     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
1022
1023     tcg_temp_free(npc0);
1024     tcg_temp_free(npc1);
1025     tcg_temp_free(zero);
1026 }
1027
1028 /* call this function before using the condition register as it may
1029    have been set for a jump */
1030 static inline void flush_cond(DisasContext *dc)
1031 {
1032     if (dc->npc == JUMP_PC) {
1033         gen_generic_branch(dc);
1034         dc->npc = DYNAMIC_PC;
1035     }
1036 }
1037
1038 static inline void save_npc(DisasContext *dc)
1039 {
1040     if (dc->npc == JUMP_PC) {
1041         gen_generic_branch(dc);
1042         dc->npc = DYNAMIC_PC;
1043     } else if (dc->npc != DYNAMIC_PC) {
1044         tcg_gen_movi_tl(cpu_npc, dc->npc);
1045     }
1046 }
1047
1048 static inline void update_psr(DisasContext *dc)
1049 {
1050     if (dc->cc_op != CC_OP_FLAGS) {
1051         dc->cc_op = CC_OP_FLAGS;
1052         gen_helper_compute_psr(cpu_env);
1053     }
1054 }
1055
1056 static inline void save_state(DisasContext *dc)
1057 {
1058     tcg_gen_movi_tl(cpu_pc, dc->pc);
1059     save_npc(dc);
1060 }
1061
1062 static inline void gen_mov_pc_npc(DisasContext *dc)
1063 {
1064     if (dc->npc == JUMP_PC) {
1065         gen_generic_branch(dc);
1066         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1067         dc->pc = DYNAMIC_PC;
1068     } else if (dc->npc == DYNAMIC_PC) {
1069         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1070         dc->pc = DYNAMIC_PC;
1071     } else {
1072         dc->pc = dc->npc;
1073     }
1074 }
1075
1076 static inline void gen_op_next_insn(void)
1077 {
1078     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1079     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1080 }
1081
1082 static void free_compare(DisasCompare *cmp)
1083 {
1084     if (!cmp->g1) {
1085         tcg_temp_free(cmp->c1);
1086     }
1087     if (!cmp->g2) {
1088         tcg_temp_free(cmp->c2);
1089     }
1090 }
1091
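/* Build a DisasCompare for integer condition code 'cond' (icc or xcc).
   When the pending cc_op is LOGIC or SUB, the comparison can be expressed
   directly on cpu_cc_dst or cpu_cc_src/cpu_cc_src2 without materialising
   the PSR; any other cc_op (and the overflow conditions) falls back to
   gen_helper_compute_psr() and a boolean result.  */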
1092 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1093                         DisasContext *dc)
1094 {
1095     static int subcc_cond[16] = {
1096         TCG_COND_NEVER,
1097         TCG_COND_EQ,
1098         TCG_COND_LE,
1099         TCG_COND_LT,
1100         TCG_COND_LEU,
1101         TCG_COND_LTU,
1102         -1, /* neg */
1103         -1, /* overflow */
1104         TCG_COND_ALWAYS,
1105         TCG_COND_NE,
1106         TCG_COND_GT,
1107         TCG_COND_GE,
1108         TCG_COND_GTU,
1109         TCG_COND_GEU,
1110         -1, /* pos */
1111         -1, /* no overflow */
1112     };
1113
1114     static int logic_cond[16] = {
1115         TCG_COND_NEVER,
1116         TCG_COND_EQ,     /* eq:  Z */
1117         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1118         TCG_COND_LT,     /* lt:  N ^ V -> N */
1119         TCG_COND_EQ,     /* leu: C | Z -> Z */
1120         TCG_COND_NEVER,  /* ltu: C -> 0 */
1121         TCG_COND_LT,     /* neg: N */
1122         TCG_COND_NEVER,  /* vs:  V -> 0 */
1123         TCG_COND_ALWAYS,
1124         TCG_COND_NE,     /* ne:  !Z */
1125         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1126         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1127         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1128         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1129         TCG_COND_GE,     /* pos: !N */
1130         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1131     };
1132
1133     TCGv_i32 r_src;
1134     TCGv r_dst;
1135
1136 #ifdef TARGET_SPARC64
1137     if (xcc) {
1138         r_src = cpu_xcc;
1139     } else {
1140         r_src = cpu_psr;
1141     }
1142 #else
1143     r_src = cpu_psr;
1144 #endif
1145
1146     switch (dc->cc_op) {
1147     case CC_OP_LOGIC:
1148         cmp->cond = logic_cond[cond];
1149     do_compare_dst_0:
1150         cmp->is_bool = false;
1151         cmp->g2 = false;
1152         cmp->c2 = tcg_const_tl(0);
1153 #ifdef TARGET_SPARC64
1154         if (!xcc) {
1155             cmp->g1 = false;
1156             cmp->c1 = tcg_temp_new();
1157             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1158             break;
1159         }
1160 #endif
1161         cmp->g1 = true;
1162         cmp->c1 = cpu_cc_dst;
1163         break;
1164
1165     case CC_OP_SUB:
1166         switch (cond) {
1167         case 6:  /* neg */
1168         case 14: /* pos */
1169             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1170             goto do_compare_dst_0;
1171
1172         case 7: /* overflow */
1173         case 15: /* !overflow */
1174             goto do_dynamic;
1175
1176         default:
1177             cmp->cond = subcc_cond[cond];
1178             cmp->is_bool = false;
1179 #ifdef TARGET_SPARC64
1180             if (!xcc) {
1181                 /* Note that sign-extension works for unsigned compares as
1182                    long as both operands are sign-extended.  */
1183                 cmp->g1 = cmp->g2 = false;
1184                 cmp->c1 = tcg_temp_new();
1185                 cmp->c2 = tcg_temp_new();
1186                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1187                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1188                 break;
1189             }
1190 #endif
1191             cmp->g1 = cmp->g2 = true;
1192             cmp->c1 = cpu_cc_src;
1193             cmp->c2 = cpu_cc_src2;
1194             break;
1195         }
1196         break;
1197
1198     default:
1199     do_dynamic:
1200         gen_helper_compute_psr(cpu_env);
1201         dc->cc_op = CC_OP_FLAGS;
1202         /* FALLTHRU */
1203
1204     case CC_OP_FLAGS:
1205         /* We're going to generate a boolean result.  */
1206         cmp->cond = TCG_COND_NE;
1207         cmp->is_bool = true;
1208         cmp->g1 = cmp->g2 = false;
1209         cmp->c1 = r_dst = tcg_temp_new();
1210         cmp->c2 = tcg_const_tl(0);
1211
1212         switch (cond) {
1213         case 0x0:
1214             gen_op_eval_bn(r_dst);
1215             break;
1216         case 0x1:
1217             gen_op_eval_be(r_dst, r_src);
1218             break;
1219         case 0x2:
1220             gen_op_eval_ble(r_dst, r_src);
1221             break;
1222         case 0x3:
1223             gen_op_eval_bl(r_dst, r_src);
1224             break;
1225         case 0x4:
1226             gen_op_eval_bleu(r_dst, r_src);
1227             break;
1228         case 0x5:
1229             gen_op_eval_bcs(r_dst, r_src);
1230             break;
1231         case 0x6:
1232             gen_op_eval_bneg(r_dst, r_src);
1233             break;
1234         case 0x7:
1235             gen_op_eval_bvs(r_dst, r_src);
1236             break;
1237         case 0x8:
1238             gen_op_eval_ba(r_dst);
1239             break;
1240         case 0x9:
1241             gen_op_eval_bne(r_dst, r_src);
1242             break;
1243         case 0xa:
1244             gen_op_eval_bg(r_dst, r_src);
1245             break;
1246         case 0xb:
1247             gen_op_eval_bge(r_dst, r_src);
1248             break;
1249         case 0xc:
1250             gen_op_eval_bgu(r_dst, r_src);
1251             break;
1252         case 0xd:
1253             gen_op_eval_bcc(r_dst, r_src);
1254             break;
1255         case 0xe:
1256             gen_op_eval_bpos(r_dst, r_src);
1257             break;
1258         case 0xf:
1259             gen_op_eval_bvc(r_dst, r_src);
1260             break;
1261         }
1262         break;
1263     }
1264 }
1265
1266 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1267 {
1268     unsigned int offset;
1269     TCGv r_dst;
1270
1271     /* For now we still generate a straight boolean result.  */
1272     cmp->cond = TCG_COND_NE;
1273     cmp->is_bool = true;
1274     cmp->g1 = cmp->g2 = false;
1275     cmp->c1 = r_dst = tcg_temp_new();
1276     cmp->c2 = tcg_const_tl(0);
1277
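    /* Offsets are relative to FSR_FCC0_SHIFT (bit 10): fcc1, fcc2 and fcc3
       live at FSR bits 32, 34 and 36 respectively on SPARC64.  */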
1278     switch (cc) {
1279     default:
1280     case 0x0:
1281         offset = 0;
1282         break;
1283     case 0x1:
1284         offset = 32 - 10;
1285         break;
1286     case 0x2:
1287         offset = 34 - 10;
1288         break;
1289     case 0x3:
1290         offset = 36 - 10;
1291         break;
1292     }
1293
1294     switch (cond) {
1295     case 0x0:
1296         gen_op_eval_bn(r_dst);
1297         break;
1298     case 0x1:
1299         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1300         break;
1301     case 0x2:
1302         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1303         break;
1304     case 0x3:
1305         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1306         break;
1307     case 0x4:
1308         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1309         break;
1310     case 0x5:
1311         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1312         break;
1313     case 0x6:
1314         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1315         break;
1316     case 0x7:
1317         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1318         break;
1319     case 0x8:
1320         gen_op_eval_ba(r_dst);
1321         break;
1322     case 0x9:
1323         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1324         break;
1325     case 0xa:
1326         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1327         break;
1328     case 0xb:
1329         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1330         break;
1331     case 0xc:
1332         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1333         break;
1334     case 0xd:
1335         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1336         break;
1337     case 0xe:
1338         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1339         break;
1340     case 0xf:
1341         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1342         break;
1343     }
1344 }
1345
1346 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1347                      DisasContext *dc)
1348 {
1349     DisasCompare cmp;
1350     gen_compare(&cmp, cc, cond, dc);
1351
1352     /* The interface is to return a boolean in r_dst.  */
1353     if (cmp.is_bool) {
1354         tcg_gen_mov_tl(r_dst, cmp.c1);
1355     } else {
1356         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1357     }
1358
1359     free_compare(&cmp);
1360 }
1361
1362 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1363 {
1364     DisasCompare cmp;
1365     gen_fcompare(&cmp, cc, cond);
1366
1367     /* The interface is to return a boolean in r_dst.  */
1368     if (cmp.is_bool) {
1369         tcg_gen_mov_tl(r_dst, cmp.c1);
1370     } else {
1371         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1372     }
1373
1374     free_compare(&cmp);
1375 }
1376
1377 #ifdef TARGET_SPARC64
1378 // Note: this table stores the *inverted* condition;
1379 // gen_compare_reg() undoes the inversion with tcg_invert_cond().
1379 static const int gen_tcg_cond_reg[8] = {
1380     -1,
1381     TCG_COND_NE,
1382     TCG_COND_GT,
1383     TCG_COND_GE,
1384     -1,
1385     TCG_COND_EQ,
1386     TCG_COND_LE,
1387     TCG_COND_LT,
1388 };
1389
1390 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1391 {
1392     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1393     cmp->is_bool = false;
1394     cmp->g1 = true;
1395     cmp->g2 = false;
1396     cmp->c1 = r_src;
1397     cmp->c2 = tcg_const_tl(0);
1398 }
1399
1400 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1401 {
1402     DisasCompare cmp;
1403     gen_compare_reg(&cmp, cond, r_src);
1404
1405     /* The interface is to return a boolean in r_dst.  */
1406     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1407
1408     free_compare(&cmp);
1409 }
1410 #endif
1411
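/* Conditional branches.  cond 0 is "branch never" and cond 8 is "branch
   always"; for these only the annul bit decides whether the delay slot
   executes.  For real conditions the value is evaluated into cpu_cond and
   npc becomes a two-valued JUMP_PC, or, with the annul bit set, the branch
   is resolved immediately via gen_branch_a().  */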
1412 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1413 {
1414     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1415     target_ulong target = dc->pc + offset;
1416
1417 #ifdef TARGET_SPARC64
1418     if (unlikely(AM_CHECK(dc))) {
1419         target &= 0xffffffffULL;
1420     }
1421 #endif
1422     if (cond == 0x0) {
1423         /* unconditional not taken */
1424         if (a) {
1425             dc->pc = dc->npc + 4;
1426             dc->npc = dc->pc + 4;
1427         } else {
1428             dc->pc = dc->npc;
1429             dc->npc = dc->pc + 4;
1430         }
1431     } else if (cond == 0x8) {
1432         /* unconditional taken */
1433         if (a) {
1434             dc->pc = target;
1435             dc->npc = dc->pc + 4;
1436         } else {
1437             dc->pc = dc->npc;
1438             dc->npc = target;
1439             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1440         }
1441     } else {
1442         flush_cond(dc);
1443         gen_cond(cpu_cond, cc, cond, dc);
1444         if (a) {
1445             gen_branch_a(dc, target, dc->npc, cpu_cond);
1446             dc->is_br = 1;
1447         } else {
1448             dc->pc = dc->npc;
1449             dc->jump_pc[0] = target;
1450             if (unlikely(dc->npc == DYNAMIC_PC)) {
1451                 dc->jump_pc[1] = DYNAMIC_PC;
1452                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1453             } else {
1454                 dc->jump_pc[1] = dc->npc + 4;
1455                 dc->npc = JUMP_PC;
1456             }
1457         }
1458     }
1459 }
1460
1461 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1462 {
1463     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1464     target_ulong target = dc->pc + offset;
1465
1466 #ifdef TARGET_SPARC64
1467     if (unlikely(AM_CHECK(dc))) {
1468         target &= 0xffffffffULL;
1469     }
1470 #endif
1471     if (cond == 0x0) {
1472         /* unconditional not taken */
1473         if (a) {
1474             dc->pc = dc->npc + 4;
1475             dc->npc = dc->pc + 4;
1476         } else {
1477             dc->pc = dc->npc;
1478             dc->npc = dc->pc + 4;
1479         }
1480     } else if (cond == 0x8) {
1481         /* unconditional taken */
1482         if (a) {
1483             dc->pc = target;
1484             dc->npc = dc->pc + 4;
1485         } else {
1486             dc->pc = dc->npc;
1487             dc->npc = target;
1488             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1489         }
1490     } else {
1491         flush_cond(dc);
1492         gen_fcond(cpu_cond, cc, cond);
1493         if (a) {
1494             gen_branch_a(dc, target, dc->npc, cpu_cond);
1495             dc->is_br = 1;
1496         } else {
1497             dc->pc = dc->npc;
1498             dc->jump_pc[0] = target;
1499             if (unlikely(dc->npc == DYNAMIC_PC)) {
1500                 dc->jump_pc[1] = DYNAMIC_PC;
1501                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1502             } else {
1503                 dc->jump_pc[1] = dc->npc + 4;
1504                 dc->npc = JUMP_PC;
1505             }
1506         }
1507     }
1508 }
1509
1510 #ifdef TARGET_SPARC64
1511 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1512                           TCGv r_reg)
1513 {
1514     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1515     target_ulong target = dc->pc + offset;
1516
1517     if (unlikely(AM_CHECK(dc))) {
1518         target &= 0xffffffffULL;
1519     }
1520     flush_cond(dc);
1521     gen_cond_reg(cpu_cond, cond, r_reg);
1522     if (a) {
1523         gen_branch_a(dc, target, dc->npc, cpu_cond);
1524         dc->is_br = 1;
1525     } else {
1526         dc->pc = dc->npc;
1527         dc->jump_pc[0] = target;
1528         if (unlikely(dc->npc == DYNAMIC_PC)) {
1529             dc->jump_pc[1] = DYNAMIC_PC;
1530             tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1531         } else {
1532             dc->jump_pc[1] = dc->npc + 4;
1533             dc->npc = JUMP_PC;
1534         }
1535     }
1536 }
1537
1538 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1539 {
1540     switch (fccno) {
1541     case 0:
1542         gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1543         break;
1544     case 1:
1545         gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1546         break;
1547     case 2:
1548         gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1549         break;
1550     case 3:
1551         gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1552         break;
1553     }
1554 }
1555
1556 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1557 {
1558     switch (fccno) {
1559     case 0:
1560         gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1561         break;
1562     case 1:
1563         gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1564         break;
1565     case 2:
1566         gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1567         break;
1568     case 3:
1569         gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1570         break;
1571     }
1572 }
1573
1574 static inline void gen_op_fcmpq(int fccno)
1575 {
1576     switch (fccno) {
1577     case 0:
1578         gen_helper_fcmpq(cpu_env);
1579         break;
1580     case 1:
1581         gen_helper_fcmpq_fcc1(cpu_env);
1582         break;
1583     case 2:
1584         gen_helper_fcmpq_fcc2(cpu_env);
1585         break;
1586     case 3:
1587         gen_helper_fcmpq_fcc3(cpu_env);
1588         break;
1589     }
1590 }
1591
1592 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1593 {
1594     switch (fccno) {
1595     case 0:
1596         gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1597         break;
1598     case 1:
1599         gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1600         break;
1601     case 2:
1602         gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1603         break;
1604     case 3:
1605         gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1606         break;
1607     }
1608 }
1609
1610 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1611 {
1612     switch (fccno) {
1613     case 0:
1614         gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1615         break;
1616     case 1:
1617         gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1618         break;
1619     case 2:
1620         gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1621         break;
1622     case 3:
1623         gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1624         break;
1625     }
1626 }
1627
1628 static inline void gen_op_fcmpeq(int fccno)
1629 {
1630     switch (fccno) {
1631     case 0:
1632         gen_helper_fcmpeq(cpu_env);
1633         break;
1634     case 1:
1635         gen_helper_fcmpeq_fcc1(cpu_env);
1636         break;
1637     case 2:
1638         gen_helper_fcmpeq_fcc2(cpu_env);
1639         break;
1640     case 3:
1641         gen_helper_fcmpeq_fcc3(cpu_env);
1642         break;
1643     }
1644 }
1645
1646 #else
1647
1648 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1649 {
1650     gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1651 }
1652
1653 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1654 {
1655     gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1656 }
1657
1658 static inline void gen_op_fcmpq(int fccno)
1659 {
1660     gen_helper_fcmpq(cpu_env);
1661 }
1662
1663 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1664 {
1665     gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1666 }
1667
1668 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1669 {
1670     gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1671 }
1672
1673 static inline void gen_op_fcmpeq(int fccno)
1674 {
1675     gen_helper_fcmpeq(cpu_env);
1676 }
1677 #endif
1678
1679 static inline void gen_op_fpexception_im(int fsr_flags)
1680 {
1681     TCGv_i32 r_const;
1682
1683     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1684     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1685     r_const = tcg_const_i32(TT_FP_EXCP);
1686     gen_helper_raise_exception(cpu_env, r_const);
1687     tcg_temp_free_i32(r_const);
1688 }
1689
1690 static int gen_trap_ifnofpu(DisasContext *dc)
1691 {
1692 #if !defined(CONFIG_USER_ONLY)
1693     if (!dc->fpu_enabled) {
1694         TCGv_i32 r_const;
1695
1696         save_state(dc);
1697         r_const = tcg_const_i32(TT_NFPU_INSN);
1698         gen_helper_raise_exception(cpu_env, r_const);
1699         tcg_temp_free_i32(r_const);
1700         dc->is_br = 1;
1701         return 1;
1702     }
1703 #endif
1704     return 0;
1705 }
1706
1707 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1708 {
1709     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1710 }
1711
1712 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1713                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1714 {
1715     TCGv_i32 dst, src;
1716
1717     src = gen_load_fpr_F(dc, rs);
1718     dst = gen_dest_fpr_F(dc);
1719
1720     gen(dst, cpu_env, src);
1721
1722     gen_store_fpr_F(dc, rd, dst);
1723 }
1724
1725 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1726                                  void (*gen)(TCGv_i32, TCGv_i32))
1727 {
1728     TCGv_i32 dst, src;
1729
1730     src = gen_load_fpr_F(dc, rs);
1731     dst = gen_dest_fpr_F(dc);
1732
1733     gen(dst, src);
1734
1735     gen_store_fpr_F(dc, rd, dst);
1736 }
1737
1738 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1739                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1740 {
1741     TCGv_i32 dst, src1, src2;
1742
1743     src1 = gen_load_fpr_F(dc, rs1);
1744     src2 = gen_load_fpr_F(dc, rs2);
1745     dst = gen_dest_fpr_F(dc);
1746
1747     gen(dst, cpu_env, src1, src2);
1748
1749     gen_store_fpr_F(dc, rd, dst);
1750 }
1751
1752 #ifdef TARGET_SPARC64
1753 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1754                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1755 {
1756     TCGv_i32 dst, src1, src2;
1757
1758     src1 = gen_load_fpr_F(dc, rs1);
1759     src2 = gen_load_fpr_F(dc, rs2);
1760     dst = gen_dest_fpr_F(dc);
1761
1762     gen(dst, src1, src2);
1763
1764     gen_store_fpr_F(dc, rd, dst);
1765 }
1766 #endif
1767
1768 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1769                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1770 {
1771     TCGv_i64 dst, src;
1772
1773     src = gen_load_fpr_D(dc, rs);
1774     dst = gen_dest_fpr_D(dc, rd);
1775
1776     gen(dst, cpu_env, src);
1777
1778     gen_store_fpr_D(dc, rd, dst);
1779 }
1780
1781 #ifdef TARGET_SPARC64
1782 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1783                                  void (*gen)(TCGv_i64, TCGv_i64))
1784 {
1785     TCGv_i64 dst, src;
1786
1787     src = gen_load_fpr_D(dc, rs);
1788     dst = gen_dest_fpr_D(dc, rd);
1789
1790     gen(dst, src);
1791
1792     gen_store_fpr_D(dc, rd, dst);
1793 }
1794 #endif
1795
1796 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1797                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1798 {
1799     TCGv_i64 dst, src1, src2;
1800
1801     src1 = gen_load_fpr_D(dc, rs1);
1802     src2 = gen_load_fpr_D(dc, rs2);
1803     dst = gen_dest_fpr_D(dc, rd);
1804
1805     gen(dst, cpu_env, src1, src2);
1806
1807     gen_store_fpr_D(dc, rd, dst);
1808 }
1809
1810 #ifdef TARGET_SPARC64
1811 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1812                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1813 {
1814     TCGv_i64 dst, src1, src2;
1815
1816     src1 = gen_load_fpr_D(dc, rs1);
1817     src2 = gen_load_fpr_D(dc, rs2);
1818     dst = gen_dest_fpr_D(dc, rd);
1819
1820     gen(dst, src1, src2);
1821
1822     gen_store_fpr_D(dc, rd, dst);
1823 }
1824
1825 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1826                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1827 {
1828     TCGv_i64 dst, src1, src2;
1829
1830     src1 = gen_load_fpr_D(dc, rs1);
1831     src2 = gen_load_fpr_D(dc, rs2);
1832     dst = gen_dest_fpr_D(dc, rd);
1833
1834     gen(dst, cpu_gsr, src1, src2);
1835
1836     gen_store_fpr_D(dc, rd, dst);
1837 }
1838
1839 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1840                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1841 {
1842     TCGv_i64 dst, src0, src1, src2;
1843
1844     src1 = gen_load_fpr_D(dc, rs1);
1845     src2 = gen_load_fpr_D(dc, rs2);
1846     src0 = gen_load_fpr_D(dc, rd);
1847     dst = gen_dest_fpr_D(dc, rd);
1848
1849     gen(dst, src0, src1, src2);
1850
1851     gen_store_fpr_D(dc, rd, dst);
1852 }
1853 #endif
1854
1855 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1856                               void (*gen)(TCGv_ptr))
1857 {
1858     gen_op_load_fpr_QT1(QFPREG(rs));
1859
1860     gen(cpu_env);
1861
1862     gen_op_store_QT0_fpr(QFPREG(rd));
1863     gen_update_fprs_dirty(QFPREG(rd));
1864 }
1865
1866 #ifdef TARGET_SPARC64
1867 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1868                                  void (*gen)(TCGv_ptr))
1869 {
1870     gen_op_load_fpr_QT1(QFPREG(rs));
1871
1872     gen(cpu_env);
1873
1874     gen_op_store_QT0_fpr(QFPREG(rd));
1875     gen_update_fprs_dirty(QFPREG(rd));
1876 }
1877 #endif
1878
1879 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1880                                void (*gen)(TCGv_ptr))
1881 {
1882     gen_op_load_fpr_QT0(QFPREG(rs1));
1883     gen_op_load_fpr_QT1(QFPREG(rs2));
1884
1885     gen(cpu_env);
1886
1887     gen_op_store_QT0_fpr(QFPREG(rd));
1888     gen_update_fprs_dirty(QFPREG(rd));
1889 }
1890
1891 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1892                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1893 {
1894     TCGv_i64 dst;
1895     TCGv_i32 src1, src2;
1896
1897     src1 = gen_load_fpr_F(dc, rs1);
1898     src2 = gen_load_fpr_F(dc, rs2);
1899     dst = gen_dest_fpr_D(dc, rd);
1900
1901     gen(dst, cpu_env, src1, src2);
1902
1903     gen_store_fpr_D(dc, rd, dst);
1904 }
1905
1906 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1907                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1908 {
1909     TCGv_i64 src1, src2;
1910
1911     src1 = gen_load_fpr_D(dc, rs1);
1912     src2 = gen_load_fpr_D(dc, rs2);
1913
1914     gen(cpu_env, src1, src2);
1915
1916     gen_op_store_QT0_fpr(QFPREG(rd));
1917     gen_update_fprs_dirty(QFPREG(rd));
1918 }
1919
1920 #ifdef TARGET_SPARC64
1921 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1922                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1923 {
1924     TCGv_i64 dst;
1925     TCGv_i32 src;
1926
1927     src = gen_load_fpr_F(dc, rs);
1928     dst = gen_dest_fpr_D(dc, rd);
1929
1930     gen(dst, cpu_env, src);
1931
1932     gen_store_fpr_D(dc, rd, dst);
1933 }
1934 #endif
1935
1936 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1937                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1938 {
1939     TCGv_i64 dst;
1940     TCGv_i32 src;
1941
1942     src = gen_load_fpr_F(dc, rs);
1943     dst = gen_dest_fpr_D(dc, rd);
1944
1945     gen(dst, cpu_env, src);
1946
1947     gen_store_fpr_D(dc, rd, dst);
1948 }
1949
1950 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1951                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1952 {
1953     TCGv_i32 dst;
1954     TCGv_i64 src;
1955
1956     src = gen_load_fpr_D(dc, rs);
1957     dst = gen_dest_fpr_F(dc);
1958
1959     gen(dst, cpu_env, src);
1960
1961     gen_store_fpr_F(dc, rd, dst);
1962 }
1963
1964 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1965                               void (*gen)(TCGv_i32, TCGv_ptr))
1966 {
1967     TCGv_i32 dst;
1968
1969     gen_op_load_fpr_QT1(QFPREG(rs));
1970     dst = gen_dest_fpr_F(dc);
1971
1972     gen(dst, cpu_env);
1973
1974     gen_store_fpr_F(dc, rd, dst);
1975 }
1976
1977 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1978                               void (*gen)(TCGv_i64, TCGv_ptr))
1979 {
1980     TCGv_i64 dst;
1981
1982     gen_op_load_fpr_QT1(QFPREG(rs));
1983     dst = gen_dest_fpr_D(dc, rd);
1984
1985     gen(dst, cpu_env);
1986
1987     gen_store_fpr_D(dc, rd, dst);
1988 }
1989
1990 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1991                                  void (*gen)(TCGv_ptr, TCGv_i32))
1992 {
1993     TCGv_i32 src;
1994
1995     src = gen_load_fpr_F(dc, rs);
1996
1997     gen(cpu_env, src);
1998
1999     gen_op_store_QT0_fpr(QFPREG(rd));
2000     gen_update_fprs_dirty(QFPREG(rd));
2001 }
2002
2003 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
2004                                  void (*gen)(TCGv_ptr, TCGv_i64))
2005 {
2006     TCGv_i64 src;
2007
2008     src = gen_load_fpr_D(dc, rs);
2009
2010     gen(cpu_env, src);
2011
2012     gen_op_store_QT0_fpr(QFPREG(rd));
2013     gen_update_fprs_dirty(QFPREG(rd));
2014 }
2015
2016 /* asi moves */
2017 #ifdef TARGET_SPARC64
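/* Return the ASI for an alternate-space access: the %asi register when the
   immediate form is used, otherwise the 8-bit ASI field encoded in the
   instruction.  */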
2018 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
2019 {
2020     int asi;
2021     TCGv_i32 r_asi;
2022
2023     if (IS_IMM) {
2024         r_asi = tcg_temp_new_i32();
2025         tcg_gen_mov_i32(r_asi, cpu_asi);
2026     } else {
2027         asi = GET_FIELD(insn, 19, 26);
2028         r_asi = tcg_const_i32(asi);
2029     }
2030     return r_asi;
2031 }
2032
2033 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2034                               int sign)
2035 {
2036     TCGv_i32 r_asi, r_size, r_sign;
2037
2038     r_asi = gen_get_asi(insn, addr);
2039     r_size = tcg_const_i32(size);
2040     r_sign = tcg_const_i32(sign);
2041     gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
2042     tcg_temp_free_i32(r_sign);
2043     tcg_temp_free_i32(r_size);
2044     tcg_temp_free_i32(r_asi);
2045 }
2046
2047 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2048 {
2049     TCGv_i32 r_asi, r_size;
2050
2051     r_asi = gen_get_asi(insn, addr);
2052     r_size = tcg_const_i32(size);
2053     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2054     tcg_temp_free_i32(r_size);
2055     tcg_temp_free_i32(r_asi);
2056 }
2057
2058 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
2059 {
2060     TCGv_i32 r_asi, r_size, r_rd;
2061
2062     r_asi = gen_get_asi(insn, addr);
2063     r_size = tcg_const_i32(size);
2064     r_rd = tcg_const_i32(rd);
2065     gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2066     tcg_temp_free_i32(r_rd);
2067     tcg_temp_free_i32(r_size);
2068     tcg_temp_free_i32(r_asi);
2069 }
2070
2071 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
2072 {
2073     TCGv_i32 r_asi, r_size, r_rd;
2074
2075     r_asi = gen_get_asi(insn, addr);
2076     r_size = tcg_const_i32(size);
2077     r_rd = tcg_const_i32(rd);
2078     gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2079     tcg_temp_free_i32(r_rd);
2080     tcg_temp_free_i32(r_size);
2081     tcg_temp_free_i32(r_asi);
2082 }
2083
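/* SWAPA: load the old 32-bit word through the ASI helpers, store the new
   value to the same address, and hand back the old value truncated to the
   target word size.  */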
2084 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2085 {
2086     TCGv_i32 r_asi, r_size, r_sign;
2087     TCGv_i64 t64 = tcg_temp_new_i64();
2088
2089     r_asi = gen_get_asi(insn, addr);
2090     r_size = tcg_const_i32(4);
2091     r_sign = tcg_const_i32(0);
2092     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2093     tcg_temp_free_i32(r_sign);
2094     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2095     tcg_temp_free_i32(r_size);
2096     tcg_temp_free_i32(r_asi);
2097     tcg_gen_trunc_i64_tl(dst, t64);
2098     tcg_temp_free_i64(t64);
2099 }
2100
2101 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2102                                 int insn, int rd)
2103 {
2104     TCGv_i32 r_asi, r_rd;
2105
2106     r_asi = gen_get_asi(insn, addr);
2107     r_rd = tcg_const_i32(rd);
2108     gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
2109     tcg_temp_free_i32(r_rd);
2110     tcg_temp_free_i32(r_asi);
2111 }
2112
2113 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2114                                 int insn, int rd)
2115 {
2116     TCGv_i32 r_asi, r_size;
2117     TCGv lo = gen_load_gpr(dc, rd + 1);
2118     TCGv_i64 t64 = tcg_temp_new_i64();
2119
2120     tcg_gen_concat_tl_i64(t64, lo, hi);
2121     r_asi = gen_get_asi(insn, addr);
2122     r_size = tcg_const_i32(8);
2123     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2124     tcg_temp_free_i32(r_size);
2125     tcg_temp_free_i32(r_asi);
2126     tcg_temp_free_i64(t64);
2127 }
2128
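/* CASA/CASXA (this helper and gen_casx_asi below): compare-and-swap in the
   given address space.  The helper returns the previous memory contents,
   which are written back to rd as the architecture requires.  */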
2129 static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
2130                                TCGv val2, int insn, int rd)
2131 {
2132     TCGv val1 = gen_load_gpr(dc, rd);
2133     TCGv dst = gen_dest_gpr(dc, rd);
2134     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2135
2136     gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
2137     tcg_temp_free_i32(r_asi);
2138     gen_store_gpr(dc, rd, dst);
2139 }
2140
2141 static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
2142                                 TCGv val2, int insn, int rd)
2143 {
2144     TCGv val1 = gen_load_gpr(dc, rd);
2145     TCGv dst = gen_dest_gpr(dc, rd);
2146     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2147
2148     gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
2149     tcg_temp_free_i32(r_asi);
2150     gen_store_gpr(dc, rd, dst);
2151 }
2152
2153 #elif !defined(CONFIG_USER_ONLY)
2154
2155 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2156                               int sign)
2157 {
2158     TCGv_i32 r_asi, r_size, r_sign;
2159     TCGv_i64 t64 = tcg_temp_new_i64();
2160
2161     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2162     r_size = tcg_const_i32(size);
2163     r_sign = tcg_const_i32(sign);
2164     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2165     tcg_temp_free_i32(r_sign);
2166     tcg_temp_free_i32(r_size);
2167     tcg_temp_free_i32(r_asi);
2168     tcg_gen_trunc_i64_tl(dst, t64);
2169     tcg_temp_free_i64(t64);
2170 }
2171
2172 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2173 {
2174     TCGv_i32 r_asi, r_size;
2175     TCGv_i64 t64 = tcg_temp_new_i64();
2176
2177     tcg_gen_extu_tl_i64(t64, src);
2178     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2179     r_size = tcg_const_i32(size);
2180     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2181     tcg_temp_free_i32(r_size);
2182     tcg_temp_free_i32(r_asi);
2183     tcg_temp_free_i64(t64);
2184 }
2185
2186 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2187 {
2188     TCGv_i32 r_asi, r_size, r_sign;
2189     TCGv_i64 r_val, t64;
2190
2191     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2192     r_size = tcg_const_i32(4);
2193     r_sign = tcg_const_i32(0);
2194     t64 = tcg_temp_new_i64();
2195     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2196     tcg_temp_free_i32(r_sign);
2197     r_val = tcg_temp_new_i64();
2198     tcg_gen_extu_tl_i64(r_val, src);
2199     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2200     tcg_temp_free_i64(r_val);
2201     tcg_temp_free_i32(r_size);
2202     tcg_temp_free_i32(r_asi);
2203     tcg_gen_trunc_i64_tl(dst, t64);
2204     tcg_temp_free_i64(t64);
2205 }
2206
2207 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2208                                 int insn, int rd)
2209 {
2210     TCGv_i32 r_asi, r_size, r_sign;
2211     TCGv t;
2212     TCGv_i64 t64;
2213
2214     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2215     r_size = tcg_const_i32(8);
2216     r_sign = tcg_const_i32(0);
2217     t64 = tcg_temp_new_i64();
2218     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2219     tcg_temp_free_i32(r_sign);
2220     tcg_temp_free_i32(r_size);
2221     tcg_temp_free_i32(r_asi);
2222
2223     t = gen_dest_gpr(dc, rd + 1);
2224     tcg_gen_trunc_i64_tl(t, t64);
2225     gen_store_gpr(dc, rd + 1, t);
2226
2227     tcg_gen_shri_i64(t64, t64, 32);
2228     tcg_gen_trunc_i64_tl(hi, t64);
2229     tcg_temp_free_i64(t64);
2230     gen_store_gpr(dc, rd, hi);
2231 }
2232
2233 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2234                                 int insn, int rd)
2235 {
2236     TCGv_i32 r_asi, r_size;
2237     TCGv lo = gen_load_gpr(dc, rd + 1);
2238     TCGv_i64 t64 = tcg_temp_new_i64();
2239
2240     tcg_gen_concat_tl_i64(t64, lo, hi);
2241     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2242     r_size = tcg_const_i32(8);
2243     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2244     tcg_temp_free_i32(r_size);
2245     tcg_temp_free_i32(r_asi);
2246     tcg_temp_free_i64(t64);
2247 }
2248 #endif
2249
2250 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
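/* LDSTUBA: load the addressed byte via the ASI load helper, then store 0xff
   back to the same location via the ASI store helper.  */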
2251 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2252 {
2253     TCGv_i64 r_val;
2254     TCGv_i32 r_asi, r_size;
2255
2256     gen_ld_asi(dst, addr, insn, 1, 0);
2257
2258     r_val = tcg_const_i64(0xffULL);
2259     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2260     r_size = tcg_const_i32(1);
2261     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2262     tcg_temp_free_i32(r_size);
2263     tcg_temp_free_i32(r_asi);
2264     tcg_temp_free_i64(r_val);
2265 }
2266 #endif
2267
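/* Decode and load the rs1 operand; get_src2 below additionally handles the
   immediate form by materialising the sign-extended simm13 in a temporary.  */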
2268 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2269 {
2270     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2271     return gen_load_gpr(dc, rs1);
2272 }
2273
2274 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2275 {
2276     if (IS_IMM) { /* immediate */
2277         target_long simm = GET_FIELDs(insn, 19, 31);
2278         TCGv t = get_temp_tl(dc);
2279         tcg_gen_movi_tl(t, simm);
2280         return t;
2281     } else {      /* register */
2282         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2283         return gen_load_gpr(dc, rs2);
2284     }
2285 }
2286
2287 #ifdef TARGET_SPARC64
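/* Conditional move of a single-precision FP register (FMOVScc and relatives):
   fold the comparison down to 32 bits, then select between the source and the
   current destination value with movcond_i32.  */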
2288 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2289 {
2290     TCGv_i32 c32, zero, dst, s1, s2;
2291
2292     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2293        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2294        the latter.  */
2295     c32 = tcg_temp_new_i32();
2296     if (cmp->is_bool) {
2297         tcg_gen_trunc_i64_i32(c32, cmp->c1);
2298     } else {
2299         TCGv_i64 c64 = tcg_temp_new_i64();
2300         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2301         tcg_gen_trunc_i64_i32(c32, c64);
2302         tcg_temp_free_i64(c64);
2303     }
2304
2305     s1 = gen_load_fpr_F(dc, rs);
2306     s2 = gen_load_fpr_F(dc, rd);
2307     dst = gen_dest_fpr_F(dc);
2308     zero = tcg_const_i32(0);
2309
2310     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2311
2312     tcg_temp_free_i32(c32);
2313     tcg_temp_free_i32(zero);
2314     gen_store_fpr_F(dc, rd, dst);
2315 }
2316
2317 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2318 {
2319     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2320     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2321                         gen_load_fpr_D(dc, rs),
2322                         gen_load_fpr_D(dc, rd));
2323     gen_store_fpr_D(dc, rd, dst);
2324 }
2325
2326 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2327 {
2328     int qd = QFPREG(rd);
2329     int qs = QFPREG(rs);
2330
2331     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2332                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2333     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2334                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2335
2336     gen_update_fprs_dirty(qd);
2337 }
2338
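/* Compute a pointer to the current trap level's trap_state entry, i.e.
   &env->ts[env->tl & MAXTL_MASK], into r_tsptr.  */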
2339 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2340 {
2341     TCGv_i32 r_tl = tcg_temp_new_i32();
2342
2343     /* load env->tl into r_tl */
2344     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2345
2346     /* tl = [0 ... MAXTL_MASK], where MAXTL_MASK must be a power of 2 */
2347     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2348
2349     /* calculate offset to current trap state from env->ts, reuse r_tl */
2350     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2351     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2352
2353     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2354     {
2355         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2356         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2357         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2358         tcg_temp_free_ptr(r_tl_tmp);
2359     }
2360
2361     tcg_temp_free_i32(r_tl);
2362 }
2363
2364 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2365                      int width, bool cc, bool left)
2366 {
2367     TCGv lo1, lo2, t1, t2;
2368     uint64_t amask, tabl, tabr;
2369     int shift, imask, omask;
2370
2371     if (cc) {
2372         tcg_gen_mov_tl(cpu_cc_src, s1);
2373         tcg_gen_mov_tl(cpu_cc_src2, s2);
2374         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2375         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2376         dc->cc_op = CC_OP_SUB;
2377     }
2378
2379     /* Theory of operation: there are two tables, left and right (not to
2380        be confused with the left and right versions of the opcode).  These
2381        are indexed by the low-order bits of the inputs.  To make things "easy",
2382        these tables are loaded into two constants, TABL and TABR below.
2383        The operation index = (input & imask) << shift calculates the index
2384        into the constant, while val = (table >> index) & omask calculates
2385        the value we're looking for.  */
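    /* Worked example (width 8, left variant): with (s1 & 7) == 3 the index is
       3 << 3 = 24, and (0x80c0e0f0f8fcfeffULL >> 24) & 0xff yields 0xf8, a
       mask with five bits set, matching the five bytes that remain valid for
       a left boundary at offset 3 within an 8-byte block.  */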
2386     switch (width) {
2387     case 8:
2388         imask = 0x7;
2389         shift = 3;
2390         omask = 0xff;
2391         if (left) {
2392             tabl = 0x80c0e0f0f8fcfeffULL;
2393             tabr = 0xff7f3f1f0f070301ULL;
2394         } else {
2395             tabl = 0x0103070f1f3f7fffULL;
2396             tabr = 0xfffefcf8f0e0c080ULL;
2397         }
2398         break;
2399     case 16:
2400         imask = 0x6;
2401         shift = 1;
2402         omask = 0xf;
2403         if (left) {
2404             tabl = 0x8cef;
2405             tabr = 0xf731;
2406         } else {
2407             tabl = 0x137f;
2408             tabr = 0xfec8;
2409         }
2410         break;
2411     case 32:
2412         imask = 0x4;
2413         shift = 0;
2414         omask = 0x3;
2415         if (left) {
2416             tabl = (2 << 2) | 3;
2417             tabr = (3 << 2) | 1;
2418         } else {
2419             tabl = (1 << 2) | 3;
2420             tabr = (3 << 2) | 2;
2421         }
2422         break;
2423     default:
2424         abort();
2425     }
2426
2427     lo1 = tcg_temp_new();
2428     lo2 = tcg_temp_new();
2429     tcg_gen_andi_tl(lo1, s1, imask);
2430     tcg_gen_andi_tl(lo2, s2, imask);
2431     tcg_gen_shli_tl(lo1, lo1, shift);
2432     tcg_gen_shli_tl(lo2, lo2, shift);
2433
2434     t1 = tcg_const_tl(tabl);
2435     t2 = tcg_const_tl(tabr);
2436     tcg_gen_shr_tl(lo1, t1, lo1);
2437     tcg_gen_shr_tl(lo2, t2, lo2);
2438     tcg_gen_andi_tl(dst, lo1, omask);
2439     tcg_gen_andi_tl(lo2, lo2, omask);
2440
2441     amask = -8;
2442     if (AM_CHECK(dc)) {
2443         amask &= 0xffffffffULL;
2444     }
2445     tcg_gen_andi_tl(s1, s1, amask);
2446     tcg_gen_andi_tl(s2, s2, amask);
2447
2448     /* We want to compute
2449         dst = (s1 == s2 ? lo1 : lo1 & lo2).
2450        We've already done dst = lo1, so this reduces to
2451         dst &= (s1 == s2 ? -1 : lo2)
2452        Which we perform by
2453         lo2 |= -(s1 == s2)
2454         dst &= lo2
2455     */
2456     tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2457     tcg_gen_neg_tl(t1, t1);
2458     tcg_gen_or_tl(lo2, lo2, t1);
2459     tcg_gen_and_tl(dst, dst, lo2);
2460
2461     tcg_temp_free(lo1);
2462     tcg_temp_free(lo2);
2463     tcg_temp_free(t1);
2464     tcg_temp_free(t2);
2465 }
2466
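/* ALIGNADDRESS: dst = (s1 + s2) & ~7, with the low three bits of the sum
   (negated when "left" is set, presumably the alignaddrl form) deposited
   into GSR.align for use by a later FALIGNDATA.  */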
2467 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2468 {
2469     TCGv tmp = tcg_temp_new();
2470
2471     tcg_gen_add_tl(tmp, s1, s2);
2472     tcg_gen_andi_tl(dst, tmp, -8);
2473     if (left) {
2474         tcg_gen_neg_tl(tmp, tmp);
2475     }
2476     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2477
2478     tcg_temp_free(tmp);
2479 }
2480
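/* FALIGNDATA: with s = GSR.align * 8, compute
   dst = (s1 << s) | (s2 >> (64 - s)), i.e. extract eight bytes from the
   concatenation s1:s2 starting at the byte offset held in GSR.align; the
   64-bit right shift is split so the count stays within TCG's defined
   range.  */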
2481 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2482 {
2483     TCGv t1, t2, shift;
2484
2485     t1 = tcg_temp_new();
2486     t2 = tcg_temp_new();
2487     shift = tcg_temp_new();
2488
2489     tcg_gen_andi_tl(shift, gsr, 7);
2490     tcg_gen_shli_tl(shift, shift, 3);
2491     tcg_gen_shl_tl(t1, s1, shift);
2492
2493     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2494        shift of (up to 63) followed by a constant shift of 1.  */
2495     tcg_gen_xori_tl(shift, shift, 63);
2496     tcg_gen_shr_tl(t2, s2, shift);
2497     tcg_gen_shri_tl(t2, t2, 1);
2498
2499     tcg_gen_or_tl(dst, t1, t2);
2500
2501     tcg_temp_free(t1);
2502     tcg_temp_free(t2);
2503     tcg_temp_free(shift);
2504 }
2505 #endif
2506
2507 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
2508     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2509         goto illegal_insn;
2510 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2511     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2512         goto nfpu_insn;
2513
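/* The top-level decode below dispatches on the op field (insn bits 31:30):
   0 for branches and SETHI, 1 for CALL, 2 for the arithmetic/FPU group;
   op 3, presumably the load/store group, is handled further down.  */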
2514 /* Before translating an instruction, dc->pc must hold a static (known) value */
2515 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2516 {
2517     unsigned int opc, rs1, rs2, rd;
2518     TCGv cpu_src1, cpu_src2;
2519     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2520     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2521     target_long simm;
2522
2523     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2524         tcg_gen_debug_insn_start(dc->pc);
2525     }
2526
2527     opc = GET_FIELD(insn, 0, 1);
2528     rd = GET_FIELD(insn, 2, 6);
2529
2530     switch (opc) {
2531     case 0:                     /* branches/sethi */
2532         {
2533             unsigned int xop = GET_FIELD(insn, 7, 9);
2534             int32_t target;
2535             switch (xop) {
2536 #ifdef TARGET_SPARC64
2537             case 0x1:           /* V9 BPcc */
2538                 {
2539                     int cc;
2540
2541                     target = GET_FIELD_SP(insn, 0, 18);
2542                     target = sign_extend(target, 19);
2543                     target <<= 2;
2544                     cc = GET_FIELD_SP(insn, 20, 21);
2545                     if (cc == 0)
2546                         do_branch(dc, target, insn, 0);
2547                     else if (cc == 2)
2548                         do_branch(dc, target, insn, 1);
2549                     else
2550                         goto illegal_insn;
2551                     goto jmp_insn;
2552                 }
2553             case 0x3:           /* V9 BPr */
2554                 {
2555                     target = GET_FIELD_SP(insn, 0, 13) |
2556                         (GET_FIELD_SP(insn, 20, 21) << 14);
2557                     target = sign_extend(target, 16);
2558                     target <<= 2;
2559                     cpu_src1 = get_src1(dc, insn);
2560                     do_branch_reg(dc, target, insn, cpu_src1);
2561                     goto jmp_insn;
2562                 }
2563             case 0x5:           /* V9 FBPcc */
2564                 {
2565                     int cc = GET_FIELD_SP(insn, 20, 21);
2566                     if (gen_trap_ifnofpu(dc)) {
2567                         goto jmp_insn;
2568                     }
2569                     target = GET_FIELD_SP(insn, 0, 18);
2570                     target = sign_extend(target, 19);
2571                     target <<= 2;
2572                     do_fbranch(dc, target, insn, cc);
2573                     goto jmp_insn;
2574                 }
2575 #else
2576             case 0x7:           /* CBN+x */
2577                 {
2578                     goto ncp_insn;
2579                 }
2580 #endif
2581             case 0x2:           /* BN+x */
2582                 {
2583                     target = GET_FIELD(insn, 10, 31);
2584                     target = sign_extend(target, 22);
2585                     target <<= 2;
2586                     do_branch(dc, target, insn, 0);
2587                     goto jmp_insn;
2588                 }
2589             case 0x6:           /* FBN+x */
2590                 {
2591                     if (gen_trap_ifnofpu(dc)) {
2592                         goto jmp_insn;
2593                     }
2594                     target = GET_FIELD(insn, 10, 31);
2595                     target = sign_extend(target, 22);
2596                     target <<= 2;
2597                     do_fbranch(dc, target, insn, 0);
2598                     goto jmp_insn;
2599                 }
2600             case 0x4:           /* SETHI */
2601                 /* Special-case %g0 because that's the canonical nop.  */
2602                 if (rd) {
2603                     uint32_t value = GET_FIELD(insn, 10, 31);
2604                     TCGv t = gen_dest_gpr(dc, rd);
2605                     tcg_gen_movi_tl(t, value << 10);
2606                     gen_store_gpr(dc, rd, t);
2607                 }
2608                 break;
2609             case 0x0:           /* UNIMPL */
2610             default:
2611                 goto illegal_insn;
2612             }
2613             break;
2614         }
2615         break;
2616     case 1:                     /* CALL */
2617         {
2618             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2619             TCGv o7 = gen_dest_gpr(dc, 15);
2620
2621             tcg_gen_movi_tl(o7, dc->pc);
2622             gen_store_gpr(dc, 15, o7);
2623             target += dc->pc;
2624             gen_mov_pc_npc(dc);
2625 #ifdef TARGET_SPARC64
2626             if (unlikely(AM_CHECK(dc))) {
2627                 target &= 0xffffffffULL;
2628             }
2629 #endif
2630             dc->npc = target;
2631         }
2632         goto jmp_insn;
2633     case 2:                     /* FPU & Logical Operations */
2634         {
2635             unsigned int xop = GET_FIELD(insn, 7, 12);
2636             TCGv cpu_dst = get_temp_tl(dc);
2637             TCGv cpu_tmp0;
2638
2639             if (xop == 0x3a) {  /* generate trap */
2640                 int cond = GET_FIELD(insn, 3, 6);
2641                 TCGv_i32 trap;
2642                 int l1 = -1, mask;
2643
2644                 if (cond == 0) {
2645                     /* Trap never.  */
2646                     break;
2647                 }
2648
2649                 save_state(dc);
2650
2651                 if (cond != 8) {
2652                     /* Conditional trap.  */
2653                     DisasCompare cmp;
2654 #ifdef TARGET_SPARC64
2655                     /* V9 icc/xcc */
2656                     int cc = GET_FIELD_SP(insn, 11, 12);
2657                     if (cc == 0) {
2658                         gen_compare(&cmp, 0, cond, dc);
2659                     } else if (cc == 2) {
2660                         gen_compare(&cmp, 1, cond, dc);
2661                     } else {
2662                         goto illegal_insn;
2663                     }
2664 #else
2665                     gen_compare(&cmp, 0, cond, dc);
2666 #endif
2667                     l1 = gen_new_label();
2668                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2669                                       cmp.c1, cmp.c2, l1);
2670                     free_compare(&cmp);
2671                 }
2672
2673                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2674                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2675
2676                 /* Don't use the normal temporaries, as they may well have
2677                    gone out of scope with the branch above.  While we're
2678                    doing that we might as well pre-truncate to 32-bit.  */
2679                 trap = tcg_temp_new_i32();
2680
2681                 rs1 = GET_FIELD_SP(insn, 14, 18);
2682                 if (IS_IMM) {
2683                     rs2 = GET_FIELD_SP(insn, 0, 6);
2684                     if (rs1 == 0) {
2685                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2686                         /* Signal that the trap value is fully constant.  */
2687                         mask = 0;
2688                     } else {
2689                         TCGv t1 = gen_load_gpr(dc, rs1);
2690                         tcg_gen_trunc_tl_i32(trap, t1);
2691                         tcg_gen_addi_i32(trap, trap, rs2);
2692                     }
2693                 } else {
2694                     TCGv t1, t2;
2695                     rs2 = GET_FIELD_SP(insn, 0, 4);
2696                     t1 = gen_load_gpr(dc, rs1);
2697                     t2 = gen_load_gpr(dc, rs2);
2698                     tcg_gen_add_tl(t1, t1, t2);
2699                     tcg_gen_trunc_tl_i32(trap, t1);
2700                 }
2701                 if (mask != 0) {
2702                     tcg_gen_andi_i32(trap, trap, mask);
2703                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
2704                 }
2705
2706                 gen_helper_raise_exception(cpu_env, trap);
2707                 tcg_temp_free_i32(trap);
2708
2709                 if (cond == 8) {
2710                     /* An unconditional trap ends the TB.  */
2711                     dc->is_br = 1;
2712                     goto jmp_insn;
2713                 } else {
2714                     /* A conditional trap falls through to the next insn.  */
2715                     gen_set_label(l1);
2716                     break;
2717                 }
2718             } else if (xop == 0x28) {
2719                 rs1 = GET_FIELD(insn, 13, 17);
2720                 switch (rs1) {
2721                 case 0: /* rdy */
2722 #ifndef TARGET_SPARC64
2723                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2724                                        manual, rdy on the microSPARC
2725                                        II */
2726                 case 0x0f:          /* stbar in the SPARCv8 manual,
2727                                        rdy on the microSPARC II */
2728                 case 0x10 ... 0x1f: /* implementation-dependent in the
2729                                        SPARCv8 manual, rdy on the
2730                                        microSPARC II */
2731                     /* Read Asr17 */
2732                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2733                         TCGv t = gen_dest_gpr(dc, rd);
2734                         /* Read Asr17 for a Leon3 monoprocessor */
2735                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2736                         gen_store_gpr(dc, rd, t);
2737                         break;
2738                     }
2739 #endif
2740                     gen_store_gpr(dc, rd, cpu_y);
2741                     break;
2742 #ifdef TARGET_SPARC64
2743                 case 0x2: /* V9 rdccr */
2744                     update_psr(dc);
2745                     gen_helper_rdccr(cpu_dst, cpu_env);
2746                     gen_store_gpr(dc, rd, cpu_dst);
2747                     break;
2748                 case 0x3: /* V9 rdasi */
2749                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2750                     gen_store_gpr(dc, rd, cpu_dst);
2751                     break;
2752                 case 0x4: /* V9 rdtick */
2753                     {
2754                         TCGv_ptr r_tickptr;
2755
2756                         r_tickptr = tcg_temp_new_ptr();
2757                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2758                                        offsetof(CPUSPARCState, tick));
2759                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2760                         tcg_temp_free_ptr(r_tickptr);
2761                         gen_store_gpr(dc, rd, cpu_dst);
2762                     }
2763                     break;
2764                 case 0x5: /* V9 rdpc */
2765                     {
2766                         TCGv t = gen_dest_gpr(dc, rd);
2767                         if (unlikely(AM_CHECK(dc))) {
2768                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2769                         } else {
2770                             tcg_gen_movi_tl(t, dc->pc);
2771                         }
2772                         gen_store_gpr(dc, rd, t);
2773                     }
2774                     break;
2775                 case 0x6: /* V9 rdfprs */
2776                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2777                     gen_store_gpr(dc, rd, cpu_dst);
2778                     break;
2779                 case 0xf: /* V9 membar */
2780                     break; /* no effect */
2781                 case 0x13: /* Graphics Status */
2782                     if (gen_trap_ifnofpu(dc)) {
2783                         goto jmp_insn;
2784                     }
2785                     gen_store_gpr(dc, rd, cpu_gsr);
2786                     break;
2787                 case 0x16: /* Softint */
2788                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2789                     gen_store_gpr(dc, rd, cpu_dst);
2790                     break;
2791                 case 0x17: /* Tick compare */
2792                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
2793                     break;
2794                 case 0x18: /* System tick */
2795                     {
2796                         TCGv_ptr r_tickptr;
2797
2798                         r_tickptr = tcg_temp_new_ptr();
2799                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2800                                        offsetof(CPUSPARCState, stick));
2801                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2802                         tcg_temp_free_ptr(r_tickptr);
2803                         gen_store_gpr(dc, rd, cpu_dst);
2804                     }
2805                     break;
2806                 case 0x19: /* System tick compare */
2807                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
2808                     break;
2809                 case 0x10: /* Performance Control */
2810                 case 0x11: /* Performance Instrumentation Counter */
2811                 case 0x12: /* Dispatch Control */
2812                 case 0x14: /* Softint set, WO */
2813                 case 0x15: /* Softint clear, WO */
2814 #endif
2815                 default:
2816                     goto illegal_insn;
2817                 }
2818 #if !defined(CONFIG_USER_ONLY)
2819             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2820 #ifndef TARGET_SPARC64
2821                 if (!supervisor(dc)) {
2822                     goto priv_insn;
2823                 }
2824                 update_psr(dc);
2825                 gen_helper_rdpsr(cpu_dst, cpu_env);
2826 #else
2827                 CHECK_IU_FEATURE(dc, HYPV);
2828                 if (!hypervisor(dc))
2829                     goto priv_insn;
2830                 rs1 = GET_FIELD(insn, 13, 17);
2831                 switch (rs1) {
2832                 case 0: // hpstate
2833                     // gen_op_rdhpstate();
2834                     break;
2835                 case 1: // htstate
2836                     // gen_op_rdhtstate();
2837                     break;
2838                 case 3: // hintp
2839                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2840                     break;
2841                 case 5: // htba
2842                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2843                     break;
2844                 case 6: // hver
2845                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2846                     break;
2847                 case 31: // hstick_cmpr
2848                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2849                     break;
2850                 default:
2851                     goto illegal_insn;
2852                 }
2853 #endif
2854                 gen_store_gpr(dc, rd, cpu_dst);
2855                 break;
2856             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2857                 if (!supervisor(dc)) {
2858                     goto priv_insn;
2859                 }
2860                 cpu_tmp0 = get_temp_tl(dc);
2861 #ifdef TARGET_SPARC64
2862                 rs1 = GET_FIELD(insn, 13, 17);
2863                 switch (rs1) {
2864                 case 0: // tpc
2865                     {
2866                         TCGv_ptr r_tsptr;
2867
2868                         r_tsptr = tcg_temp_new_ptr();
2869                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2870                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2871                                       offsetof(trap_state, tpc));
2872                         tcg_temp_free_ptr(r_tsptr);
2873                     }
2874                     break;
2875                 case 1: // tnpc
2876                     {
2877                         TCGv_ptr r_tsptr;
2878
2879                         r_tsptr = tcg_temp_new_ptr();
2880                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2881                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2882                                       offsetof(trap_state, tnpc));
2883                         tcg_temp_free_ptr(r_tsptr);
2884                     }
2885                     break;
2886                 case 2: // tstate
2887                     {
2888                         TCGv_ptr r_tsptr;
2889
2890                         r_tsptr = tcg_temp_new_ptr();
2891                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2892                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2893                                       offsetof(trap_state, tstate));
2894                         tcg_temp_free_ptr(r_tsptr);
2895                     }
2896                     break;
2897                 case 3: // tt
2898                     {
2899                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
2900
2901                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2902                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
2903                                          offsetof(trap_state, tt));
2904                         tcg_temp_free_ptr(r_tsptr);
2905                     }
2906                     break;
2907                 case 4: // tick
2908                     {
2909                         TCGv_ptr r_tickptr;
2910
2911                         r_tickptr = tcg_temp_new_ptr();
2912                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2913                                        offsetof(CPUSPARCState, tick));
2914                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2915                         tcg_temp_free_ptr(r_tickptr);
2916                     }
2917                     break;
2918                 case 5: // tba
2919                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2920                     break;
2921                 case 6: // pstate
2922                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2923                                      offsetof(CPUSPARCState, pstate));
2924                     break;
2925                 case 7: // tl
2926                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2927                                      offsetof(CPUSPARCState, tl));
2928                     break;
2929                 case 8: // pil
2930                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2931                                      offsetof(CPUSPARCState, psrpil));
2932                     break;
2933                 case 9: // cwp
2934                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
2935                     break;
2936                 case 10: // cansave
2937                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2938                                      offsetof(CPUSPARCState, cansave));
2939                     break;
2940                 case 11: // canrestore
2941                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2942                                      offsetof(CPUSPARCState, canrestore));
2943                     break;
2944                 case 12: // cleanwin
2945                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2946                                      offsetof(CPUSPARCState, cleanwin));
2947                     break;
2948                 case 13: // otherwin
2949                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2950                                      offsetof(CPUSPARCState, otherwin));
2951                     break;
2952                 case 14: // wstate
2953                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2954                                      offsetof(CPUSPARCState, wstate));
2955                     break;
2956                 case 16: // UA2005 gl
2957                     CHECK_IU_FEATURE(dc, GL);
2958                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2959                                      offsetof(CPUSPARCState, gl));
2960                     break;
2961                 case 26: // UA2005 strand status
2962                     CHECK_IU_FEATURE(dc, HYPV);
2963                     if (!hypervisor(dc))
2964                         goto priv_insn;
2965                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2966                     break;
2967                 case 31: // ver
2968                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2969                     break;
2970                 case 15: // fq
2971                 default:
2972                     goto illegal_insn;
2973                 }
2974 #else
2975                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2976 #endif
2977                 gen_store_gpr(dc, rd, cpu_tmp0);
2978                 break;
2979             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2980 #ifdef TARGET_SPARC64
2981                 save_state(dc);
2982                 gen_helper_flushw(cpu_env);
2983 #else
2984                 if (!supervisor(dc))
2985                     goto priv_insn;
2986                 gen_store_gpr(dc, rd, cpu_tbr);
2987 #endif
2988                 break;
2989 #endif
2990             } else if (xop == 0x34) {   /* FPU Operations */
2991                 if (gen_trap_ifnofpu(dc)) {
2992                     goto jmp_insn;
2993                 }
2994                 gen_op_clear_ieee_excp_and_FTT();
2995                 rs1 = GET_FIELD(insn, 13, 17);
2996                 rs2 = GET_FIELD(insn, 27, 31);
2997                 xop = GET_FIELD(insn, 18, 26);
2998                 save_state(dc);
2999                 switch (xop) {
3000                 case 0x1: /* fmovs */
3001                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3002                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3003                     break;
3004                 case 0x5: /* fnegs */
3005                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3006                     break;
3007                 case 0x9: /* fabss */
3008                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3009                     break;
3010                 case 0x29: /* fsqrts */
3011                     CHECK_FPU_FEATURE(dc, FSQRT);
3012                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3013                     break;
3014                 case 0x2a: /* fsqrtd */
3015                     CHECK_FPU_FEATURE(dc, FSQRT);
3016                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3017                     break;
3018                 case 0x2b: /* fsqrtq */
3019                     CHECK_FPU_FEATURE(dc, FLOAT128);
3020                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3021                     break;
3022                 case 0x41: /* fadds */
3023                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3024                     break;
3025                 case 0x42: /* faddd */
3026                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3027                     break;
3028                 case 0x43: /* faddq */
3029                     CHECK_FPU_FEATURE(dc, FLOAT128);
3030                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3031                     break;
3032                 case 0x45: /* fsubs */
3033                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3034                     break;
3035                 case 0x46: /* fsubd */
3036                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3037                     break;
3038                 case 0x47: /* fsubq */
3039                     CHECK_FPU_FEATURE(dc, FLOAT128);
3040                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3041                     break;
3042                 case 0x49: /* fmuls */
3043                     CHECK_FPU_FEATURE(dc, FMUL);
3044                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3045                     break;
3046                 case 0x4a: /* fmuld */
3047                     CHECK_FPU_FEATURE(dc, FMUL);
3048                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3049                     break;
3050                 case 0x4b: /* fmulq */
3051                     CHECK_FPU_FEATURE(dc, FLOAT128);
3052                     CHECK_FPU_FEATURE(dc, FMUL);
3053                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3054                     break;
3055                 case 0x4d: /* fdivs */
3056                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3057                     break;
3058                 case 0x4e: /* fdivd */
3059                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3060                     break;
3061                 case 0x4f: /* fdivq */
3062                     CHECK_FPU_FEATURE(dc, FLOAT128);
3063                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3064                     break;
3065                 case 0x69: /* fsmuld */
3066                     CHECK_FPU_FEATURE(dc, FSMULD);
3067                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3068                     break;
3069                 case 0x6e: /* fdmulq */
3070                     CHECK_FPU_FEATURE(dc, FLOAT128);
3071                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3072                     break;
3073                 case 0xc4: /* fitos */
3074                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3075                     break;
3076                 case 0xc6: /* fdtos */
3077                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3078                     break;
3079                 case 0xc7: /* fqtos */
3080                     CHECK_FPU_FEATURE(dc, FLOAT128);
3081                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3082                     break;
3083                 case 0xc8: /* fitod */
3084                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3085                     break;
3086                 case 0xc9: /* fstod */
3087                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3088                     break;
3089                 case 0xcb: /* fqtod */
3090                     CHECK_FPU_FEATURE(dc, FLOAT128);
3091                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3092                     break;
3093                 case 0xcc: /* fitoq */
3094                     CHECK_FPU_FEATURE(dc, FLOAT128);
3095                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3096                     break;
3097                 case 0xcd: /* fstoq */
3098                     CHECK_FPU_FEATURE(dc, FLOAT128);
3099                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3100                     break;
3101                 case 0xce: /* fdtoq */
3102                     CHECK_FPU_FEATURE(dc, FLOAT128);
3103                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3104                     break;
3105                 case 0xd1: /* fstoi */
3106                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3107                     break;
3108                 case 0xd2: /* fdtoi */
3109                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3110                     break;
3111                 case 0xd3: /* fqtoi */
3112                     CHECK_FPU_FEATURE(dc, FLOAT128);
3113                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3114                     break;
3115 #ifdef TARGET_SPARC64
3116                 case 0x2: /* V9 fmovd */
3117                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3118                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3119                     break;
3120                 case 0x3: /* V9 fmovq */
3121                     CHECK_FPU_FEATURE(dc, FLOAT128);
3122                     gen_move_Q(rd, rs2);
3123                     break;
3124                 case 0x6: /* V9 fnegd */
3125                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3126                     break;
3127                 case 0x7: /* V9 fnegq */
3128                     CHECK_FPU_FEATURE(dc, FLOAT128);
3129                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3130                     break;
3131                 case 0xa: /* V9 fabsd */
3132                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3133                     break;
3134                 case 0xb: /* V9 fabsq */
3135                     CHECK_FPU_FEATURE(dc, FLOAT128);
3136                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3137                     break;
3138                 case 0x81: /* V9 fstox */
3139                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3140                     break;
3141                 case 0x82: /* V9 fdtox */
3142                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3143                     break;
3144                 case 0x83: /* V9 fqtox */
3145                     CHECK_FPU_FEATURE(dc, FLOAT128);
3146                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3147                     break;
3148                 case 0x84: /* V9 fxtos */
3149                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3150                     break;
3151                 case 0x88: /* V9 fxtod */
3152                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3153                     break;
3154                 case 0x8c: /* V9 fxtoq */
3155                     CHECK_FPU_FEATURE(dc, FLOAT128);
3156                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3157                     break;
3158 #endif
3159                 default:
3160                     goto illegal_insn;
3161                 }
3162             } else if (xop == 0x35) {   /* FPU conditional moves and compares */
3163 #ifdef TARGET_SPARC64
3164                 int cond;
3165 #endif
3166                 if (gen_trap_ifnofpu(dc)) {
3167                     goto jmp_insn;
3168                 }
3169                 gen_op_clear_ieee_excp_and_FTT();
3170                 rs1 = GET_FIELD(insn, 13, 17);
3171                 rs2 = GET_FIELD(insn, 27, 31);
3172                 xop = GET_FIELD(insn, 18, 26);
3173                 save_state(dc);
3174
3175 #ifdef TARGET_SPARC64
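/* FMOVR: V9 conditional FP move on the contents of an integer register
   (FMOVr).  The FMOVCC macros further below do the same for the %fcc and
   %icc/%xcc condition codes.  */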
3176 #define FMOVR(sz)                                                  \
3177                 do {                                               \
3178                     DisasCompare cmp;                              \
3179                     cond = GET_FIELD_SP(insn, 10, 12);             \
3180                     cpu_src1 = get_src1(dc, insn);                 \
3181                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3182                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3183                     free_compare(&cmp);                            \
3184                 } while (0)
3185
3186                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3187                     FMOVR(s);
3188                     break;
3189                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3190                     FMOVR(d);
3191                     break;
3192                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3193                     CHECK_FPU_FEATURE(dc, FLOAT128);
3194                     FMOVR(q);
3195                     break;
3196                 }
3197 #undef FMOVR
3198 #endif
3199                 switch (xop) {
3200 #ifdef TARGET_SPARC64
3201 #define FMOVCC(fcc, sz)                                                 \
3202                     do {                                                \
3203                         DisasCompare cmp;                               \
3204                         cond = GET_FIELD_SP(insn, 14, 17);              \
3205                         gen_fcompare(&cmp, fcc, cond);                  \
3206                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3207                         free_compare(&cmp);                             \
3208                     } while (0)
3209
3210                     case 0x001: /* V9 fmovscc %fcc0 */
3211                         FMOVCC(0, s);
3212                         break;
3213                     case 0x002: /* V9 fmovdcc %fcc0 */
3214                         FMOVCC(0, d);
3215                         break;
3216                     case 0x003: /* V9 fmovqcc %fcc0 */
3217                         CHECK_FPU_FEATURE(dc, FLOAT128);
3218                         FMOVCC(0, q);
3219                         break;
3220                     case 0x041: /* V9 fmovscc %fcc1 */
3221                         FMOVCC(1, s);
3222                         break;
3223                     case 0x042: /* V9 fmovdcc %fcc1 */
3224                         FMOVCC(1, d);
3225                         break;
3226                     case 0x043: /* V9 fmovqcc %fcc1 */
3227                         CHECK_FPU_FEATURE(dc, FLOAT128);
3228                         FMOVCC(1, q);
3229                         break;
3230                     case 0x081: /* V9 fmovscc %fcc2 */
3231                         FMOVCC(2, s);
3232                         break;
3233                     case 0x082: /* V9 fmovdcc %fcc2 */
3234                         FMOVCC(2, d);
3235                         break;
3236                     case 0x083: /* V9 fmovqcc %fcc2 */
3237                         CHECK_FPU_FEATURE(dc, FLOAT128);
3238                         FMOVCC(2, q);
3239                         break;
3240                     case 0x0c1: /* V9 fmovscc %fcc3 */
3241                         FMOVCC(3, s);
3242                         break;
3243                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3244                         FMOVCC(3, d);
3245                         break;
3246                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3247                         CHECK_FPU_FEATURE(dc, FLOAT128);
3248                         FMOVCC(3, q);
3249                         break;
3250 #undef FMOVCC
3251 #define FMOVCC(xcc, sz)                                                 \
3252                     do {                                                \
3253                         DisasCompare cmp;                               \
3254                         cond = GET_FIELD_SP(insn, 14, 17);              \
3255                         gen_compare(&cmp, xcc, cond, dc);               \
3256                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3257                         free_compare(&cmp);                             \
3258                     } while (0)
3259
3260                     case 0x101: /* V9 fmovscc %icc */
3261                         FMOVCC(0, s);
3262                         break;
3263                     case 0x102: /* V9 fmovdcc %icc */
3264                         FMOVCC(0, d);
3265                         break;
3266                     case 0x103: /* V9 fmovqcc %icc */
3267                         CHECK_FPU_FEATURE(dc, FLOAT128);
3268                         FMOVCC(0, q);
3269                         break;
3270                     case 0x181: /* V9 fmovscc %xcc */
3271                         FMOVCC(1, s);
3272                         break;
3273                     case 0x182: /* V9 fmovdcc %xcc */
3274                         FMOVCC(1, d);
3275                         break;
3276                     case 0x183: /* V9 fmovqcc %xcc */
3277                         CHECK_FPU_FEATURE(dc, FLOAT128);
3278                         FMOVCC(1, q);
3279                         break;
3280 #undef FMOVCC
3281 #endif
3282                     case 0x51: /* fcmps, V9 %fcc */
3283                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3284                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3285                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3286                         break;
3287                     case 0x52: /* fcmpd, V9 %fcc */
3288                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3289                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3290                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3291                         break;
3292                     case 0x53: /* fcmpq, V9 %fcc */
3293                         CHECK_FPU_FEATURE(dc, FLOAT128);
3294                         gen_op_load_fpr_QT0(QFPREG(rs1));
3295                         gen_op_load_fpr_QT1(QFPREG(rs2));
3296                         gen_op_fcmpq(rd & 3);
3297                         break;
3298                     case 0x55: /* fcmpes, V9 %fcc */
3299                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3300                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3301                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3302                         break;
3303                     case 0x56: /* fcmped, V9 %fcc */
3304                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3305                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3306                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3307                         break;
3308                     case 0x57: /* fcmpeq, V9 %fcc */
3309                         CHECK_FPU_FEATURE(dc, FLOAT128);
3310                         gen_op_load_fpr_QT0(QFPREG(rs1));
3311                         gen_op_load_fpr_QT1(QFPREG(rs2));
3312                         gen_op_fcmpeq(rd & 3);
3313                         break;
3314                     default:
3315                         goto illegal_insn;
3316                 }
3317             } else if (xop == 0x2) {
3318                 TCGv dst = gen_dest_gpr(dc, rd);
3319                 rs1 = GET_FIELD(insn, 13, 17);
3320                 if (rs1 == 0) {
3321                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3322                     if (IS_IMM) {       /* immediate */
3323                         simm = GET_FIELDs(insn, 19, 31);
3324                         tcg_gen_movi_tl(dst, simm);
3325                         gen_store_gpr(dc, rd, dst);
3326                     } else {            /* register */
3327                         rs2 = GET_FIELD(insn, 27, 31);
3328                         if (rs2 == 0) {
3329                             tcg_gen_movi_tl(dst, 0);
3330                             gen_store_gpr(dc, rd, dst);
3331                         } else {
3332                             cpu_src2 = gen_load_gpr(dc, rs2);
3333                             gen_store_gpr(dc, rd, cpu_src2);
3334                         }
3335                     }
3336                 } else {
3337                     cpu_src1 = get_src1(dc, insn);
3338                     if (IS_IMM) {       /* immediate */
3339                         simm = GET_FIELDs(insn, 19, 31);
3340                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3341                         gen_store_gpr(dc, rd, dst);
3342                     } else {            /* register */
3343                         rs2 = GET_FIELD(insn, 27, 31);
3344                         if (rs2 == 0) {
3345                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3346                             gen_store_gpr(dc, rd, cpu_src1);
3347                         } else {
3348                             cpu_src2 = gen_load_gpr(dc, rs2);
3349                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3350                             gen_store_gpr(dc, rd, dst);
3351                         }
3352                     }
3353                 }
3354 #ifdef TARGET_SPARC64
3355             } else if (xop == 0x25) { /* sll, V9 sllx */
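                      /* Bit 12 of the insn (the V9 X field) selects sllx,
                         which takes a 6-bit shift count; plain sll masks
                         the count to 5 bits.  */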
3356                 cpu_src1 = get_src1(dc, insn);
3357                 if (IS_IMM) {   /* immediate */
3358                     simm = GET_FIELDs(insn, 20, 31);
3359                     if (insn & (1 << 12)) {
3360                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3361                     } else {
3362                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3363                     }
3364                 } else {                /* register */
3365                     rs2 = GET_FIELD(insn, 27, 31);
3366                     cpu_src2 = gen_load_gpr(dc, rs2);
3367                     cpu_tmp0 = get_temp_tl(dc);
3368                     if (insn & (1 << 12)) {
3369                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3370                     } else {
3371                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3372                     }
3373                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3374                 }
3375                 gen_store_gpr(dc, rd, cpu_dst);
3376             } else if (xop == 0x26) { /* srl, V9 srlx */
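                      /* As for sll/sllx, bit 12 selects the 64-bit srlx
                         form.  Plain srl zero-extends the low 32 bits
                         first so the result matches 32-bit semantics.  */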
3377                 cpu_src1 = get_src1(dc, insn);
3378                 if (IS_IMM) {   /* immediate */
3379                     simm = GET_FIELDs(insn, 20, 31);
3380                     if (insn & (1 << 12)) {
3381                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3382                     } else {
3383                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3384                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3385                     }
3386                 } else {                /* register */
3387                     rs2 = GET_FIELD(insn, 27, 31);
3388                     cpu_src2 = gen_load_gpr(dc, rs2);
3389                     cpu_tmp0 = get_temp_tl(dc);
3390                     if (insn & (1 << 12)) {
3391                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3392                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3393                     } else {
3394                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3395                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3396                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3397                     }
3398                 }
3399                 gen_store_gpr(dc, rd, cpu_dst);
3400             } else if (xop == 0x27) { /* sra, V9 srax */
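                      /* Likewise bit 12 selects srax; plain sra
                         sign-extends from bit 31 before the arithmetic
                         shift.  */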
3401                 cpu_src1 = get_src1(dc, insn);
3402                 if (IS_IMM) {   /* immediate */
3403                     simm = GET_FIELDs(insn, 20, 31);
3404                     if (insn & (1 << 12)) {
3405                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3406                     } else {
3407                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3408                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3409                     }
3410                 } else {                /* register */
3411                     rs2 = GET_FIELD(insn, 27, 31);
3412                     cpu_src2 = gen_load_gpr(dc, rs2);
3413                     cpu_tmp0 = get_temp_tl(dc);
3414                     if (insn & (1 << 12)) {
3415                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3416                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3417                     } else {
3418                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3419                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3420                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3421                     }
3422                 }
3423                 gen_store_gpr(dc, rd, cpu_dst);
3424 #endif
3425             } else if (xop < 0x36) {
3426                 if (xop < 0x20) {
3427                     cpu_src1 = get_src1(dc, insn);
3428                     cpu_src2 = get_src2(dc, insn);
3429                     switch (xop & ~0x10) {
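                          /* Bit 4 of xop selects the flag-setting variant
                             (addcc, andcc, ...).  For the logical ops the
                             flags are evaluated lazily: only the result is
                             kept in cpu_cc_dst with cc_op = CC_OP_LOGIC.  */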
3430                     case 0x0: /* add */
3431                         if (xop & 0x10) {
3432                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3433                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3434                             dc->cc_op = CC_OP_ADD;
3435                         } else {
3436                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3437                         }
3438                         break;
3439                     case 0x1: /* and */
3440                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3441                         if (xop & 0x10) {
3442                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3443                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3444                             dc->cc_op = CC_OP_LOGIC;
3445                         }
3446                         break;
3447                     case 0x2: /* or */
3448                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3449                         if (xop & 0x10) {
3450                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3451                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3452                             dc->cc_op = CC_OP_LOGIC;
3453                         }
3454                         break;
3455                     case 0x3: /* xor */
3456                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3457                         if (xop & 0x10) {
3458                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3459                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3460                             dc->cc_op = CC_OP_LOGIC;
3461                         }
3462                         break;
3463                     case 0x4: /* sub */
3464                         if (xop & 0x10) {
3465                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3466                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3467                             dc->cc_op = CC_OP_SUB;
3468                         } else {
3469                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3470                         }
3471                         break;
3472                     case 0x5: /* andn */
3473                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3474                         if (xop & 0x10) {
3475                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3476                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3477                             dc->cc_op = CC_OP_LOGIC;
3478                         }
3479                         break;
3480                     case 0x6: /* orn */
3481                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3482                         if (xop & 0x10) {
3483                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3484                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3485                             dc->cc_op = CC_OP_LOGIC;
3486                         }
3487                         break;
3488                     case 0x7: /* xorn */
3489                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3490                         if (xop & 0x10) {
3491                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3492                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3493                             dc->cc_op = CC_OP_LOGIC;
3494                         }
3495                         break;
3496                     case 0x8: /* addx, V9 addc */
3497                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3498                                         (xop & 0x10));
3499                         break;
3500 #ifdef TARGET_SPARC64
3501                     case 0x9: /* V9 mulx */
3502                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3503                         break;
3504 #endif
3505                     case 0xa: /* umul */
3506                         CHECK_IU_FEATURE(dc, MUL);
3507                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3508                         if (xop & 0x10) {
3509                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3510                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3511                             dc->cc_op = CC_OP_LOGIC;
3512                         }
3513                         break;
3514                     case 0xb: /* smul */
3515                         CHECK_IU_FEATURE(dc, MUL);
3516                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3517                         if (xop & 0x10) {
3518                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3519                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3520                             dc->cc_op = CC_OP_LOGIC;
3521                         }
3522                         break;
3523                     case 0xc: /* subx, V9 subc */
3524                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3525                                         (xop & 0x10));
3526                         break;
3527 #ifdef TARGET_SPARC64
3528                     case 0xd: /* V9 udivx */
3529                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3530                         break;
3531 #endif
3532                     case 0xe: /* udiv */
3533                         CHECK_IU_FEATURE(dc, DIV);
3534                         if (xop & 0x10) {
3535                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3536                                                cpu_src2);
3537                             dc->cc_op = CC_OP_DIV;
3538                         } else {
3539                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3540                                             cpu_src2);
3541                         }
3542                         break;
3543                     case 0xf: /* sdiv */
3544                         CHECK_IU_FEATURE(dc, DIV);
3545                         if (xop & 0x10) {
3546                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3547                                                cpu_src2);
3548                             dc->cc_op = CC_OP_DIV;
3549                         } else {
3550                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3551                                             cpu_src2);
3552                         }
3553                         break;
3554                     default:
3555                         goto illegal_insn;
3556                     }
3557                     gen_store_gpr(dc, rd, cpu_dst);
3558                 } else {
3559                     cpu_src1 = get_src1(dc, insn);
3560                     cpu_src2 = get_src2(dc, insn);
3561                     switch (xop) {
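                          /* xop >= 0x20: tagged arithmetic and the state
                             register writes.  The *cctv variants go through
                             helpers so that tag overflow can trap.  */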
3562                     case 0x20: /* taddcc */
3563                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3564                         gen_store_gpr(dc, rd, cpu_dst);
3565                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3566                         dc->cc_op = CC_OP_TADD;
3567                         break;
3568                     case 0x21: /* tsubcc */
3569                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3570                         gen_store_gpr(dc, rd, cpu_dst);
3571                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3572                         dc->cc_op = CC_OP_TSUB;
3573                         break;
3574                     case 0x22: /* taddcctv */
3575                         gen_helper_taddcctv(cpu_dst, cpu_env,
3576                                             cpu_src1, cpu_src2);
3577                         gen_store_gpr(dc, rd, cpu_dst);
3578                         dc->cc_op = CC_OP_TADDTV;
3579                         break;
3580                     case 0x23: /* tsubcctv */
3581                         gen_helper_tsubcctv(cpu_dst, cpu_env,
3582                                             cpu_src1, cpu_src2);
3583                         gen_store_gpr(dc, rd, cpu_dst);
3584                         dc->cc_op = CC_OP_TSUBTV;
3585                         break;
3586                     case 0x24: /* mulscc */
3587                         update_psr(dc);
3588                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3589                         gen_store_gpr(dc, rd, cpu_dst);
3590                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3591                         dc->cc_op = CC_OP_ADD;
3592                         break;
3593 #ifndef TARGET_SPARC64
3594                     case 0x25:  /* sll */
3595                         if (IS_IMM) { /* immediate */
3596                             simm = GET_FIELDs(insn, 20, 31);
3597                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3598                         } else { /* register */
3599                             cpu_tmp0 = get_temp_tl(dc);
3600                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3601                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3602                         }
3603                         gen_store_gpr(dc, rd, cpu_dst);
3604                         break;
3605                     case 0x26:  /* srl */
3606                         if (IS_IMM) { /* immediate */
3607                             simm = GET_FIELDs(insn, 20, 31);
3608                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3609                         } else { /* register */
3610                             cpu_tmp0 = get_temp_tl(dc);
3611                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3612                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3613                         }
3614                         gen_store_gpr(dc, rd, cpu_dst);
3615                         break;
3616                     case 0x27:  /* sra */
3617                         if (IS_IMM) { /* immediate */
3618                             simm = GET_FIELDs(insn, 20, 31);
3619                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3620                         } else { /* register */
3621                             cpu_tmp0 = get_temp_tl(dc);
3622                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3623                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3624                         }
3625                         gen_store_gpr(dc, rd, cpu_dst);
3626                         break;
3627 #endif
3628                     case 0x30:
3629                         {
3630                             cpu_tmp0 = get_temp_tl(dc);
3631                             switch (rd) {
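                                  /* For wr %y / wr %asr the value written is
                                     r[rs1] XOR reg_or_imm, hence the xor at
                                     the start of each case; rd selects the
                                     ancillary state register.  */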
3632                             case 0: /* wry */
3633                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3634                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3635                                 break;
3636 #ifndef TARGET_SPARC64
3637                             case 0x01 ... 0x0f: /* undefined in the
3638                                                    SPARCv8 manual, nop
3639                                                    on the microSPARC
3640                                                    II */
3641                             case 0x10 ... 0x1f: /* implementation-dependent
3642                                                    in the SPARCv8
3643                                                    manual, nop on the
3644                                                    microSPARC II */
3645                                 break;
3646 #else
3647                             case 0x2: /* V9 wrccr */
3648                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3649                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
3650                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3651                                 dc->cc_op = CC_OP_FLAGS;
3652                                 break;
3653                             case 0x3: /* V9 wrasi */
3654                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3655                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
3656                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_tmp0);
3657                                 break;
3658                             case 0x6: /* V9 wrfprs */
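                                      /* Changing FPRS may enable or disable
                                         the FPU, which the translated code
                                         depends on, so end the TB and resume
                                         at the next instruction.  */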
3659                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3660                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
3661                                 save_state(dc);
3662                                 gen_op_next_insn();
3663                                 tcg_gen_exit_tb(0);
3664                                 dc->is_br = 1;
3665                                 break;
3666                             case 0xf: /* V9 sir, nop if user */
3667 #if !defined(CONFIG_USER_ONLY)
3668                                 if (supervisor(dc)) {
3669                                     ; // XXX
3670                                 }
3671 #endif
3672                                 break;
3673                             case 0x13: /* Graphics Status */
3674                                 if (gen_trap_ifnofpu(dc)) {
3675                                     goto jmp_insn;
3676                                 }
3677                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3678                                 break;
3679                             case 0x14: /* Softint set */
3680                                 if (!supervisor(dc))
3681                                     goto illegal_insn;
3682                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3683                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
3684                                 break;
3685                             case 0x15: /* Softint clear */
3686                                 if (!supervisor(dc))
3687                                     goto illegal_insn;
3688                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3689                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
3690                                 break;
3691                             case 0x16: /* Softint write */
3692                                 if (!supervisor(dc))
3693                                     goto illegal_insn;
3694                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3695                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
3696                                 break;
3697                             case 0x17: /* Tick compare */
3698 #if !defined(CONFIG_USER_ONLY)
3699                                 if (!supervisor(dc))
3700                                     goto illegal_insn;
3701 #endif
3702                                 {
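                                          /* The %tick timer lives in
                                             CPUSPARCState; load its pointer
                                             and let the helper reprogram the
                                             comparison limit.  */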
3703                                     TCGv_ptr r_tickptr;
3704
3705                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3706                                                    cpu_src2);
3707                                     r_tickptr = tcg_temp_new_ptr();
3708                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3709                                                    offsetof(CPUSPARCState, tick));
3710                                     gen_helper_tick_set_limit(r_tickptr,
3711                                                               cpu_tick_cmpr);
3712                                     tcg_temp_free_ptr(r_tickptr);
3713                                 }
3714                                 break;
3715                             case 0x18: /* System tick */
3716 #if !defined(CONFIG_USER_ONLY)
3717                                 if (!supervisor(dc))
3718                                     goto illegal_insn;
3719 #endif
3720                                 {
3721                                     TCGv_ptr r_tickptr;
3722
3723                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3724                                                    cpu_src2);
3725                                     r_tickptr = tcg_temp_new_ptr();
3726                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3727                                                    offsetof(CPUSPARCState, stick));
3728                                     gen_helper_tick_set_count(r_tickptr,
3729                                                               cpu_tmp0);
3730                                     tcg_temp_free_ptr(r_tickptr);
3731                                 }
3732                                 break;
3733                             case 0x19: /* System tick compare */
3734 #if !defined(CONFIG_USER_ONLY)
3735                                 if (!supervisor(dc))
3736                                     goto illegal_insn;
3737 #endif
3738                                 {
3739                                     TCGv_ptr r_tickptr;
3740
3741                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3742                                                    cpu_src2);
3743                                     r_tickptr = tcg_temp_new_ptr();
3744                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3745                                                    offsetof(CPUSPARCState, stick));
3746                                     gen_helper_tick_set_limit(r_tickptr,
3747                                                               cpu_stick_cmpr);
3748                                     tcg_temp_free_ptr(r_tickptr);
3749                                 }
3750                                 break;
3751
3752                             case 0x10: /* Performance Control */
3753                             case 0x11: /* Performance Instrumentation
3754                                           Counter */
3755                             case 0x12: /* Dispatch Control */
3756 #endif
3757                             default:
3758                                 goto illegal_insn;
3759                             }
3760                         }
3761                         break;
3762 #if !defined(CONFIG_USER_ONLY)
3763                     case 0x31: /* wrpsr, V9 saved, restored */
3764                         {
3765                             if (!supervisor(dc))
3766                                 goto priv_insn;
3767 #ifdef TARGET_SPARC64
3768                             switch (rd) {
3769                             case 0:
3770                                 gen_helper_saved(cpu_env);
3771                                 break;
3772                             case 1:
3773                                 gen_helper_restored(cpu_env);
3774                                 break;
3775                             case 2: /* UA2005 allclean */
3776                             case 3: /* UA2005 otherw */
3777                             case 4: /* UA2005 normalw */
3778                             case 5: /* UA2005 invalw */
3779                                 // XXX
3780                             default:
3781                                 goto illegal_insn;
3782                             }
3783 #else
3784                             cpu_tmp0 = get_temp_tl(dc);
3785                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3786                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
3787                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3788                             dc->cc_op = CC_OP_FLAGS;
3789                             save_state(dc);
3790                             gen_op_next_insn();
3791                             tcg_gen_exit_tb(0);
3792                             dc->is_br = 1;
3793 #endif
3794                         }
3795                         break;
3796                     case 0x32: /* wrwim, V9 wrpr */
3797                         {
3798                             if (!supervisor(dc))
3799                                 goto priv_insn;
3800                             cpu_tmp0 = get_temp_tl(dc);
3801                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3802 #ifdef TARGET_SPARC64
3803                             switch (rd) {
3804                             case 0: // tpc
3805                                 {
3806                                     TCGv_ptr r_tsptr;
3807
3808                                     r_tsptr = tcg_temp_new_ptr();
3809                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3810                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3811                                                   offsetof(trap_state, tpc));
3812                                     tcg_temp_free_ptr(r_tsptr);
3813                                 }
3814                                 break;
3815                             case 1: // tnpc
3816                                 {
3817                                     TCGv_ptr r_tsptr;
3818
3819                                     r_tsptr = tcg_temp_new_ptr();
3820                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3821                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3822                                                   offsetof(trap_state, tnpc));
3823                                     tcg_temp_free_ptr(r_tsptr);
3824                                 }
3825                                 break;
3826                             case 2: // tstate
3827                                 {
3828                                     TCGv_ptr r_tsptr;
3829
3830                                     r_tsptr = tcg_temp_new_ptr();
3831                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3832                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3833                                                   offsetof(trap_state,
3834                                                            tstate));
3835                                     tcg_temp_free_ptr(r_tsptr);
3836                                 }
3837                                 break;
3838                             case 3: // tt
3839                                 {
3840                                     TCGv_ptr r_tsptr;
3841
3842                                     r_tsptr = tcg_temp_new_ptr();
3843                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3844                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
3845                                                     offsetof(trap_state, tt));
3846                                     tcg_temp_free_ptr(r_tsptr);
3847                                 }
3848                                 break;
3849                             case 4: // tick
3850                                 {
3851                                     TCGv_ptr r_tickptr;
3852
3853                                     r_tickptr = tcg_temp_new_ptr();
3854                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3855                                                    offsetof(CPUSPARCState, tick));
3856                                     gen_helper_tick_set_count(r_tickptr,
3857                                                               cpu_tmp0);
3858                                     tcg_temp_free_ptr(r_tickptr);
3859                                 }
3860                                 break;
3861                             case 5: // tba
3862                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3863                                 break;
3864                             case 6: // pstate
3865                                 save_state(dc);
3866                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3867                                 dc->npc = DYNAMIC_PC;
3868                                 break;
3869                             case 7: // tl
3870                                 save_state(dc);
3871                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3872                                                offsetof(CPUSPARCState, tl));
3873                                 dc->npc = DYNAMIC_PC;
3874                                 break;
3875                             case 8: // pil
3876                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
3877                                 break;
3878                             case 9: // cwp
3879                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3880                                 break;
3881                             case 10: // cansave
3882                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3883                                                 offsetof(CPUSPARCState,
3884                                                          cansave));
3885                                 break;
3886                             case 11: // canrestore
3887                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3888                                                 offsetof(CPUSPARCState,
3889                                                          canrestore));
3890                                 break;
3891                             case 12: // cleanwin
3892                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3893                                                 offsetof(CPUSPARCState,
3894                                                          cleanwin));
3895                                 break;
3896                             case 13: // otherwin
3897                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3898                                                 offsetof(CPUSPARCState,
3899                                                          otherwin));
3900                                 break;
3901                             case 14: // wstate
3902                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3903                                                 offsetof(CPUSPARCState,
3904                                                          wstate));
3905                                 break;
3906                             case 16: // UA2005 gl
3907                                 CHECK_IU_FEATURE(dc, GL);
3908                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3909                                                 offsetof(CPUSPARCState, gl));
3910                                 break;
3911                             case 26: // UA2005 strand status
3912                                 CHECK_IU_FEATURE(dc, HYPV);
3913                                 if (!hypervisor(dc))
3914                                     goto priv_insn;
3915                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3916                                 break;
3917                             default:
3918                                 goto illegal_insn;
3919                             }
3920 #else
3921                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
3922                             if (dc->def->nwindows != 32) {
3923                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
3924                                                 (1 << dc->def->nwindows) - 1);
3925                             }
3926 #endif
3927                         }
3928                         break;
3929                     case 0x33: /* wrtbr, UA2005 wrhpr */
3930                         {
3931 #ifndef TARGET_SPARC64
3932                             if (!supervisor(dc))
3933                                 goto priv_insn;
3934                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3935 #else
3936                             CHECK_IU_FEATURE(dc, HYPV);
3937                             if (!hypervisor(dc))
3938                                 goto priv_insn;
3939                             cpu_tmp0 = get_temp_tl(dc);
3940                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3941                             switch (rd) {
3942                             case 0: // hpstate
3943                                 // XXX gen_op_wrhpstate();
3944                                 save_state(dc);
3945                                 gen_op_next_insn();
3946                                 tcg_gen_exit_tb(0);
3947                                 dc->is_br = 1;
3948                                 break;
3949                             case 1: // htstate
3950                                 // XXX gen_op_wrhtstate();
3951                                 break;
3952                             case 3: // hintp
3953                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3954                                 break;
3955                             case 5: // htba
3956                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3957                                 break;
3958                             case 31: // hstick_cmpr
3959                                 {
3960                                     TCGv_ptr r_tickptr;
3961
3962                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3963                                     r_tickptr = tcg_temp_new_ptr();
3964                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3965                                                    offsetof(CPUSPARCState, hstick));
3966                                     gen_helper_tick_set_limit(r_tickptr,
3967                                                               cpu_hstick_cmpr);
3968                                     tcg_temp_free_ptr(r_tickptr);
3969                                 }
3970                                 break;
3971                             case 6: // hver is read-only
3972                             default:
3973                                 goto illegal_insn;
3974                             }
3975 #endif
3976                         }
3977                         break;
3978 #endif
3979 #ifdef TARGET_SPARC64
3980                     case 0x2c: /* V9 movcc */
3981                         {
3982                             int cc = GET_FIELD_SP(insn, 11, 12);
3983                             int cond = GET_FIELD_SP(insn, 14, 17);
3984                             DisasCompare cmp;
3985                             TCGv dst;
3986
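                                  /* Bit 18 selects the integer condition
                                     codes (cc field 0 = %icc, 2 = %xcc)
                                     rather than the four %fcc sets.  */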
3987                             if (insn & (1 << 18)) {
3988                                 if (cc == 0) {
3989                                     gen_compare(&cmp, 0, cond, dc);
3990                                 } else if (cc == 2) {
3991                                     gen_compare(&cmp, 1, cond, dc);
3992                                 } else {
3993                                     goto illegal_insn;
3994                                 }
3995                             } else {
3996                                 gen_fcompare(&cmp, cc, cond);
3997                             }
3998
3999                             /* The get_src2 above loaded the normal 13-bit
4000                                immediate field, not the 11-bit field we have
4001                                in movcc.  But it did handle the reg case.  */
4002                             if (IS_IMM) {
4003                                 simm = GET_FIELD_SPs(insn, 0, 10);
4004                                 tcg_gen_movi_tl(cpu_src2, simm);
4005                             }
4006
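                                  /* movcond writes cpu_src2 to dst when the
                                     comparison holds and keeps the old
                                     register value otherwise, so no branch
                                     is emitted.  */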
4007                             dst = gen_load_gpr(dc, rd);
4008                             tcg_gen_movcond_tl(cmp.cond, dst,
4009                                                cmp.c1, cmp.c2,
4010                                                cpu_src2, dst);
4011                             free_compare(&cmp);
4012                             gen_store_gpr(dc, rd, dst);
4013                             break;
4014                         }
4015                     case 0x2d: /* V9 sdivx */
4016                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4017                         gen_store_gpr(dc, rd, cpu_dst);
4018                         break;
4019                     case 0x2e: /* V9 popc */
4020                         gen_helper_popc(cpu_dst, cpu_src2);
4021                         gen_store_gpr(dc, rd, cpu_dst);
4022                         break;
4023                     case 0x2f: /* V9 movr */
4024                         {
4025                             int cond = GET_FIELD_SP(insn, 10, 12);
4026                             DisasCompare cmp;
4027                             TCGv dst;
4028
4029                             gen_compare_reg(&cmp, cond, cpu_src1);
4030
4031                             /* The get_src2 above loaded the normal 13-bit
4032                                immediate field, not the 10-bit field we have
4033                                in movr.  But it did handle the reg case.  */
4034                             if (IS_IMM) {
4035                                 simm = GET_FIELD_SPs(insn, 0, 9);
4036                                 tcg_gen_movi_tl(cpu_src2, simm);
4037                             }
4038
4039                             dst = gen_load_gpr(dc, rd);
4040                             tcg_gen_movcond_tl(cmp.cond, dst,
4041                                                cmp.c1, cmp.c2,
4042                                                cpu_src2, dst);
4043                             free_compare(&cmp);
4044                             gen_store_gpr(dc, rd, dst);
4045                             break;
4046                         }
4047 #endif
4048                     default:
4049                         goto illegal_insn;
4050                     }
4051                 }
4052             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4053 #ifdef TARGET_SPARC64
4054                 int opf = GET_FIELD_SP(insn, 5, 13);
4055                 rs1 = GET_FIELD(insn, 13, 17);
4056                 rs2 = GET_FIELD(insn, 27, 31);
4057                 if (gen_trap_ifnofpu(dc)) {
4058                     goto jmp_insn;
4059                 }
4060
4061                 switch (opf) {
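                      /* Judging by the call sites below, gen_edge's trailing
                         arguments are the element width (8/16/32), whether
                         condition codes are set, and whether the
                         little-endian variant is used.  */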
4062                 case 0x000: /* VIS I edge8cc */
4063                     CHECK_FPU_FEATURE(dc, VIS1);
4064                     cpu_src1 = gen_load_gpr(dc, rs1);
4065                     cpu_src2 = gen_load_gpr(dc, rs2);
4066                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4067                     gen_store_gpr(dc, rd, cpu_dst);
4068                     break;
4069                 case 0x001: /* VIS II edge8n */
4070                     CHECK_FPU_FEATURE(dc, VIS2);
4071                     cpu_src1 = gen_load_gpr(dc, rs1);
4072                     cpu_src2 = gen_load_gpr(dc, rs2);
4073                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4074                     gen_store_gpr(dc, rd, cpu_dst);
4075                     break;
4076                 case 0x002: /* VIS I edge8lcc */
4077                     CHECK_FPU_FEATURE(dc, VIS1);
4078                     cpu_src1 = gen_load_gpr(dc, rs1);
4079                     cpu_src2 = gen_load_gpr(dc, rs2);
4080                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4081                     gen_store_gpr(dc, rd, cpu_dst);
4082                     break;
4083                 case 0x003: /* VIS II edge8ln */
4084                     CHECK_FPU_FEATURE(dc, VIS2);
4085                     cpu_src1 = gen_load_gpr(dc, rs1);
4086                     cpu_src2 = gen_load_gpr(dc, rs2);
4087                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4088                     gen_store_gpr(dc, rd, cpu_dst);
4089                     break;
4090                 case 0x004: /* VIS I edge16cc */
4091                     CHECK_FPU_FEATURE(dc, VIS1);
4092                     cpu_src1 = gen_load_gpr(dc, rs1);
4093                     cpu_src2 = gen_load_gpr(dc, rs2);
4094                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4095                     gen_store_gpr(dc, rd, cpu_dst);
4096                     break;
4097                 case 0x005: /* VIS II edge16n */
4098                     CHECK_FPU_FEATURE(dc, VIS2);
4099                     cpu_src1 = gen_load_gpr(dc, rs1);
4100                     cpu_src2 = gen_load_gpr(dc, rs2);
4101                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4102                     gen_store_gpr(dc, rd, cpu_dst);
4103                     break;
4104                 case 0x006: /* VIS I edge16lcc */
4105                     CHECK_FPU_FEATURE(dc, VIS1);
4106                     cpu_src1 = gen_load_gpr(dc, rs1);
4107                     cpu_src2 = gen_load_gpr(dc, rs2);
4108                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4109                     gen_store_gpr(dc, rd, cpu_dst);
4110                     break;
4111                 case 0x007: /* VIS II edge16ln */
4112                     CHECK_FPU_FEATURE(dc, VIS2);
4113                     cpu_src1 = gen_load_gpr(dc, rs1);
4114                     cpu_src2 = gen_load_gpr(dc, rs2);
4115                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4116                     gen_store_gpr(dc, rd, cpu_dst);
4117                     break;
4118                 case 0x008: /* VIS I edge32cc */
4119                     CHECK_FPU_FEATURE(dc, VIS1);
4120                     cpu_src1 = gen_load_gpr(dc, rs1);
4121                     cpu_src2 = gen_load_gpr(dc, rs2);
4122                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4123                     gen_store_gpr(dc, rd, cpu_dst);
4124                     break;
4125                 case 0x009: /* VIS II edge32n */
4126                     CHECK_FPU_FEATURE(dc, VIS2);
4127                     cpu_src1 = gen_load_gpr(dc, rs1);
4128                     cpu_src2 = gen_load_gpr(dc, rs2);
4129                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4130                     gen_store_gpr(dc, rd, cpu_dst);
4131                     break;
4132                 case 0x00a: /* VIS I edge32lcc */
4133                     CHECK_FPU_FEATURE(dc, VIS1);
4134                     cpu_src1 = gen_load_gpr(dc, rs1);
4135                     cpu_src2 = gen_load_gpr(dc, rs2);
4136                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4137                     gen_store_gpr(dc, rd, cpu_dst);
4138                     break;
4139                 case 0x00b: /* VIS II edge32ln */
4140                     CHECK_FPU_FEATURE(dc, VIS2);
4141                     cpu_src1 = gen_load_gpr(dc, rs1);
4142                     cpu_src2 = gen_load_gpr(dc, rs2);
4143                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4144                     gen_store_gpr(dc, rd, cpu_dst);
4145                     break;
4146                 case 0x010: /* VIS I array8 */
4147                     CHECK_FPU_FEATURE(dc, VIS1);
4148                     cpu_src1 = gen_load_gpr(dc, rs1);
4149                     cpu_src2 = gen_load_gpr(dc, rs2);
4150                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4151                     gen_store_gpr(dc, rd, cpu_dst);
4152                     break;
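                      /* array16 and array32 reuse the array8 helper and
                         scale the resulting blocked address by the element
                         size.  */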
4153                 case 0x012: /* VIS I array16 */
4154                     CHECK_FPU_FEATURE(dc, VIS1);
4155                     cpu_src1 = gen_load_gpr(dc, rs1);
4156                     cpu_src2 = gen_load_gpr(dc, rs2);
4157                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4158                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4159                     gen_store_gpr(dc, rd, cpu_dst);
4160                     break;
4161                 case 0x014: /* VIS I array32 */
4162                     CHECK_FPU_FEATURE(dc, VIS1);
4163                     cpu_src1 = gen_load_gpr(dc, rs1);
4164                     cpu_src2 = gen_load_gpr(dc, rs2);
4165                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4166                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4167                     gen_store_gpr(dc, rd, cpu_dst);
4168                     break;
4169                 case 0x018: /* VIS I alignaddr */
4170                     CHECK_FPU_FEATURE(dc, VIS1);
4171                     cpu_src1 = gen_load_gpr(dc, rs1);
4172                     cpu_src2 = gen_load_gpr(dc, rs2);
4173                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4174                     gen_store_gpr(dc, rd, cpu_dst);
4175                     break;
4176                 case 0x01a: /* VIS I alignaddrl */
4177                     CHECK_FPU_FEATURE(dc, VIS1);
4178                     cpu_src1 = gen_load_gpr(dc, rs1);
4179                     cpu_src2 = gen_load_gpr(dc, rs2);
4180                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4181                     gen_store_gpr(dc, rd, cpu_dst);
4182                     break;
4183                 case 0x019: /* VIS II bmask */
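                          /* bmask deposits the shuffle mask into the upper
                             32 bits of %gsr (consumed later by bshuffle)
                             and also returns the sum in rd.  */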
4184                     CHECK_FPU_FEATURE(dc, VIS2);
4185                     cpu_src1 = gen_load_gpr(dc, rs1);
4186                     cpu_src2 = gen_load_gpr(dc, rs2);
4187                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4188                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4189                     gen_store_gpr(dc, rd, cpu_dst);
4190                     break;
4191                 case 0x020: /* VIS I fcmple16 */
4192                     CHECK_FPU_FEATURE(dc, VIS1);
4193                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4194                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4195                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4196                     gen_store_gpr(dc, rd, cpu_dst);
4197                     break;
4198                 case 0x022: /* VIS I fcmpne16 */
4199                     CHECK_FPU_FEATURE(dc, VIS1);
4200                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4201                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4202                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4203                     gen_store_gpr(dc, rd, cpu_dst);
4204                     break;
4205                 case 0x024: /* VIS I fcmple32 */
4206                     CHECK_FPU_FEATURE(dc, VIS1);
4207                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4208                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4209                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4210                     gen_store_gpr(dc, rd, cpu_dst);
4211                     break;
4212                 case 0x026: /* VIS I fcmpne32 */
4213                     CHECK_FPU_FEATURE(dc, VIS1);
4214                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4215                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4216                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4217                     gen_store_gpr(dc, rd, cpu_dst);
4218                     break;
4219                 case 0x028: /* VIS I fcmpgt16 */
4220                     CHECK_FPU_FEATURE(dc, VIS1);
4221                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4222                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4223                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4224                     gen_store_gpr(dc, rd, cpu_dst);
4225                     break;
4226                 case 0x02a: /* VIS I fcmpeq16 */
4227                     CHECK_FPU_FEATURE(dc, VIS1);
4228                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4229                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4230                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4231                     gen_store_gpr(dc, rd, cpu_dst);
4232                     break;
4233                 case 0x02c: /* VIS I fcmpgt32 */
4234                     CHECK_FPU_FEATURE(dc, VIS1);
4235                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4236                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4237                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4238                     gen_store_gpr(dc, rd, cpu_dst);
4239                     break;
4240                 case 0x02e: /* VIS I fcmpeq32 */
4241                     CHECK_FPU_FEATURE(dc, VIS1);
4242                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4243                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4244                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4245                     gen_store_gpr(dc, rd, cpu_dst);
4246                     break;
4247                 case 0x031: /* VIS I fmul8x16 */
4248                     CHECK_FPU_FEATURE(dc, VIS1);
4249                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4250                     break;
4251                 case 0x033: /* VIS I fmul8x16au */
4252                     CHECK_FPU_FEATURE(dc, VIS1);
4253                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4254                     break;
4255                 case 0x035: /* VIS I fmul8x16al */
4256                     CHECK_FPU_FEATURE(dc, VIS1);
4257                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4258                     break;
4259                 case 0x036: /* VIS I fmul8sux16 */
4260                     CHECK_FPU_FEATURE(dc, VIS1);
4261                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4262                     break;
4263                 case 0x037: /* VIS I fmul8ulx16 */
4264                     CHECK_FPU_FEATURE(dc, VIS1);
4265                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4266                     break;
4267                 case 0x038: /* VIS I fmuld8sux16 */
4268                     CHECK_FPU_FEATURE(dc, VIS1);
4269                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4270                     break;
4271                 case 0x039: /* VIS I fmuld8ulx16 */
4272                     CHECK_FPU_FEATURE(dc, VIS1);
4273                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4274                     break;
4275                 case 0x03a: /* VIS I fpack32 */
4276                     CHECK_FPU_FEATURE(dc, VIS1);
4277                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4278                     break;
4279                 case 0x03b: /* VIS I fpack16 */
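                          /* fpack16 and fpackfix clip according to the GSR
                             scale field, hence cpu_gsr is passed to the
                             helpers.  */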
4280                     CHECK_FPU_FEATURE(dc, VIS1);
4281                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4282                     cpu_dst_32 = gen_dest_fpr_F(dc);
4283                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4284                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4285                     break;
4286                 case 0x03d: /* VIS I fpackfix */
4287                     CHECK_FPU_FEATURE(dc, VIS1);
4288                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4289                     cpu_dst_32 = gen_dest_fpr_F(dc);
4290                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4291                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4292                     break;
4293                 case 0x03e: /* VIS I pdist */
4294                     CHECK_FPU_FEATURE(dc, VIS1);
4295                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4296                     break;
4297                 case 0x048: /* VIS I faligndata */
4298                     CHECK_FPU_FEATURE(dc, VIS1);
4299                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4300                     break;
4301                 case 0x04b: /* VIS I fpmerge */
4302                     CHECK_FPU_FEATURE(dc, VIS1);
4303                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4304                     break;
4305                 case 0x04c: /* VIS II bshuffle */
4306                     CHECK_FPU_FEATURE(dc, VIS2);
4307                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4308                     break;
4309                 case 0x04d: /* VIS I fexpand */
4310                     CHECK_FPU_FEATURE(dc, VIS1);
4311                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4312                     break;
4313                 case 0x050: /* VIS I fpadd16 */
4314                     CHECK_FPU_FEATURE(dc, VIS1);
4315                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4316                     break;
4317                 case 0x051: /* VIS I fpadd16s */
4318                     CHECK_FPU_FEATURE(dc, VIS1);
4319                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4320                     break;
4321                 case 0x052: /* VIS I fpadd32 */
4322                     CHECK_FPU_FEATURE(dc, VIS1);
4323                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4324                     break;
4325                 case 0x053: /* VIS I fpadd32s */
4326                     CHECK_FPU_FEATURE(dc, VIS1);
4327                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4328                     break;
4329                 case 0x054: /* VIS I fpsub16 */
4330                     CHECK_FPU_FEATURE(dc, VIS1);
4331                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4332                     break;
4333                 case 0x055: /* VIS I fpsub16s */
4334                     CHECK_FPU_FEATURE(dc, VIS1);
4335                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4336                     break;
4337                 case 0x056: /* VIS I fpsub32 */
4338                     CHECK_FPU_FEATURE(dc, VIS1);
4339                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4340                     break;
4341                 case 0x057: /* VIS I fpsub32s */
4342                     CHECK_FPU_FEATURE(dc, VIS1);
4343                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4344                     break;
4345                 case 0x060: /* VIS I fzero */
4346                     CHECK_FPU_FEATURE(dc, VIS1);
4347                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4348                     tcg_gen_movi_i64(cpu_dst_64, 0);
4349                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4350                     break;
4351                 case 0x061: /* VIS I fzeros */
4352                     CHECK_FPU_FEATURE(dc, VIS1);
4353                     cpu_dst_32 = gen_dest_fpr_F(dc);
4354                     tcg_gen_movi_i32(cpu_dst_32, 0);
4355                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4356                     break;
4357                 case 0x062: /* VIS I fnor */
4358                     CHECK_FPU_FEATURE(dc, VIS1);
4359                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4360                     break;
4361                 case 0x063: /* VIS I fnors */
4362                     CHECK_FPU_FEATURE(dc, VIS1);
4363                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4364                     break;
4365                 case 0x064: /* VIS I fandnot2 */
4366                     CHECK_FPU_FEATURE(dc, VIS1);
4367                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4368                     break;
4369                 case 0x065: /* VIS I fandnot2s */
4370                     CHECK_FPU_FEATURE(dc, VIS1);
4371                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4372                     break;
4373                 case 0x066: /* VIS I fnot2 */
4374                     CHECK_FPU_FEATURE(dc, VIS1);
4375                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4376                     break;
4377                 case 0x067: /* VIS I fnot2s */
4378                     CHECK_FPU_FEATURE(dc, VIS1);
4379                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4380                     break;
4381                 case 0x068: /* VIS I fandnot1 */
4382                     CHECK_FPU_FEATURE(dc, VIS1);
4383                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4384                     break;
4385                 case 0x069: /* VIS I fandnot1s */
4386                     CHECK_FPU_FEATURE(dc, VIS1);
4387                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4388                     break;
4389                 case 0x06a: /* VIS I fnot1 */
4390                     CHECK_FPU_FEATURE(dc, VIS1);
4391                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4392                     break;
4393                 case 0x06b: /* VIS I fnot1s */
4394                     CHECK_FPU_FEATURE(dc, VIS1);
4395                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4396                     break;
4397                 case 0x06c: /* VIS I fxor */
4398                     CHECK_FPU_FEATURE(dc, VIS1);
4399                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4400                     break;
4401                 case 0x06d: /* VIS I fxors */
4402                     CHECK_FPU_FEATURE(dc, VIS1);
4403                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4404                     break;
4405                 case 0x06e: /* VIS I fnand */
4406                     CHECK_FPU_FEATURE(dc, VIS1);
4407                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4408                     break;
4409                 case 0x06f: /* VIS I fnands */
4410                     CHECK_FPU_FEATURE(dc, VIS1);
4411                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4412                     break;
4413                 case 0x070: /* VIS I fand */
4414                     CHECK_FPU_FEATURE(dc, VIS1);
4415                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4416                     break;
4417                 case 0x071: /* VIS I fands */
4418                     CHECK_FPU_FEATURE(dc, VIS1);
4419                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4420                     break;
4421                 case 0x072: /* VIS I fxnor */
4422                     CHECK_FPU_FEATURE(dc, VIS1);
4423                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4424                     break;
4425                 case 0x073: /* VIS I fxnors */
4426                     CHECK_FPU_FEATURE(dc, VIS1);
4427                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4428                     break;
4429                 case 0x074: /* VIS I fsrc1 */
4430                     CHECK_FPU_FEATURE(dc, VIS1);
4431                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4432                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4433                     break;
4434                 case 0x075: /* VIS I fsrc1s */
4435                     CHECK_FPU_FEATURE(dc, VIS1);
4436                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4437                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4438                     break;
4439                 case 0x076: /* VIS I fornot2 */
4440                     CHECK_FPU_FEATURE(dc, VIS1);
4441                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4442                     break;
4443                 case 0x077: /* VIS I fornot2s */
4444                     CHECK_FPU_FEATURE(dc, VIS1);
4445                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4446                     break;
4447                 case 0x078: /* VIS I fsrc2 */
4448                     CHECK_FPU_FEATURE(dc, VIS1);
4449                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4450                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4451                     break;
4452                 case 0x079: /* VIS I fsrc2s */
4453                     CHECK_FPU_FEATURE(dc, VIS1);
4454                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4455                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4456                     break;
4457                 case 0x07a: /* VIS I fornot1 */
4458                     CHECK_FPU_FEATURE(dc, VIS1);
4459                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4460                     break;
4461                 case 0x07b: /* VIS I fornot1s */
4462                     CHECK_FPU_FEATURE(dc, VIS1);
4463                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4464                     break;
4465                 case 0x07c: /* VIS I for */
4466                     CHECK_FPU_FEATURE(dc, VIS1);
4467                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4468                     break;
4469                 case 0x07d: /* VIS I fors */
4470                     CHECK_FPU_FEATURE(dc, VIS1);
4471                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4472                     break;
4473                 case 0x07e: /* VIS I fone */
4474                     CHECK_FPU_FEATURE(dc, VIS1);
4475                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4476                     tcg_gen_movi_i64(cpu_dst_64, -1);
4477                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4478                     break;
4479                 case 0x07f: /* VIS I fones */
4480                     CHECK_FPU_FEATURE(dc, VIS1);
4481                     cpu_dst_32 = gen_dest_fpr_F(dc);
4482                     tcg_gen_movi_i32(cpu_dst_32, -1);
4483                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4484                     break;
4485                 case 0x080: /* VIS I shutdown */
4486                 case 0x081: /* VIS II siam */
4487                     /* XXX: shutdown and siam are not implemented */
4488                     goto illegal_insn;
4489                 default:
4490                     goto illegal_insn;
4491                 }
4492 #else
4493                 goto ncp_insn;
4494 #endif
4495             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4496 #ifdef TARGET_SPARC64
4497                 goto illegal_insn;
4498 #else
4499                 goto ncp_insn;
4500 #endif
4501 #ifdef TARGET_SPARC64
4502             } else if (xop == 0x39) { /* V9 return */
4503                 TCGv_i32 r_const;
4504
4505                 save_state(dc);
4506                 cpu_src1 = get_src1(dc, insn);
4507                 cpu_tmp0 = get_temp_tl(dc);
4508                 if (IS_IMM) {   /* immediate */
4509                     simm = GET_FIELDs(insn, 19, 31);
4510                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4511                 } else {                /* register */
4512                     rs2 = GET_FIELD(insn, 27, 31);
4513                     if (rs2) {
4514                         cpu_src2 = gen_load_gpr(dc, rs2);
4515                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4516                     } else {
4517                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4518                     }
4519                 }
4520                 gen_helper_restore(cpu_env);
4521                 gen_mov_pc_npc(dc);
4522                 r_const = tcg_const_i32(3);
4523                 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4524                 tcg_temp_free_i32(r_const);
4525                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4526                 dc->npc = DYNAMIC_PC;
4527                 goto jmp_insn;
4528 #endif
4529             } else {
4530                 cpu_src1 = get_src1(dc, insn);
4531                 cpu_tmp0 = get_temp_tl(dc);
4532                 if (IS_IMM) {   /* immediate */
4533                     simm = GET_FIELDs(insn, 19, 31);
4534                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4535                 } else {                /* register */
4536                     rs2 = GET_FIELD(insn, 27, 31);
4537                     if (rs2) {
4538                         cpu_src2 = gen_load_gpr(dc, rs2);
4539                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4540                     } else {
4541                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4542                     }
4543                 }
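                /* cpu_tmp0 now holds rs1 + (simm13 or rs2): the target
                   address for jmpl/rett and the value that save/restore
                   write to rd after the window switch. */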
4544                 switch (xop) {
4545                 case 0x38:      /* jmpl */
4546                     {
4547                         TCGv t;
4548                         TCGv_i32 r_const;
4549
4550                         t = gen_dest_gpr(dc, rd);
4551                         tcg_gen_movi_tl(t, dc->pc);
4552                         gen_store_gpr(dc, rd, t);
4553                         gen_mov_pc_npc(dc);
4554                         r_const = tcg_const_i32(3);
4555                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4556                         tcg_temp_free_i32(r_const);
4557                         gen_address_mask(dc, cpu_tmp0);
4558                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4559                         dc->npc = DYNAMIC_PC;
4560                     }
4561                     goto jmp_insn;
4562 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4563                 case 0x39:      /* rett, V9 return */
4564                     {
4565                         TCGv_i32 r_const;
4566
4567                         if (!supervisor(dc))
4568                             goto priv_insn;
4569                         gen_mov_pc_npc(dc);
4570                         r_const = tcg_const_i32(3);
4571                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4572                         tcg_temp_free_i32(r_const);
4573                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4574                         dc->npc = DYNAMIC_PC;
4575                         gen_helper_rett(cpu_env);
4576                     }
4577                     goto jmp_insn;
4578 #endif
4579                 case 0x3b: /* flush */
4580                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4581                         goto unimp_flush;
4582                     /* nop */
4583                     break;
4584                 case 0x3c:      /* save */
4585                     save_state(dc);
4586                     gen_helper_save(cpu_env);
4587                     gen_store_gpr(dc, rd, cpu_tmp0);
4588                     break;
4589                 case 0x3d:      /* restore */
4590                     save_state(dc);
4591                     gen_helper_restore(cpu_env);
4592                     gen_store_gpr(dc, rd, cpu_tmp0);
4593                     break;
4594 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4595                 case 0x3e:      /* V9 done/retry */
4596                     {
4597                         switch (rd) {
4598                         case 0:
4599                             if (!supervisor(dc))
4600                                 goto priv_insn;
4601                             dc->npc = DYNAMIC_PC;
4602                             dc->pc = DYNAMIC_PC;
4603                             gen_helper_done(cpu_env);
4604                             goto jmp_insn;
4605                         case 1:
4606                             if (!supervisor(dc))
4607                                 goto priv_insn;
4608                             dc->npc = DYNAMIC_PC;
4609                             dc->pc = DYNAMIC_PC;
4610                             gen_helper_retry(cpu_env);
4611                             goto jmp_insn;
4612                         default:
4613                             goto illegal_insn;
4614                         }
4615                     }
4616                     break;
4617 #endif
4618                 default:
4619                     goto illegal_insn;
4620                 }
4621             }
4622             break;
4623         }
4624         break;
4625     case 3:                     /* load/store instructions */
4626         {
4627             unsigned int xop = GET_FIELD(insn, 7, 12);
4628             /* ??? gen_address_mask prevents us from using a source
4629                register directly.  Always generate a temporary.  */
4630             TCGv cpu_addr = get_temp_tl(dc);
4631
4632             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
4633             if (xop == 0x3c || xop == 0x3e) {
4634                 /* V9 casa/casxa : no offset */
4635             } else if (IS_IMM) {     /* immediate */
4636                 simm = GET_FIELDs(insn, 19, 31);
4637                 if (simm != 0) {
4638                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
4639                 }
4640             } else {            /* register */
4641                 rs2 = GET_FIELD(insn, 27, 31);
4642                 if (rs2 != 0) {
4643                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
4644                 }
4645             }
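            /* cpu_addr now holds the effective address rs1 + (simm13 or
               rs2).  The xop ranges below dispatch to the integer loads,
               FP loads, integer stores, FP stores and the ASI variants. */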
4646             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4647                 (xop > 0x17 && xop <= 0x1d ) ||
4648                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4649                 TCGv cpu_val = gen_dest_gpr(dc, rd);
4650
4651                 switch (xop) {
4652                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4653                     gen_address_mask(dc, cpu_addr);
4654                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4655                     break;
4656                 case 0x1:       /* ldub, load unsigned byte */
4657                     gen_address_mask(dc, cpu_addr);
4658                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4659                     break;
4660                 case 0x2:       /* lduh, load unsigned halfword */
4661                     gen_address_mask(dc, cpu_addr);
4662                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4663                     break;
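                /* ldd fills an even/odd register pair from a single 64-bit
                   access: the high word ends up in rd, the low word in
                   rd+1; an odd rd is illegal. */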
4664                 case 0x3:       /* ldd, load double word */
4665                     if (rd & 1)
4666                         goto illegal_insn;
4667                     else {
4668                         TCGv_i32 r_const;
4669                         TCGv_i64 t64;
4670
4671                         save_state(dc);
4672                         r_const = tcg_const_i32(7);
4673                         /* XXX remove alignment check */
4674                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4675                         tcg_temp_free_i32(r_const);
4676                         gen_address_mask(dc, cpu_addr);
4677                         t64 = tcg_temp_new_i64();
4678                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4679                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4680                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4681                         gen_store_gpr(dc, rd + 1, cpu_val);
4682                         tcg_gen_shri_i64(t64, t64, 32);
4683                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4684                         tcg_temp_free_i64(t64);
4685                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4686                     }
4687                     break;
4688                 case 0x9:       /* ldsb, load signed byte */
4689                     gen_address_mask(dc, cpu_addr);
4690                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4691                     break;
4692                 case 0xa:       /* ldsh, load signed halfword */
4693                     gen_address_mask(dc, cpu_addr);
4694                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4695                     break;
4696                 case 0xd:       /* ldstub, load-store unsigned byte -- XXX: should be atomic */
4697                     {
4698                         TCGv r_const;
4699
4700                         gen_address_mask(dc, cpu_addr);
4701                         tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4702                         r_const = tcg_const_tl(0xff);
4703                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4704                         tcg_temp_free(r_const);
4705                     }
4706                     break;
4707                 case 0x0f:
4708                     /* swap, swap register with memory. XXX: should also be atomic */
4709                     {
4710                         TCGv t0 = get_temp_tl(dc);
4711                         CHECK_IU_FEATURE(dc, SWAP);
4712                         cpu_src1 = gen_load_gpr(dc, rd);
4713                         gen_address_mask(dc, cpu_addr);
4714                         tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4715                         tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4716                         tcg_gen_mov_tl(cpu_val, t0);
4717                     }
4718                     break;
4719 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
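                /* Alternate-space loads.  On 32-bit SPARC they are
                   privileged and have no immediate form, hence the extra
                   checks under #ifndef TARGET_SPARC64. */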
4720                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4721 #ifndef TARGET_SPARC64
4722                     if (IS_IMM)
4723                         goto illegal_insn;
4724                     if (!supervisor(dc))
4725                         goto priv_insn;
4726 #endif
4727                     save_state(dc);
4728                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4729                     break;
4730                 case 0x11:      /* lduba, load unsigned byte alternate */
4731 #ifndef TARGET_SPARC64
4732                     if (IS_IMM)
4733                         goto illegal_insn;
4734                     if (!supervisor(dc))
4735                         goto priv_insn;
4736 #endif
4737                     save_state(dc);
4738                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4739                     break;
4740                 case 0x12:      /* lduha, load unsigned halfword alternate */
4741 #ifndef TARGET_SPARC64
4742                     if (IS_IMM)
4743                         goto illegal_insn;
4744                     if (!supervisor(dc))
4745                         goto priv_insn;
4746 #endif
4747                     save_state(dc);
4748                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4749                     break;
4750                 case 0x13:      /* ldda, load double word alternate */
4751 #ifndef TARGET_SPARC64
4752                     if (IS_IMM)
4753                         goto illegal_insn;
4754                     if (!supervisor(dc))
4755                         goto priv_insn;
4756 #endif
4757                     if (rd & 1)
4758                         goto illegal_insn;
4759                     save_state(dc);
4760                     gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4761                     goto skip_move;
4762                 case 0x19:      /* ldsba, load signed byte alternate */
4763 #ifndef TARGET_SPARC64
4764                     if (IS_IMM)
4765                         goto illegal_insn;
4766                     if (!supervisor(dc))
4767                         goto priv_insn;
4768 #endif
4769                     save_state(dc);
4770                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4771                     break;
4772                 case 0x1a:      /* ldsha, load signed halfword alternate */
4773 #ifndef TARGET_SPARC64
4774                     if (IS_IMM)
4775                         goto illegal_insn;
4776                     if (!supervisor(dc))
4777                         goto priv_insn;
4778 #endif
4779                     save_state(dc);
4780                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4781                     break;
4782                 case 0x1d:      /* ldstuba -- XXX: should be atomic */
4783 #ifndef TARGET_SPARC64
4784                     if (IS_IMM)
4785                         goto illegal_insn;
4786                     if (!supervisor(dc))
4787                         goto priv_insn;
4788 #endif
4789                     save_state(dc);
4790                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4791                     break;
4792                 case 0x1f:      /* swapa, swap reg with alternate memory.
4793                                    XXX: should also be atomic */
4794                     CHECK_IU_FEATURE(dc, SWAP);
4795 #ifndef TARGET_SPARC64
4796                     if (IS_IMM)
4797                         goto illegal_insn;
4798                     if (!supervisor(dc))
4799                         goto priv_insn;
4800 #endif
4801                     save_state(dc);
4802                     cpu_src1 = gen_load_gpr(dc, rd);
4803                     gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4804                     break;
4805
4806 #ifndef TARGET_SPARC64
4807                 case 0x30: /* ldc */
4808                 case 0x31: /* ldcsr */
4809                 case 0x33: /* lddc */
4810                     goto ncp_insn;
4811 #endif
4812 #endif
4813 #ifdef TARGET_SPARC64
4814                 case 0x08: /* V9 ldsw */
4815                     gen_address_mask(dc, cpu_addr);
4816                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4817                     break;
4818                 case 0x0b: /* V9 ldx */
4819                     gen_address_mask(dc, cpu_addr);
4820                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4821                     break;
4822                 case 0x18: /* V9 ldswa */
4823                     save_state(dc);
4824                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4825                     break;
4826                 case 0x1b: /* V9 ldxa */
4827                     save_state(dc);
4828                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4829                     break;
4830                 case 0x2d: /* V9 prefetch, no effect */
4831                     goto skip_move;
4832                 case 0x30: /* V9 ldfa */
4833                     if (gen_trap_ifnofpu(dc)) {
4834                         goto jmp_insn;
4835                     }
4836                     save_state(dc);
4837                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4838                     gen_update_fprs_dirty(rd);
4839                     goto skip_move;
4840                 case 0x33: /* V9 lddfa */
4841                     if (gen_trap_ifnofpu(dc)) {
4842                         goto jmp_insn;
4843                     }
4844                     save_state(dc);
4845                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4846                     gen_update_fprs_dirty(DFPREG(rd));
4847                     goto skip_move;
4848                 case 0x3d: /* V9 prefetcha, no effect */
4849                     goto skip_move;
4850                 case 0x32: /* V9 ldqfa */
4851                     CHECK_FPU_FEATURE(dc, FLOAT128);
4852                     if (gen_trap_ifnofpu(dc)) {
4853                         goto jmp_insn;
4854                     }
4855                     save_state(dc);
4856                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4857                     gen_update_fprs_dirty(QFPREG(rd));
4858                     goto skip_move;
4859 #endif
4860                 default:
4861                     goto illegal_insn;
4862                 }
4863                 gen_store_gpr(dc, rd, cpu_val);
4864 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4865             skip_move: ;
4866 #endif
4867             } else if (xop >= 0x20 && xop < 0x24) {
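                /* xop 0x20-0x23: FP loads.  Trap if the FPU is disabled,
                   then sync pc/npc so a faulting access reports the
                   correct location. */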
4868                 TCGv t0;
4869
4870                 if (gen_trap_ifnofpu(dc)) {
4871                     goto jmp_insn;
4872                 }
4873                 save_state(dc);
4874                 switch (xop) {
4875                 case 0x20:      /* ldf, load fpreg */
4876                     gen_address_mask(dc, cpu_addr);
4877                     t0 = get_temp_tl(dc);
4878                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4879                     cpu_dst_32 = gen_dest_fpr_F(dc);
4880                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4881                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4882                     break;
4883                 case 0x21:      /* ldfsr, V9 ldxfsr */
4884 #ifdef TARGET_SPARC64
4885                     gen_address_mask(dc, cpu_addr);
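                    /* rd selects the variant: rd == 1 is the 64-bit V9
                       ldxfsr, rd == 0 the 32-bit ldfsr handled below. */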
4886                     if (rd == 1) {
4887                         TCGv_i64 t64 = tcg_temp_new_i64();
4888                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4889                         gen_helper_ldxfsr(cpu_env, t64);
4890                         tcg_temp_free_i64(t64);
4891                         break;
4892                     }
4893 #endif
4894                     cpu_dst_32 = get_temp_i32(dc);
4895                     t0 = get_temp_tl(dc);
4896                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4897                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4898                     gen_helper_ldfsr(cpu_env, cpu_dst_32);
4899                     break;
4900                 case 0x22:      /* ldqf, load quad fpreg */
4901                     {
4902                         TCGv_i32 r_const;
4903
4904                         CHECK_FPU_FEATURE(dc, FLOAT128);
4905                         r_const = tcg_const_i32(dc->mem_idx);
4906                         gen_address_mask(dc, cpu_addr);
4907                         gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4908                         tcg_temp_free_i32(r_const);
4909                         gen_op_store_QT0_fpr(QFPREG(rd));
4910                         gen_update_fprs_dirty(QFPREG(rd));
4911                     }
4912                     break;
4913                 case 0x23:      /* lddf, load double fpreg */
4914                     gen_address_mask(dc, cpu_addr);
4915                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4916                     tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4917                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4918                     break;
4919                 default:
4920                     goto illegal_insn;
4921                 }
4922             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4923                        xop == 0xe || xop == 0x1e) {
4924                 TCGv cpu_val = gen_load_gpr(dc, rd);
4925
4926                 switch (xop) {
4927                 case 0x4: /* st, store word */
4928                     gen_address_mask(dc, cpu_addr);
4929                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4930                     break;
4931                 case 0x5: /* stb, store byte */
4932                     gen_address_mask(dc, cpu_addr);
4933                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4934                     break;
4935                 case 0x6: /* sth, store halfword */
4936                     gen_address_mask(dc, cpu_addr);
4937                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4938                     break;
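                /* std writes an even/odd register pair with a single
                   64-bit access: rd supplies the high word, rd+1 the low
                   word; an odd rd is illegal. */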
4939                 case 0x7: /* std, store double word */
4940                     if (rd & 1)
4941                         goto illegal_insn;
4942                     else {
4943                         TCGv_i32 r_const;
4944                         TCGv_i64 t64;
4945                         TCGv lo;
4946
4947                         save_state(dc);
4948                         gen_address_mask(dc, cpu_addr);
4949                         r_const = tcg_const_i32(7);
4950                         /* XXX remove alignment check */
4951                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4952                         tcg_temp_free_i32(r_const);
4953                         lo = gen_load_gpr(dc, rd + 1);
4954
4955                         t64 = tcg_temp_new_i64();
4956                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
4957                         tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
4958                         tcg_temp_free_i64(t64);
4959                     }
4960                     break;
4961 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4962                 case 0x14: /* sta, V9 stwa, store word alternate */
4963 #ifndef TARGET_SPARC64
4964                     if (IS_IMM)
4965                         goto illegal_insn;
4966                     if (!supervisor(dc))
4967                         goto priv_insn;
4968 #endif
4969                     save_state(dc);
4970                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4971                     dc->npc = DYNAMIC_PC;
4972                     break;
4973                 case 0x15: /* stba, store byte alternate */
4974 #ifndef TARGET_SPARC64
4975                     if (IS_IMM)
4976                         goto illegal_insn;
4977                     if (!supervisor(dc))
4978                         goto priv_insn;
4979 #endif
4980                     save_state(dc);
4981                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4982                     dc->npc = DYNAMIC_PC;
4983                     break;
4984                 case 0x16: /* stha, store halfword alternate */
4985 #ifndef TARGET_SPARC64
4986                     if (IS_IMM)
4987                         goto illegal_insn;
4988                     if (!supervisor(dc))
4989                         goto priv_insn;
4990 #endif
4991                     save_state(dc);
4992                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4993                     dc->npc = DYNAMIC_PC;
4994                     break;
4995                 case 0x17: /* stda, store double word alternate */
4996 #ifndef TARGET_SPARC64
4997                     if (IS_IMM)
4998                         goto illegal_insn;
4999                     if (!supervisor(dc))
5000                         goto priv_insn;
5001 #endif
5002                     if (rd & 1)
5003                         goto illegal_insn;
5004                     else {
5005                         save_state(dc);
5006                         gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5007                     }
5008                     break;
5009 #endif
5010 #ifdef TARGET_SPARC64
5011                 case 0x0e: /* V9 stx */
5012                     gen_address_mask(dc, cpu_addr);
5013                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5014                     break;
5015                 case 0x1e: /* V9 stxa */
5016                     save_state(dc);
5017                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
5018                     dc->npc = DYNAMIC_PC;
5019                     break;
5020 #endif
5021                 default:
5022                     goto illegal_insn;
5023                 }
5024             } else if (xop > 0x23 && xop < 0x28) {
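                /* xop 0x24-0x27: FP stores (stf, stfsr/stxfsr, stqf or
                   stdfq, stdf). */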
5025                 if (gen_trap_ifnofpu(dc)) {
5026                     goto jmp_insn;
5027                 }
5028                 save_state(dc);
5029                 switch (xop) {
5030                 case 0x24: /* stf, store fpreg */
5031                     {
5032                         TCGv t = get_temp_tl(dc);
5033                         gen_address_mask(dc, cpu_addr);
5034                         cpu_src1_32 = gen_load_fpr_F(dc, rd);
5035                         tcg_gen_ext_i32_tl(t, cpu_src1_32);
5036                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5037                     }
5038                     break;
5039                 case 0x25: /* stfsr, V9 stxfsr */
5040                     {
5041                         TCGv t = get_temp_tl(dc);
5042
5043                         tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
5044 #ifdef TARGET_SPARC64
5045                         gen_address_mask(dc, cpu_addr);
5046                         if (rd == 1) {
5047                             tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
5048                             break;
5049                         }
5050 #endif
5051                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5052                     }
5053                     break;
5054                 case 0x26:
5055 #ifdef TARGET_SPARC64
5056                     /* V9 stqf, store quad fpreg */
5057                     {
5058                         TCGv_i32 r_const;
5059
5060                         CHECK_FPU_FEATURE(dc, FLOAT128);
5061                         gen_op_load_fpr_QT0(QFPREG(rd));
5062                         r_const = tcg_const_i32(dc->mem_idx);
5063                         gen_address_mask(dc, cpu_addr);
5064                         gen_helper_stqf(cpu_env, cpu_addr, r_const);
5065                         tcg_temp_free_i32(r_const);
5066                     }
5067                     break;
5068 #else /* !TARGET_SPARC64 */
5069                     /* stdfq, store floating point queue */
5070 #if defined(CONFIG_USER_ONLY)
5071                     goto illegal_insn;
5072 #else
5073                     if (!supervisor(dc))
5074                         goto priv_insn;
5075                     if (gen_trap_ifnofpu(dc)) {
5076                         goto jmp_insn;
5077                     }
5078                     goto nfq_insn;
5079 #endif
5080 #endif
5081                 case 0x27: /* stdf, store double fpreg */
5082                     gen_address_mask(dc, cpu_addr);
5083                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5084                     tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5085                     break;
5086                 default:
5087                     goto illegal_insn;
5088                 }
5089             } else if (xop > 0x33 && xop < 0x3f) {
5090                 save_state(dc);
5091                 switch (xop) {
5092 #ifdef TARGET_SPARC64
5093                 case 0x34: /* V9 stfa */
5094                     if (gen_trap_ifnofpu(dc)) {
5095                         goto jmp_insn;
5096                     }
5097                     gen_stf_asi(cpu_addr, insn, 4, rd);
5098                     break;
5099                 case 0x36: /* V9 stqfa */
5100                     {
5101                         TCGv_i32 r_const;
5102
5103                         CHECK_FPU_FEATURE(dc, FLOAT128);
5104                         if (gen_trap_ifnofpu(dc)) {
5105                             goto jmp_insn;
5106                         }
5107                         r_const = tcg_const_i32(7);
5108                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
5109                         tcg_temp_free_i32(r_const);
5110                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5111                     }
5112                     break;
5113                 case 0x37: /* V9 stdfa */
5114                     if (gen_trap_ifnofpu(dc)) {
5115                         goto jmp_insn;
5116                     }
5117                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5118                     break;
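                /* casa/casxa: compare-and-swap.  rs2 holds the comparison
                   value; rd supplies the swap value and receives the old
                   memory contents (handled inside gen_cas_asi and
                   gen_casx_asi). */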
5119                 case 0x3c: /* V9 casa */
5120                     rs2 = GET_FIELD(insn, 27, 31);
5121                     cpu_src2 = gen_load_gpr(dc, rs2);
5122                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5123                     break;
5124                 case 0x3e: /* V9 casxa */
5125                     rs2 = GET_FIELD(insn, 27, 31);
5126                     cpu_src2 = gen_load_gpr(dc, rs2);
5127                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5128                     break;
5129 #else
5130                 case 0x34: /* stc */
5131                 case 0x35: /* stcsr */
5132                 case 0x36: /* stdcq */
5133                 case 0x37: /* stdc */
5134                     goto ncp_insn;
5135 #endif
5136                 default:
5137                     goto illegal_insn;
5138                 }
5139             } else {
5140                 goto illegal_insn;
5141             }
5142         }
5143         break;
5144     }
5145     /* default case for non-jump instructions */
5146     if (dc->npc == DYNAMIC_PC) {
5147         dc->pc = DYNAMIC_PC;
5148         gen_op_next_insn();
5149     } else if (dc->npc == JUMP_PC) {
5150         /* we can do a static jump */
5151         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5152         dc->is_br = 1;
5153     } else {
5154         dc->pc = dc->npc;
5155         dc->npc = dc->npc + 4;
5156     }
5157  jmp_insn:
5158     goto egress;
5159  illegal_insn:
5160     {
5161         TCGv_i32 r_const;
5162
5163         save_state(dc);
5164         r_const = tcg_const_i32(TT_ILL_INSN);
5165         gen_helper_raise_exception(cpu_env, r_const);
5166         tcg_temp_free_i32(r_const);
5167         dc->is_br = 1;
5168     }
5169     goto egress;
5170  unimp_flush:
5171     {
5172         TCGv_i32 r_const;
5173
5174         save_state(dc);
5175         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5176         gen_helper_raise_exception(cpu_env, r_const);
5177         tcg_temp_free_i32(r_const);
5178         dc->is_br = 1;
5179     }
5180     goto egress;
5181 #if !defined(CONFIG_USER_ONLY)
5182  priv_insn:
5183     {
5184         TCGv_i32 r_const;
5185
5186         save_state(dc);
5187         r_const = tcg_const_i32(TT_PRIV_INSN);
5188         gen_helper_raise_exception(cpu_env, r_const);
5189         tcg_temp_free_i32(r_const);
5190         dc->is_br = 1;
5191     }
5192     goto egress;
5193 #endif
5194  nfpu_insn:
5195     save_state(dc);
5196     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5197     dc->is_br = 1;
5198     goto egress;
5199 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5200  nfq_insn:
5201     save_state(dc);
5202     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5203     dc->is_br = 1;
5204     goto egress;
5205 #endif
5206 #ifndef TARGET_SPARC64
5207  ncp_insn:
5208     {
5209         TCGv_i32 r_const;
5210
5211         save_state(dc);
5212         r_const = tcg_const_i32(TT_NCP_INSN);
5213         gen_helper_raise_exception(cpu_env, r_const);
5214         tcg_temp_free_i32(r_const);
5215         dc->is_br = 1;
5216     }
5217     goto egress;
5218 #endif
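    /* Common exit: free any temporaries allocated for this instruction. */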
5219  egress:
5220     if (dc->n_t32 != 0) {
5221         int i;
5222         for (i = dc->n_t32 - 1; i >= 0; --i) {
5223             tcg_temp_free_i32(dc->t32[i]);
5224         }
5225         dc->n_t32 = 0;
5226     }
5227     if (dc->n_ttl != 0) {
5228         int i;
5229         for (i = dc->n_ttl - 1; i >= 0; --i) {
5230             tcg_temp_free(dc->ttl[i]);
5231         }
5232         dc->n_ttl = 0;
5233     }
5234 }
5235
5236 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
5237                                                   int spc, CPUSPARCState *env)
5238 {
5239     target_ulong pc_start, last_pc;
5240     uint16_t *gen_opc_end;
5241     DisasContext dc1, *dc = &dc1;
5242     CPUBreakpoint *bp;
5243     int j, lj = -1;
5244     int num_insns;
5245     int max_insns;
5246     unsigned int insn;
5247
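    /* spc is non-zero when called from gen_intermediate_code_pc: in that
       mode the per-op PC tables (gen_opc_pc/gen_opc_npc) are filled in so
       guest state can be recovered later (see restore_state_to_opc). */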
5248     memset(dc, 0, sizeof(DisasContext));
5249     dc->tb = tb;
5250     pc_start = tb->pc;
5251     dc->pc = pc_start;
5252     last_pc = dc->pc;
5253     dc->npc = (target_ulong) tb->cs_base;
5254     dc->cc_op = CC_OP_DYNAMIC;
5255     dc->mem_idx = cpu_mmu_index(env);
5256     dc->def = env->def;
5257     dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5258     dc->address_mask_32bit = tb_am_enabled(tb->flags);
5259     dc->singlestep = (env->singlestep_enabled || singlestep);
5260     gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
5261
5262     num_insns = 0;
5263     max_insns = tb->cflags & CF_COUNT_MASK;
5264     if (max_insns == 0)
5265         max_insns = CF_COUNT_MASK;
5266     gen_icount_start();
5267     do {
5268         if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5269             QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5270                 if (bp->pc == dc->pc) {
5271                     if (dc->pc != pc_start)
5272                         save_state(dc);
5273                     gen_helper_debug(cpu_env);
5274                     tcg_gen_exit_tb(0);
5275                     dc->is_br = 1;
5276                     goto exit_gen_loop;
5277                 }
5278             }
5279         }
5280         if (spc) {
5281             qemu_log("Search PC...\n");
5282             j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
5283             if (lj < j) {
5284                 lj++;
5285                 while (lj < j)
5286                     gen_opc_instr_start[lj++] = 0;
5287                 gen_opc_pc[lj] = dc->pc;
5288                 gen_opc_npc[lj] = dc->npc;
5289                 gen_opc_instr_start[lj] = 1;
5290                 gen_opc_icount[lj] = num_insns;
5291             }
5292         }
5293         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5294             gen_io_start();
5295         last_pc = dc->pc;
5296         insn = cpu_ldl_code(env, dc->pc);
5297
5298         disas_sparc_insn(dc, insn);
5299         num_insns++;
5300
5301         if (dc->is_br)
5302             break;
5303         /* if the next PC is not sequential, stop translating now */
5304         if (dc->pc != (last_pc + 4))
5305             break;
5306         /* if we reach a page boundary, we stop generation so that the
5307            PC of a TT_TFAULT exception is always in the right page */
5308         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5309             break;
5310         /* if single step mode, we generate only one instruction and
5311            generate an exception */
5312         if (dc->singlestep) {
5313             break;
5314         }
5315     } while ((tcg_ctx.gen_opc_ptr < gen_opc_end) &&
5316              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5317              num_insns < max_insns);
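    /* Translation of this TB stops on a branch (is_br), a non-sequential
       PC, a page boundary or the TB size limit, single-step mode, a full
       opcode buffer, or once the max_insns budget is exhausted. */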
5318
5319  exit_gen_loop:
5320     if (tb->cflags & CF_LAST_IO) {
5321         gen_io_end();
5322     }
5323     if (!dc->is_br) {
5324         if (dc->pc != DYNAMIC_PC &&
5325             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5326             /* static PC and NPC: we can use direct chaining */
5327             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5328         } else {
5329             if (dc->pc != DYNAMIC_PC) {
5330                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5331             }
5332             save_npc(dc);
5333             tcg_gen_exit_tb(0);
5334         }
5335     }
5336     gen_icount_end(tb, num_insns);
5337     *tcg_ctx.gen_opc_ptr = INDEX_op_end;
5338     if (spc) {
5339         j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
5340         lj++;
5341         while (lj <= j)
5342             gen_opc_instr_start[lj++] = 0;
5343 #if 0
5344         log_page_dump();
5345 #endif
5346         gen_opc_jump_pc[0] = dc->jump_pc[0];
5347         gen_opc_jump_pc[1] = dc->jump_pc[1];
5348     } else {
5349         tb->size = last_pc + 4 - pc_start;
5350         tb->icount = num_insns;
5351     }
5352 #ifdef DEBUG_DISAS
5353     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5354         qemu_log("--------------\n");
5355         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5356         log_target_disas(env, pc_start, last_pc + 4 - pc_start, 0);
5357         qemu_log("\n");
5358     }
5359 #endif
5360 }
5361
5362 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5363 {
5364     gen_intermediate_code_internal(tb, 0, env);
5365 }
5366
5367 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5368 {
5369     gen_intermediate_code_internal(tb, 1, env);
5370 }
5371
5372 void gen_intermediate_code_init(CPUSPARCState *env)
5373 {
5374     unsigned int i;
5375     static int inited;
5376     static const char * const gregnames[8] = {
5377         NULL, // g0 not used
5378         "g1",
5379         "g2",
5380         "g3",
5381         "g4",
5382         "g5",
5383         "g6",
5384         "g7",
5385     };
5386     static const char * const fregnames[32] = {
5387         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5388         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5389         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5390         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5391     };
5392
5393     /* init various static tables */
5394     if (!inited) {
5395         inited = 1;
5396
5397         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5398         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5399                                              offsetof(CPUSPARCState, regwptr),
5400                                              "regwptr");
5401 #ifdef TARGET_SPARC64
5402         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5403                                          "xcc");
5404         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5405                                          "asi");
5406         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5407                                           "fprs");
5408         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5409                                      "gsr");
5410         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5411                                            offsetof(CPUSPARCState, tick_cmpr),
5412                                            "tick_cmpr");
5413         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5414                                             offsetof(CPUSPARCState, stick_cmpr),
5415                                             "stick_cmpr");
5416         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5417                                              offsetof(CPUSPARCState, hstick_cmpr),
5418                                              "hstick_cmpr");
5419         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5420                                        "hintp");
5421         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5422                                       "htba");
5423         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5424                                       "hver");
5425         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5426                                      offsetof(CPUSPARCState, ssr), "ssr");
5427         cpu_ver = tcg_global_mem_new(TCG_AREG0,
5428                                      offsetof(CPUSPARCState, version), "ver");
5429         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5430                                              offsetof(CPUSPARCState, softint),
5431                                              "softint");
5432 #else
5433         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5434                                      "wim");
5435 #endif
5436         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5437                                       "cond");
5438         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5439                                         "cc_src");
5440         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5441                                          offsetof(CPUSPARCState, cc_src2),
5442                                          "cc_src2");
5443         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5444                                         "cc_dst");
5445         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5446                                            "cc_op");
5447         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5448                                          "psr");
5449         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5450                                      "fsr");
5451         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5452                                     "pc");
5453         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5454                                      "npc");
5455         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5456 #ifndef CONFIG_USER_ONLY
5457         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5458                                      "tbr");
5459 #endif
5460         for (i = 1; i < 8; i++) {
5461             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5462                                               offsetof(CPUSPARCState, gregs[i]),
5463                                               gregnames[i]);
5464         }
5465         for (i = 0; i < TARGET_DPREGS; i++) {
5466             cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5467                                                 offsetof(CPUSPARCState, fpr[i]),
5468                                                 fregnames[i]);
5469         }
5470
5471         /* register helpers */
5472
5473 #define GEN_HELPER 2
5474 #include "helper.h"
5475     }
5476 }
5477
5478 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5479 {
5480     target_ulong npc;
5481     env->pc = gen_opc_pc[pc_pos];
5482     npc = gen_opc_npc[pc_pos];
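    /* gen_opc_npc[] holds either a concrete next PC or one of the
       translator's DYNAMIC_PC/JUMP_PC markers. */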
5483     if (npc == DYNAMIC_PC) {
5484         /* dynamic NPC: already stored */
5485     } else if (npc == JUMP_PC) {
5486         /* jump PC: use 'cond' and the jump targets of the translation */
5487         if (env->cond) {
5488             env->npc = gen_opc_jump_pc[0];
5489         } else {
5490             env->npc = gen_opc_jump_pc[1];
5491         }
5492     } else {
5493         env->npc = npc;
5494     }
5495 }