1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <[email protected]>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas/disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31
32 #define GEN_HELPER 1
33 #include "helper.h"
34
35 #define DEBUG_DISAS
36
37 #define DYNAMIC_PC  1 /* dynamic pc value */
38 #define JUMP_PC     2 /* dynamic pc value that can take only the two values
39                          held in jump_pc[0]/jump_pc[1], selected by cpu_cond */
40
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
51 static TCGv cpu_cond;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* Floating point registers */
62 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
63
64 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
65 static target_ulong gen_opc_jump_pc[2];
66
67 #include "exec/gen-icount.h"
68
69 typedef struct DisasContext {
70     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
71     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
72     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
73     int is_br;               /* set when the current TB must end here */
74     int mem_idx;             /* MMU index used for memory accesses */
75     int fpu_enabled;         /* nonzero if FPU access is allowed */
76     int address_mask_32bit;  /* nonzero if PSTATE.AM masking applies */
77     int singlestep;
78     uint32_t cc_op;  /* current CC operation */
79     struct TranslationBlock *tb;
80     sparc_def_t *def;
81     TCGv_i32 t32[3];         /* per-insn temporaries, freed after each insn */
82     TCGv ttl[5];
83     int n_t32;
84     int n_ttl;
85 } DisasContext;
86
87 typedef struct {
88     TCGCond cond;
89     bool is_bool;
90     bool g1, g2;
91     TCGv c1, c2;
92 } DisasCompare;
93
94 // This macro uses non-native bit order: bit 0 is the most significant bit (bit 31)
95 #define GET_FIELD(X, FROM, TO)                                  \
96     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
97
98 // This macro uses the bit order of the manuals, i.e. bit 0 is 2^0 (the LSB)
99 #define GET_FIELD_SP(X, FROM, TO)               \
100     GET_FIELD(X, 31 - (TO), 31 - (FROM))
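/* Example (hypothetical insn word): GET_FIELD(insn, 0, 1) extracts the two
   most significant bits (the SPARC "op" field, bits 31:30), while
   GET_FIELD_SP(insn, 0, 1) extracts the two least significant bits.  */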
101
102 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
103 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
104
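/* Map a 5-bit instruction register field to the architectural double/quad
   FP register number.  On sparc64 the field's low bit supplies bit 5 of the
   register number, e.g. DFPREG(1) == 32 (%f32); on 32-bit SPARC the low
   bit(s) are simply masked off.  */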
105 #ifdef TARGET_SPARC64
106 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
107 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
108 #else
109 #define DFPREG(r) (r & 0x1e)
110 #define QFPREG(r) (r & 0x1c)
111 #endif
112
113 #define UA2005_HTRAP_MASK 0xff
114 #define V8_TRAP_MASK 0x7f
115
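/* Sign-extend the low 'len' bits of x, e.g. sign_extend(0x1fff, 13) == -1
   (assuming the usual 32-bit int).  */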
116 static int sign_extend(int x, int len)
117 {
118     len = 32 - len;
119     return (x << len) >> len;
120 }
121
122 #define IS_IMM (insn & (1<<13))
123
124 static inline TCGv_i32 get_temp_i32(DisasContext *dc)
125 {
126     TCGv_i32 t;
127     assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
128     dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
129     return t;
130 }
131
132 static inline TCGv get_temp_tl(DisasContext *dc)
133 {
134     TCGv t;
135     assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
136     dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
137     return t;
138 }
139
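/* On sparc64, mark the relevant half of the FP register file dirty in FPRS:
   bit 0 (DL) for %f0-%f31, bit 1 (DU) for %f32-%f63.  A no-op elsewhere.  */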
140 static inline void gen_update_fprs_dirty(int rd)
141 {
142 #if defined(TARGET_SPARC64)
143     tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
144 #endif
145 }
146
147 /* floating point registers moves */
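/* Single-precision registers are packed two per TCGv_i64: register 2n lives
   in the upper 32 bits of cpu_fpr[n] and register 2n+1 in the lower 32 bits. */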
148 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
149 {
150 #if TCG_TARGET_REG_BITS == 32
151     if (src & 1) {
152         return TCGV_LOW(cpu_fpr[src / 2]);
153     } else {
154         return TCGV_HIGH(cpu_fpr[src / 2]);
155     }
156 #else
157     if (src & 1) {
158         return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
159     } else {
160         TCGv_i32 ret = get_temp_i32(dc);
161         TCGv_i64 t = tcg_temp_new_i64();
162
163         tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
164         tcg_gen_trunc_i64_i32(ret, t);
165         tcg_temp_free_i64(t);
166
167         return ret;
168     }
169 #endif
170 }
171
172 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
173 {
174 #if TCG_TARGET_REG_BITS == 32
175     if (dst & 1) {
176         tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
177     } else {
178         tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
179     }
180 #else
181     TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
182     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
183                         (dst & 1 ? 0 : 32), 32);
184 #endif
185     gen_update_fprs_dirty(dst);
186 }
187
188 static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
189 {
190     return get_temp_i32(dc);
191 }
192
193 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
194 {
195     src = DFPREG(src);
196     return cpu_fpr[src / 2];
197 }
198
199 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
200 {
201     dst = DFPREG(dst);
202     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
203     gen_update_fprs_dirty(dst);
204 }
205
206 static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
207 {
208     return cpu_fpr[DFPREG(dst) / 2];
209 }
210
211 static void gen_op_load_fpr_QT0(unsigned int src)
212 {
213     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
214                    offsetof(CPU_QuadU, ll.upper));
215     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
216                    offsetof(CPU_QuadU, ll.lower));
217 }
218
219 static void gen_op_load_fpr_QT1(unsigned int src)
220 {
221     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
222                    offsetof(CPU_QuadU, ll.upper));
223     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
224                    offsetof(CPU_QuadU, ll.lower));
225 }
226
227 static void gen_op_store_QT0_fpr(unsigned int dst)
228 {
229     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
230                    offsetof(CPU_QuadU, ll.upper));
231     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
232                    offsetof(CPU_QuadU, ll.lower));
233 }
234
235 #ifdef TARGET_SPARC64
236 static void gen_move_Q(unsigned int rd, unsigned int rs)
237 {
238     rd = QFPREG(rd);
239     rs = QFPREG(rs);
240
241     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
242     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
243     gen_update_fprs_dirty(rd);
244 }
245 #endif
246
247 /* moves */
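/* Privilege checks are derived from the MMU index in use: anything at or
   above MMU_KERNEL_IDX counts as supervisor, and MMU_HYPV_IDX (sparc64) as
   hyperprivileged.  Both are constant 0 in user-mode emulation.  */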
248 #ifdef CONFIG_USER_ONLY
249 #define supervisor(dc) 0
250 #ifdef TARGET_SPARC64
251 #define hypervisor(dc) 0
252 #endif
253 #else
254 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
255 #ifdef TARGET_SPARC64
256 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
257 #else
258 #endif
259 #endif
260
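/* AM_CHECK: true when PSTATE.AM address masking is in effect (always true
   for the 32-bit ABI), in which case generated addresses are truncated to
   32 bits by gen_address_mask() below.  */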
261 #ifdef TARGET_SPARC64
262 #ifndef TARGET_ABI32
263 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
264 #else
265 #define AM_CHECK(dc) (1)
266 #endif
267 #endif
268
269 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
270 {
271 #ifdef TARGET_SPARC64
272     if (AM_CHECK(dc))
273         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
274 #endif
275 }
276
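/* General-purpose register access: %g0 reads as zero and discards writes,
   %g1-%g7 are fixed TCG globals, and the windowed registers (%o/%l/%i) are
   reached indirectly through cpu_regwptr.  */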
277 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
278 {
279     if (reg == 0 || reg >= 8) {
280         TCGv t = get_temp_tl(dc);
281         if (reg == 0) {
282             tcg_gen_movi_tl(t, 0);
283         } else {
284             tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
285         }
286         return t;
287     } else {
288         return cpu_gregs[reg];
289     }
290 }
291
292 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
293 {
294     if (reg > 0) {
295         if (reg < 8) {
296             tcg_gen_mov_tl(cpu_gregs[reg], v);
297         } else {
298             tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
299         }
300     }
301 }
302
303 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
304 {
305     if (reg == 0 || reg >= 8) {
306         return get_temp_tl(dc);
307     } else {
308         return cpu_gregs[reg];
309     }
310 }
311
312 static inline void gen_goto_tb(DisasContext *s, int tb_num,
313                                target_ulong pc, target_ulong npc)
314 {
315     TranslationBlock *tb;
316
317     tb = s->tb;
318     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
319         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
320         !s->singlestep)  {
321         /* jump to same page: we can use a direct jump */
322         tcg_gen_goto_tb(tb_num);
323         tcg_gen_movi_tl(cpu_pc, pc);
324         tcg_gen_movi_tl(cpu_npc, npc);
325         tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
326     } else {
327         /* jump to another page: currently not optimized */
328         tcg_gen_movi_tl(cpu_pc, pc);
329         tcg_gen_movi_tl(cpu_npc, npc);
330         tcg_gen_exit_tb(0);
331     }
332 }
333
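/* The gen_mov_reg_[NZVC] helpers below extract a single PSR condition-code
   bit into the low bit of a target-long register.  */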
334 // XXX suboptimal
335 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
336 {
337     tcg_gen_extu_i32_tl(reg, src);
338     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
339     tcg_gen_andi_tl(reg, reg, 0x1);
340 }
341
342 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
343 {
344     tcg_gen_extu_i32_tl(reg, src);
345     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
346     tcg_gen_andi_tl(reg, reg, 0x1);
347 }
348
349 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
350 {
351     tcg_gen_extu_i32_tl(reg, src);
352     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
353     tcg_gen_andi_tl(reg, reg, 0x1);
354 }
355
356 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
357 {
358     tcg_gen_extu_i32_tl(reg, src);
359     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
360     tcg_gen_andi_tl(reg, reg, 0x1);
361 }
362
363 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
364 {
365     tcg_gen_mov_tl(cpu_cc_src, src1);
366     tcg_gen_movi_tl(cpu_cc_src2, src2);
367     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
368     tcg_gen_mov_tl(dst, cpu_cc_dst);
369 }
370
371 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
372 {
373     tcg_gen_mov_tl(cpu_cc_src, src1);
374     tcg_gen_mov_tl(cpu_cc_src2, src2);
375     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
376     tcg_gen_mov_tl(dst, cpu_cc_dst);
377 }
378
379 static TCGv_i32 gen_add32_carry32(void)
380 {
381     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
382
383     /* Carry is computed from a previous add: (dst < src)  */
384 #if TARGET_LONG_BITS == 64
385     cc_src1_32 = tcg_temp_new_i32();
386     cc_src2_32 = tcg_temp_new_i32();
387     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
388     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
389 #else
390     cc_src1_32 = cpu_cc_dst;
391     cc_src2_32 = cpu_cc_src;
392 #endif
393
394     carry_32 = tcg_temp_new_i32();
395     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
396
397 #if TARGET_LONG_BITS == 64
398     tcg_temp_free_i32(cc_src1_32);
399     tcg_temp_free_i32(cc_src2_32);
400 #endif
401
402     return carry_32;
403 }
404
405 static TCGv_i32 gen_sub32_carry32(void)
406 {
407     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
408
409     /* Carry is computed from a previous borrow: (src1 < src2)  */
410 #if TARGET_LONG_BITS == 64
411     cc_src1_32 = tcg_temp_new_i32();
412     cc_src2_32 = tcg_temp_new_i32();
413     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
414     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
415 #else
416     cc_src1_32 = cpu_cc_src;
417     cc_src2_32 = cpu_cc_src2;
418 #endif
419
420     carry_32 = tcg_temp_new_i32();
421     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
422
423 #if TARGET_LONG_BITS == 64
424     tcg_temp_free_i32(cc_src1_32);
425     tcg_temp_free_i32(cc_src2_32);
426 #endif
427
428     return carry_32;
429 }
430
431 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
432                             TCGv src2, int update_cc)
433 {
434     TCGv_i32 carry_32;
435     TCGv carry;
436
437     switch (dc->cc_op) {
438     case CC_OP_DIV:
439     case CC_OP_LOGIC:
440         /* Carry is known to be zero.  Fall back to plain ADD.  */
441         if (update_cc) {
442             gen_op_add_cc(dst, src1, src2);
443         } else {
444             tcg_gen_add_tl(dst, src1, src2);
445         }
446         return;
447
448     case CC_OP_ADD:
449     case CC_OP_TADD:
450     case CC_OP_TADDTV:
451         if (TARGET_LONG_BITS == 32) {
452             /* We can re-use the host's hardware carry generation by using
453                an ADD2 opcode.  We discard the low part of the output.
454                Ideally we'd combine this operation with the add that
455                generated the carry in the first place.  */
456             carry = tcg_temp_new();
457             tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
458             tcg_temp_free(carry);
459             goto add_done;
460         }
461         carry_32 = gen_add32_carry32();
462         break;
463
464     case CC_OP_SUB:
465     case CC_OP_TSUB:
466     case CC_OP_TSUBTV:
467         carry_32 = gen_sub32_carry32();
468         break;
469
470     default:
471         /* We need external help to produce the carry.  */
472         carry_32 = tcg_temp_new_i32();
473         gen_helper_compute_C_icc(carry_32, cpu_env);
474         break;
475     }
476
477 #if TARGET_LONG_BITS == 64
478     carry = tcg_temp_new();
479     tcg_gen_extu_i32_i64(carry, carry_32);
480 #else
481     carry = carry_32;
482 #endif
483
484     tcg_gen_add_tl(dst, src1, src2);
485     tcg_gen_add_tl(dst, dst, carry);
486
487     tcg_temp_free_i32(carry_32);
488 #if TARGET_LONG_BITS == 64
489     tcg_temp_free(carry);
490 #endif
491
492  add_done:
493     if (update_cc) {
494         tcg_gen_mov_tl(cpu_cc_src, src1);
495         tcg_gen_mov_tl(cpu_cc_src2, src2);
496         tcg_gen_mov_tl(cpu_cc_dst, dst);
497         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
498         dc->cc_op = CC_OP_ADDX;
499     }
500 }
501
502 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2,
                                       DisasContext *dc)
503 {
504     tcg_gen_mov_tl(cpu_cc_src, src1);
505     tcg_gen_movi_tl(cpu_cc_src2, src2);
506     if (src2 == 0) {
507         tcg_gen_mov_tl(cpu_cc_dst, src1);
508         tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
509         dc->cc_op = CC_OP_LOGIC;
510     } else {
511         tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
512         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
513         dc->cc_op = CC_OP_SUB;
514     }
515     tcg_gen_mov_tl(dst, cpu_cc_dst);
516 }
517
518 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
519 {
520     tcg_gen_mov_tl(cpu_cc_src, src1);
521     tcg_gen_mov_tl(cpu_cc_src2, src2);
522     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
523     tcg_gen_mov_tl(dst, cpu_cc_dst);
524 }
525
526 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
527                             TCGv src2, int update_cc)
528 {
529     TCGv_i32 carry_32;
530     TCGv carry;
531
532     switch (dc->cc_op) {
533     case CC_OP_DIV:
534     case CC_OP_LOGIC:
535         /* Carry is known to be zero.  Fall back to plain SUB.  */
536         if (update_cc) {
537             gen_op_sub_cc(dst, src1, src2);
538         } else {
539             tcg_gen_sub_tl(dst, src1, src2);
540         }
541         return;
542
543     case CC_OP_ADD:
544     case CC_OP_TADD:
545     case CC_OP_TADDTV:
546         carry_32 = gen_add32_carry32();
547         break;
548
549     case CC_OP_SUB:
550     case CC_OP_TSUB:
551     case CC_OP_TSUBTV:
552         if (TARGET_LONG_BITS == 32) {
553             /* We can re-use the host's hardware carry generation by using
554                a SUB2 opcode.  We discard the low part of the output.
555                Ideally we'd combine this operation with the add that
556                generated the carry in the first place.  */
557             carry = tcg_temp_new();
558             tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
559             tcg_temp_free(carry);
560             goto sub_done;
561         }
562         carry_32 = gen_sub32_carry32();
563         break;
564
565     default:
566         /* We need external help to produce the carry.  */
567         carry_32 = tcg_temp_new_i32();
568         gen_helper_compute_C_icc(carry_32, cpu_env);
569         break;
570     }
571
572 #if TARGET_LONG_BITS == 64
573     carry = tcg_temp_new();
574     tcg_gen_extu_i32_i64(carry, carry_32);
575 #else
576     carry = carry_32;
577 #endif
578
579     tcg_gen_sub_tl(dst, src1, src2);
580     tcg_gen_sub_tl(dst, dst, carry);
581
582     tcg_temp_free_i32(carry_32);
583 #if TARGET_LONG_BITS == 64
584     tcg_temp_free(carry);
585 #endif
586
587  sub_done:
588     if (update_cc) {
589         tcg_gen_mov_tl(cpu_cc_src, src1);
590         tcg_gen_mov_tl(cpu_cc_src2, src2);
591         tcg_gen_mov_tl(cpu_cc_dst, dst);
592         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
593         dc->cc_op = CC_OP_SUBX;
594     }
595 }
596
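/* MULScc (V8 multiply step), in outline: if Y's low bit is clear the addend
   (src2) is replaced by zero; Y is shifted right with src1's low bit moving
   into its top bit; src1 is shifted right with (N ^ V) moving into bit 31;
   the final add of the two updates the condition codes.  */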
597 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
598 {
599     TCGv r_temp, zero, t0;
600
601     r_temp = tcg_temp_new();
602     t0 = tcg_temp_new();
603
604     /* old op:
605     if (!(env->y & 1))
606         T1 = 0;
607     */
608     zero = tcg_const_tl(0);
609     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
610     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
611     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
612     tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
613                        zero, cpu_cc_src2);
614     tcg_temp_free(zero);
615
616     // b2 = T0 & 1;
617     // env->y = (b2 << 31) | (env->y >> 1);
618     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
619     tcg_gen_shli_tl(r_temp, r_temp, 31);
620     tcg_gen_shri_tl(t0, cpu_y, 1);
621     tcg_gen_andi_tl(t0, t0, 0x7fffffff);
622     tcg_gen_or_tl(t0, t0, r_temp);
623     tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);
624
625     // b1 = N ^ V;
626     gen_mov_reg_N(t0, cpu_psr);
627     gen_mov_reg_V(r_temp, cpu_psr);
628     tcg_gen_xor_tl(t0, t0, r_temp);
629     tcg_temp_free(r_temp);
630
631     // T0 = (b1 << 31) | (T0 >> 1);
632     // src1 = T0;
633     tcg_gen_shli_tl(t0, t0, 31);
634     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
635     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
636     tcg_temp_free(t0);
637
638     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
639
640     tcg_gen_mov_tl(dst, cpu_cc_dst);
641 }
642
643 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
644 {
645 #if TARGET_LONG_BITS == 32
646     if (sign_ext) {
647         tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
648     } else {
649         tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
650     }
651 #else
652     TCGv t0 = tcg_temp_new_i64();
653     TCGv t1 = tcg_temp_new_i64();
654
655     if (sign_ext) {
656         tcg_gen_ext32s_i64(t0, src1);
657         tcg_gen_ext32s_i64(t1, src2);
658     } else {
659         tcg_gen_ext32u_i64(t0, src1);
660         tcg_gen_ext32u_i64(t1, src2);
661     }
662
663     tcg_gen_mul_i64(dst, t0, t1);
664     tcg_temp_free(t0);
665     tcg_temp_free(t1);
666
667     tcg_gen_shri_i64(cpu_y, dst, 32);
668 #endif
669 }
670
671 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
672 {
673     /* zero-extend truncated operands before multiplication */
674     gen_op_multiply(dst, src1, src2, 0);
675 }
676
677 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
678 {
679     /* sign-extend truncated operands before multiplication */
680     gen_op_multiply(dst, src1, src2, 1);
681 }
682
683 // 1
684 static inline void gen_op_eval_ba(TCGv dst)
685 {
686     tcg_gen_movi_tl(dst, 1);
687 }
688
689 // Z
690 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
691 {
692     gen_mov_reg_Z(dst, src);
693 }
694
695 // Z | (N ^ V)
696 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
697 {
698     TCGv t0 = tcg_temp_new();
699     gen_mov_reg_N(t0, src);
700     gen_mov_reg_V(dst, src);
701     tcg_gen_xor_tl(dst, dst, t0);
702     gen_mov_reg_Z(t0, src);
703     tcg_gen_or_tl(dst, dst, t0);
704     tcg_temp_free(t0);
705 }
706
707 // N ^ V
708 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
709 {
710     TCGv t0 = tcg_temp_new();
711     gen_mov_reg_V(t0, src);
712     gen_mov_reg_N(dst, src);
713     tcg_gen_xor_tl(dst, dst, t0);
714     tcg_temp_free(t0);
715 }
716
717 // C | Z
718 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
719 {
720     TCGv t0 = tcg_temp_new();
721     gen_mov_reg_Z(t0, src);
722     gen_mov_reg_C(dst, src);
723     tcg_gen_or_tl(dst, dst, t0);
724     tcg_temp_free(t0);
725 }
726
727 // C
728 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
729 {
730     gen_mov_reg_C(dst, src);
731 }
732
733 // V
734 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
735 {
736     gen_mov_reg_V(dst, src);
737 }
738
739 // 0
740 static inline void gen_op_eval_bn(TCGv dst)
741 {
742     tcg_gen_movi_tl(dst, 0);
743 }
744
745 // N
746 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
747 {
748     gen_mov_reg_N(dst, src);
749 }
750
751 // !Z
752 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
753 {
754     gen_mov_reg_Z(dst, src);
755     tcg_gen_xori_tl(dst, dst, 0x1);
756 }
757
758 // !(Z | (N ^ V))
759 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
760 {
761     gen_op_eval_ble(dst, src);
762     tcg_gen_xori_tl(dst, dst, 0x1);
763 }
764
765 // !(N ^ V)
766 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
767 {
768     gen_op_eval_bl(dst, src);
769     tcg_gen_xori_tl(dst, dst, 0x1);
770 }
771
772 // !(C | Z)
773 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
774 {
775     gen_op_eval_bleu(dst, src);
776     tcg_gen_xori_tl(dst, dst, 0x1);
777 }
778
779 // !C
780 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
781 {
782     gen_mov_reg_C(dst, src);
783     tcg_gen_xori_tl(dst, dst, 0x1);
784 }
785
786 // !N
787 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
788 {
789     gen_mov_reg_N(dst, src);
790     tcg_gen_xori_tl(dst, dst, 0x1);
791 }
792
793 // !V
794 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
795 {
796     gen_mov_reg_V(dst, src);
797     tcg_gen_xori_tl(dst, dst, 0x1);
798 }
799
800 /*
801   FPSR bit field FCC1 | FCC0:
802    0 =
803    1 <
804    2 >
805    3 unordered
806 */
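/* The fcc_offset argument below selects the FCC field being tested: 0 for
   fcc0 (FSR bits 11:10), or 22/24/26 for fcc1/fcc2/fcc3 (FSR bits 33:32,
   35:34, 37:36 on sparc64), as computed in gen_fcompare().  */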
807 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
808                                     unsigned int fcc_offset)
809 {
810     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
811     tcg_gen_andi_tl(reg, reg, 0x1);
812 }
813
814 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
815                                     unsigned int fcc_offset)
816 {
817     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
818     tcg_gen_andi_tl(reg, reg, 0x1);
819 }
820
821 // !0: FCC0 | FCC1
822 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
823                                     unsigned int fcc_offset)
824 {
825     TCGv t0 = tcg_temp_new();
826     gen_mov_reg_FCC0(dst, src, fcc_offset);
827     gen_mov_reg_FCC1(t0, src, fcc_offset);
828     tcg_gen_or_tl(dst, dst, t0);
829     tcg_temp_free(t0);
830 }
831
832 // 1 or 2: FCC0 ^ FCC1
833 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
834                                     unsigned int fcc_offset)
835 {
836     TCGv t0 = tcg_temp_new();
837     gen_mov_reg_FCC0(dst, src, fcc_offset);
838     gen_mov_reg_FCC1(t0, src, fcc_offset);
839     tcg_gen_xor_tl(dst, dst, t0);
840     tcg_temp_free(t0);
841 }
842
843 // 1 or 3: FCC0
844 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
845                                     unsigned int fcc_offset)
846 {
847     gen_mov_reg_FCC0(dst, src, fcc_offset);
848 }
849
850 // 1: FCC0 & !FCC1
851 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
852                                     unsigned int fcc_offset)
853 {
854     TCGv t0 = tcg_temp_new();
855     gen_mov_reg_FCC0(dst, src, fcc_offset);
856     gen_mov_reg_FCC1(t0, src, fcc_offset);
857     tcg_gen_andc_tl(dst, dst, t0);
858     tcg_temp_free(t0);
859 }
860
861 // 2 or 3: FCC1
862 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
863                                     unsigned int fcc_offset)
864 {
865     gen_mov_reg_FCC1(dst, src, fcc_offset);
866 }
867
868 // 2: !FCC0 & FCC1
869 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
870                                     unsigned int fcc_offset)
871 {
872     TCGv t0 = tcg_temp_new();
873     gen_mov_reg_FCC0(dst, src, fcc_offset);
874     gen_mov_reg_FCC1(t0, src, fcc_offset);
875     tcg_gen_andc_tl(dst, t0, dst);
876     tcg_temp_free(t0);
877 }
878
879 // 3: FCC0 & FCC1
880 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
881                                     unsigned int fcc_offset)
882 {
883     TCGv t0 = tcg_temp_new();
884     gen_mov_reg_FCC0(dst, src, fcc_offset);
885     gen_mov_reg_FCC1(t0, src, fcc_offset);
886     tcg_gen_and_tl(dst, dst, t0);
887     tcg_temp_free(t0);
888 }
889
890 // 0: !(FCC0 | FCC1)
891 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
892                                     unsigned int fcc_offset)
893 {
894     TCGv t0 = tcg_temp_new();
895     gen_mov_reg_FCC0(dst, src, fcc_offset);
896     gen_mov_reg_FCC1(t0, src, fcc_offset);
897     tcg_gen_or_tl(dst, dst, t0);
898     tcg_gen_xori_tl(dst, dst, 0x1);
899     tcg_temp_free(t0);
900 }
901
902 // 0 or 3: !(FCC0 ^ FCC1)
903 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
904                                     unsigned int fcc_offset)
905 {
906     TCGv t0 = tcg_temp_new();
907     gen_mov_reg_FCC0(dst, src, fcc_offset);
908     gen_mov_reg_FCC1(t0, src, fcc_offset);
909     tcg_gen_xor_tl(dst, dst, t0);
910     tcg_gen_xori_tl(dst, dst, 0x1);
911     tcg_temp_free(t0);
912 }
913
914 // 0 or 2: !FCC0
915 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
916                                     unsigned int fcc_offset)
917 {
918     gen_mov_reg_FCC0(dst, src, fcc_offset);
919     tcg_gen_xori_tl(dst, dst, 0x1);
920 }
921
922 // !1: !(FCC0 & !FCC1)
923 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
924                                     unsigned int fcc_offset)
925 {
926     TCGv t0 = tcg_temp_new();
927     gen_mov_reg_FCC0(dst, src, fcc_offset);
928     gen_mov_reg_FCC1(t0, src, fcc_offset);
929     tcg_gen_andc_tl(dst, dst, t0);
930     tcg_gen_xori_tl(dst, dst, 0x1);
931     tcg_temp_free(t0);
932 }
933
934 // 0 or 1: !FCC1
935 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
936                                     unsigned int fcc_offset)
937 {
938     gen_mov_reg_FCC1(dst, src, fcc_offset);
939     tcg_gen_xori_tl(dst, dst, 0x1);
940 }
941
942 // !2: !(!FCC0 & FCC1)
943 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
944                                     unsigned int fcc_offset)
945 {
946     TCGv t0 = tcg_temp_new();
947     gen_mov_reg_FCC0(dst, src, fcc_offset);
948     gen_mov_reg_FCC1(t0, src, fcc_offset);
949     tcg_gen_andc_tl(dst, t0, dst);
950     tcg_gen_xori_tl(dst, dst, 0x1);
951     tcg_temp_free(t0);
952 }
953
954 // !3: !(FCC0 & FCC1)
955 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
956                                     unsigned int fcc_offset)
957 {
958     TCGv t0 = tcg_temp_new();
959     gen_mov_reg_FCC0(dst, src, fcc_offset);
960     gen_mov_reg_FCC1(t0, src, fcc_offset);
961     tcg_gen_and_tl(dst, dst, t0);
962     tcg_gen_xori_tl(dst, dst, 0x1);
963     tcg_temp_free(t0);
964 }
965
966 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
967                                target_ulong pc2, TCGv r_cond)
968 {
969     int l1;
970
971     l1 = gen_new_label();
972
973     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
974
975     gen_goto_tb(dc, 0, pc1, pc1 + 4);
976
977     gen_set_label(l1);
978     gen_goto_tb(dc, 1, pc2, pc2 + 4);
979 }
980
981 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
982                                 target_ulong pc2, TCGv r_cond)
983 {
984     int l1;
985
986     l1 = gen_new_label();
987
988     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
989
990     gen_goto_tb(dc, 0, pc2, pc1);
991
992     gen_set_label(l1);
993     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
994 }
995
996 static inline void gen_generic_branch(DisasContext *dc)
997 {
998     TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
999     TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
1000     TCGv zero = tcg_const_tl(0);
1001
1002     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
1003
1004     tcg_temp_free(npc0);
1005     tcg_temp_free(npc1);
1006     tcg_temp_free(zero);
1007 }
1008
1009 /* call this function before using the condition register as it may
1010    have been set for a jump */
1011 static inline void flush_cond(DisasContext *dc)
1012 {
1013     if (dc->npc == JUMP_PC) {
1014         gen_generic_branch(dc);
1015         dc->npc = DYNAMIC_PC;
1016     }
1017 }
1018
1019 static inline void save_npc(DisasContext *dc)
1020 {
1021     if (dc->npc == JUMP_PC) {
1022         gen_generic_branch(dc);
1023         dc->npc = DYNAMIC_PC;
1024     } else if (dc->npc != DYNAMIC_PC) {
1025         tcg_gen_movi_tl(cpu_npc, dc->npc);
1026     }
1027 }
1028
1029 static inline void update_psr(DisasContext *dc)
1030 {
1031     if (dc->cc_op != CC_OP_FLAGS) {
1032         dc->cc_op = CC_OP_FLAGS;
1033         gen_helper_compute_psr(cpu_env);
1034     }
1035 }
1036
1037 static inline void save_state(DisasContext *dc)
1038 {
1039     tcg_gen_movi_tl(cpu_pc, dc->pc);
1040     save_npc(dc);
1041 }
1042
1043 static inline void gen_mov_pc_npc(DisasContext *dc)
1044 {
1045     if (dc->npc == JUMP_PC) {
1046         gen_generic_branch(dc);
1047         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1048         dc->pc = DYNAMIC_PC;
1049     } else if (dc->npc == DYNAMIC_PC) {
1050         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1051         dc->pc = DYNAMIC_PC;
1052     } else {
1053         dc->pc = dc->npc;
1054     }
1055 }
1056
1057 static inline void gen_op_next_insn(void)
1058 {
1059     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1060     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1061 }
1062
1063 static void free_compare(DisasCompare *cmp)
1064 {
1065     if (!cmp->g1) {
1066         tcg_temp_free(cmp->c1);
1067     }
1068     if (!cmp->g2) {
1069         tcg_temp_free(cmp->c2);
1070     }
1071 }
1072
1073 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1074                         DisasContext *dc)
1075 {
1076     static int subcc_cond[16] = {
1077         TCG_COND_NEVER,
1078         TCG_COND_EQ,
1079         TCG_COND_LE,
1080         TCG_COND_LT,
1081         TCG_COND_LEU,
1082         TCG_COND_LTU,
1083         -1, /* neg */
1084         -1, /* overflow */
1085         TCG_COND_ALWAYS,
1086         TCG_COND_NE,
1087         TCG_COND_GT,
1088         TCG_COND_GE,
1089         TCG_COND_GTU,
1090         TCG_COND_GEU,
1091         -1, /* pos */
1092         -1, /* no overflow */
1093     };
1094
1095     static int logic_cond[16] = {
1096         TCG_COND_NEVER,
1097         TCG_COND_EQ,     /* eq:  Z */
1098         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1099         TCG_COND_LT,     /* lt:  N ^ V -> N */
1100         TCG_COND_EQ,     /* leu: C | Z -> Z */
1101         TCG_COND_NEVER,  /* ltu: C -> 0 */
1102         TCG_COND_LT,     /* neg: N */
1103         TCG_COND_NEVER,  /* vs:  V -> 0 */
1104         TCG_COND_ALWAYS,
1105         TCG_COND_NE,     /* ne:  !Z */
1106         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1107         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1108         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1109         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1110         TCG_COND_GE,     /* pos: !N */
1111         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1112     };
1113
1114     TCGv_i32 r_src;
1115     TCGv r_dst;
1116
1117 #ifdef TARGET_SPARC64
1118     if (xcc) {
1119         r_src = cpu_xcc;
1120     } else {
1121         r_src = cpu_psr;
1122     }
1123 #else
1124     r_src = cpu_psr;
1125 #endif
1126
1127     switch (dc->cc_op) {
1128     case CC_OP_LOGIC:
1129         cmp->cond = logic_cond[cond];
1130     do_compare_dst_0:
1131         cmp->is_bool = false;
1132         cmp->g2 = false;
1133         cmp->c2 = tcg_const_tl(0);
1134 #ifdef TARGET_SPARC64
1135         if (!xcc) {
1136             cmp->g1 = false;
1137             cmp->c1 = tcg_temp_new();
1138             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1139             break;
1140         }
1141 #endif
1142         cmp->g1 = true;
1143         cmp->c1 = cpu_cc_dst;
1144         break;
1145
1146     case CC_OP_SUB:
1147         switch (cond) {
1148         case 6:  /* neg */
1149         case 14: /* pos */
1150             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1151             goto do_compare_dst_0;
1152
1153         case 7: /* overflow */
1154         case 15: /* !overflow */
1155             goto do_dynamic;
1156
1157         default:
1158             cmp->cond = subcc_cond[cond];
1159             cmp->is_bool = false;
1160 #ifdef TARGET_SPARC64
1161             if (!xcc) {
1162                 /* Note that sign-extension works for unsigned compares as
1163                    long as both operands are sign-extended.  */
1164                 cmp->g1 = cmp->g2 = false;
1165                 cmp->c1 = tcg_temp_new();
1166                 cmp->c2 = tcg_temp_new();
1167                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1168                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1169                 break;
1170             }
1171 #endif
1172             cmp->g1 = cmp->g2 = true;
1173             cmp->c1 = cpu_cc_src;
1174             cmp->c2 = cpu_cc_src2;
1175             break;
1176         }
1177         break;
1178
1179     default:
1180     do_dynamic:
1181         gen_helper_compute_psr(cpu_env);
1182         dc->cc_op = CC_OP_FLAGS;
1183         /* FALLTHRU */
1184
1185     case CC_OP_FLAGS:
1186         /* We're going to generate a boolean result.  */
1187         cmp->cond = TCG_COND_NE;
1188         cmp->is_bool = true;
1189         cmp->g1 = cmp->g2 = false;
1190         cmp->c1 = r_dst = tcg_temp_new();
1191         cmp->c2 = tcg_const_tl(0);
1192
1193         switch (cond) {
1194         case 0x0:
1195             gen_op_eval_bn(r_dst);
1196             break;
1197         case 0x1:
1198             gen_op_eval_be(r_dst, r_src);
1199             break;
1200         case 0x2:
1201             gen_op_eval_ble(r_dst, r_src);
1202             break;
1203         case 0x3:
1204             gen_op_eval_bl(r_dst, r_src);
1205             break;
1206         case 0x4:
1207             gen_op_eval_bleu(r_dst, r_src);
1208             break;
1209         case 0x5:
1210             gen_op_eval_bcs(r_dst, r_src);
1211             break;
1212         case 0x6:
1213             gen_op_eval_bneg(r_dst, r_src);
1214             break;
1215         case 0x7:
1216             gen_op_eval_bvs(r_dst, r_src);
1217             break;
1218         case 0x8:
1219             gen_op_eval_ba(r_dst);
1220             break;
1221         case 0x9:
1222             gen_op_eval_bne(r_dst, r_src);
1223             break;
1224         case 0xa:
1225             gen_op_eval_bg(r_dst, r_src);
1226             break;
1227         case 0xb:
1228             gen_op_eval_bge(r_dst, r_src);
1229             break;
1230         case 0xc:
1231             gen_op_eval_bgu(r_dst, r_src);
1232             break;
1233         case 0xd:
1234             gen_op_eval_bcc(r_dst, r_src);
1235             break;
1236         case 0xe:
1237             gen_op_eval_bpos(r_dst, r_src);
1238             break;
1239         case 0xf:
1240             gen_op_eval_bvc(r_dst, r_src);
1241             break;
1242         }
1243         break;
1244     }
1245 }
1246
1247 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1248 {
1249     unsigned int offset;
1250     TCGv r_dst;
1251
1252     /* For now we still generate a straight boolean result.  */
1253     cmp->cond = TCG_COND_NE;
1254     cmp->is_bool = true;
1255     cmp->g1 = cmp->g2 = false;
1256     cmp->c1 = r_dst = tcg_temp_new();
1257     cmp->c2 = tcg_const_tl(0);
1258
1259     switch (cc) {
1260     default:
1261     case 0x0:
1262         offset = 0;
1263         break;
1264     case 0x1:
1265         offset = 32 - 10;
1266         break;
1267     case 0x2:
1268         offset = 34 - 10;
1269         break;
1270     case 0x3:
1271         offset = 36 - 10;
1272         break;
1273     }
1274
1275     switch (cond) {
1276     case 0x0:
1277         gen_op_eval_bn(r_dst);
1278         break;
1279     case 0x1:
1280         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1281         break;
1282     case 0x2:
1283         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1284         break;
1285     case 0x3:
1286         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1287         break;
1288     case 0x4:
1289         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1290         break;
1291     case 0x5:
1292         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1293         break;
1294     case 0x6:
1295         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1296         break;
1297     case 0x7:
1298         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1299         break;
1300     case 0x8:
1301         gen_op_eval_ba(r_dst);
1302         break;
1303     case 0x9:
1304         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1305         break;
1306     case 0xa:
1307         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1308         break;
1309     case 0xb:
1310         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1311         break;
1312     case 0xc:
1313         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1314         break;
1315     case 0xd:
1316         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1317         break;
1318     case 0xe:
1319         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1320         break;
1321     case 0xf:
1322         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1323         break;
1324     }
1325 }
1326
1327 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1328                      DisasContext *dc)
1329 {
1330     DisasCompare cmp;
1331     gen_compare(&cmp, cc, cond, dc);
1332
1333     /* The interface is to return a boolean in r_dst.  */
1334     if (cmp.is_bool) {
1335         tcg_gen_mov_tl(r_dst, cmp.c1);
1336     } else {
1337         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1338     }
1339
1340     free_compare(&cmp);
1341 }
1342
1343 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1344 {
1345     DisasCompare cmp;
1346     gen_fcompare(&cmp, cc, cond);
1347
1348     /* The interface is to return a boolean in r_dst.  */
1349     if (cmp.is_bool) {
1350         tcg_gen_mov_tl(r_dst, cmp.c1);
1351     } else {
1352         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1353     }
1354
1355     free_compare(&cmp);
1356 }
1357
1358 #ifdef TARGET_SPARC64
1359 // Inverted logic: entries hold the negation of each rcond condition; gen_compare_reg() inverts them back
1360 static const int gen_tcg_cond_reg[8] = {
1361     -1,
1362     TCG_COND_NE,
1363     TCG_COND_GT,
1364     TCG_COND_GE,
1365     -1,
1366     TCG_COND_EQ,
1367     TCG_COND_LE,
1368     TCG_COND_LT,
1369 };
1370
1371 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1372 {
1373     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1374     cmp->is_bool = false;
1375     cmp->g1 = true;
1376     cmp->g2 = false;
1377     cmp->c1 = r_src;
1378     cmp->c2 = tcg_const_tl(0);
1379 }
1380
1381 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1382 {
1383     DisasCompare cmp;
1384     gen_compare_reg(&cmp, cond, r_src);
1385
1386     /* The interface is to return a boolean in r_dst.  */
1387     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1388
1389     free_compare(&cmp);
1390 }
1391 #endif
1392
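/* Bicc/BPcc handling, roughly: cond 0 is "branch never" and cond 8 "branch
   always", where the annul bit only decides whether the delay slot runs.
   For a real conditional branch the condition is evaluated into cpu_cond;
   without the annul bit both candidate npc values are recorded in jump_pc[]
   so the delay slot can still be translated before the branch resolves.  */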
1393 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1394 {
1395     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1396     target_ulong target = dc->pc + offset;
1397
1398 #ifdef TARGET_SPARC64
1399     if (unlikely(AM_CHECK(dc))) {
1400         target &= 0xffffffffULL;
1401     }
1402 #endif
1403     if (cond == 0x0) {
1404         /* unconditional not taken */
1405         if (a) {
1406             dc->pc = dc->npc + 4;
1407             dc->npc = dc->pc + 4;
1408         } else {
1409             dc->pc = dc->npc;
1410             dc->npc = dc->pc + 4;
1411         }
1412     } else if (cond == 0x8) {
1413         /* unconditional taken */
1414         if (a) {
1415             dc->pc = target;
1416             dc->npc = dc->pc + 4;
1417         } else {
1418             dc->pc = dc->npc;
1419             dc->npc = target;
1420             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1421         }
1422     } else {
1423         flush_cond(dc);
1424         gen_cond(cpu_cond, cc, cond, dc);
1425         if (a) {
1426             gen_branch_a(dc, target, dc->npc, cpu_cond);
1427             dc->is_br = 1;
1428         } else {
1429             dc->pc = dc->npc;
1430             dc->jump_pc[0] = target;
1431             if (unlikely(dc->npc == DYNAMIC_PC)) {
1432                 dc->jump_pc[1] = DYNAMIC_PC;
1433                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1434             } else {
1435                 dc->jump_pc[1] = dc->npc + 4;
1436                 dc->npc = JUMP_PC;
1437             }
1438         }
1439     }
1440 }
1441
1442 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1443 {
1444     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1445     target_ulong target = dc->pc + offset;
1446
1447 #ifdef TARGET_SPARC64
1448     if (unlikely(AM_CHECK(dc))) {
1449         target &= 0xffffffffULL;
1450     }
1451 #endif
1452     if (cond == 0x0) {
1453         /* unconditional not taken */
1454         if (a) {
1455             dc->pc = dc->npc + 4;
1456             dc->npc = dc->pc + 4;
1457         } else {
1458             dc->pc = dc->npc;
1459             dc->npc = dc->pc + 4;
1460         }
1461     } else if (cond == 0x8) {
1462         /* unconditional taken */
1463         if (a) {
1464             dc->pc = target;
1465             dc->npc = dc->pc + 4;
1466         } else {
1467             dc->pc = dc->npc;
1468             dc->npc = target;
1469             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1470         }
1471     } else {
1472         flush_cond(dc);
1473         gen_fcond(cpu_cond, cc, cond);
1474         if (a) {
1475             gen_branch_a(dc, target, dc->npc, cpu_cond);
1476             dc->is_br = 1;
1477         } else {
1478             dc->pc = dc->npc;
1479             dc->jump_pc[0] = target;
1480             if (unlikely(dc->npc == DYNAMIC_PC)) {
1481                 dc->jump_pc[1] = DYNAMIC_PC;
1482                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1483             } else {
1484                 dc->jump_pc[1] = dc->npc + 4;
1485                 dc->npc = JUMP_PC;
1486             }
1487         }
1488     }
1489 }
1490
1491 #ifdef TARGET_SPARC64
1492 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1493                           TCGv r_reg)
1494 {
1495     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1496     target_ulong target = dc->pc + offset;
1497
1498     if (unlikely(AM_CHECK(dc))) {
1499         target &= 0xffffffffULL;
1500     }
1501     flush_cond(dc);
1502     gen_cond_reg(cpu_cond, cond, r_reg);
1503     if (a) {
1504         gen_branch_a(dc, target, dc->npc, cpu_cond);
1505         dc->is_br = 1;
1506     } else {
1507         dc->pc = dc->npc;
1508         dc->jump_pc[0] = target;
1509         if (unlikely(dc->npc == DYNAMIC_PC)) {
1510             dc->jump_pc[1] = DYNAMIC_PC;
1511             tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1512         } else {
1513             dc->jump_pc[1] = dc->npc + 4;
1514             dc->npc = JUMP_PC;
1515         }
1516     }
1517 }
1518
1519 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1520 {
1521     switch (fccno) {
1522     case 0:
1523         gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1524         break;
1525     case 1:
1526         gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1527         break;
1528     case 2:
1529         gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1530         break;
1531     case 3:
1532         gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1533         break;
1534     }
1535 }
1536
1537 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1538 {
1539     switch (fccno) {
1540     case 0:
1541         gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1542         break;
1543     case 1:
1544         gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1545         break;
1546     case 2:
1547         gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1548         break;
1549     case 3:
1550         gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1551         break;
1552     }
1553 }
1554
1555 static inline void gen_op_fcmpq(int fccno)
1556 {
1557     switch (fccno) {
1558     case 0:
1559         gen_helper_fcmpq(cpu_env);
1560         break;
1561     case 1:
1562         gen_helper_fcmpq_fcc1(cpu_env);
1563         break;
1564     case 2:
1565         gen_helper_fcmpq_fcc2(cpu_env);
1566         break;
1567     case 3:
1568         gen_helper_fcmpq_fcc3(cpu_env);
1569         break;
1570     }
1571 }
1572
1573 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1574 {
1575     switch (fccno) {
1576     case 0:
1577         gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1578         break;
1579     case 1:
1580         gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1581         break;
1582     case 2:
1583         gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1584         break;
1585     case 3:
1586         gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1587         break;
1588     }
1589 }
1590
1591 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1592 {
1593     switch (fccno) {
1594     case 0:
1595         gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1596         break;
1597     case 1:
1598         gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1599         break;
1600     case 2:
1601         gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1602         break;
1603     case 3:
1604         gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1605         break;
1606     }
1607 }
1608
1609 static inline void gen_op_fcmpeq(int fccno)
1610 {
1611     switch (fccno) {
1612     case 0:
1613         gen_helper_fcmpeq(cpu_env);
1614         break;
1615     case 1:
1616         gen_helper_fcmpeq_fcc1(cpu_env);
1617         break;
1618     case 2:
1619         gen_helper_fcmpeq_fcc2(cpu_env);
1620         break;
1621     case 3:
1622         gen_helper_fcmpeq_fcc3(cpu_env);
1623         break;
1624     }
1625 }
1626
1627 #else
1628
1629 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1630 {
1631     gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1632 }
1633
1634 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1635 {
1636     gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1637 }
1638
1639 static inline void gen_op_fcmpq(int fccno)
1640 {
1641     gen_helper_fcmpq(cpu_env);
1642 }
1643
1644 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1645 {
1646     gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1647 }
1648
1649 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1650 {
1651     gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1652 }
1653
1654 static inline void gen_op_fcmpeq(int fccno)
1655 {
1656     gen_helper_fcmpeq(cpu_env);
1657 }
1658 #endif
1659
1660 static inline void gen_op_fpexception_im(int fsr_flags)
1661 {
1662     TCGv_i32 r_const;
1663
1664     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1665     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1666     r_const = tcg_const_i32(TT_FP_EXCP);
1667     gen_helper_raise_exception(cpu_env, r_const);
1668     tcg_temp_free_i32(r_const);
1669 }
1670
1671 static int gen_trap_ifnofpu(DisasContext *dc)
1672 {
1673 #if !defined(CONFIG_USER_ONLY)
1674     if (!dc->fpu_enabled) {
1675         TCGv_i32 r_const;
1676
1677         save_state(dc);
1678         r_const = tcg_const_i32(TT_NFPU_INSN);
1679         gen_helper_raise_exception(cpu_env, r_const);
1680         tcg_temp_free_i32(r_const);
1681         dc->is_br = 1;
1682         return 1;
1683     }
1684 #endif
1685     return 0;
1686 }
1687
1688 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1689 {
1690     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1691 }
1692
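/* The gen_fop_* templates below factor out the load/call/store pattern for
   FP helpers: the trailing letters name the destination and source operand
   sizes (F = 32-bit single, D = 64-bit double, Q = 128-bit quad staged
   through QT0/QT1), e.g. gen_fop_DFF produces a double from two singles.  */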
1693 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1694                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1695 {
1696     TCGv_i32 dst, src;
1697
1698     src = gen_load_fpr_F(dc, rs);
1699     dst = gen_dest_fpr_F(dc);
1700
1701     gen(dst, cpu_env, src);
1702
1703     gen_store_fpr_F(dc, rd, dst);
1704 }
1705
1706 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1707                                  void (*gen)(TCGv_i32, TCGv_i32))
1708 {
1709     TCGv_i32 dst, src;
1710
1711     src = gen_load_fpr_F(dc, rs);
1712     dst = gen_dest_fpr_F(dc);
1713
1714     gen(dst, src);
1715
1716     gen_store_fpr_F(dc, rd, dst);
1717 }
1718
1719 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1720                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1721 {
1722     TCGv_i32 dst, src1, src2;
1723
1724     src1 = gen_load_fpr_F(dc, rs1);
1725     src2 = gen_load_fpr_F(dc, rs2);
1726     dst = gen_dest_fpr_F(dc);
1727
1728     gen(dst, cpu_env, src1, src2);
1729
1730     gen_store_fpr_F(dc, rd, dst);
1731 }
1732
1733 #ifdef TARGET_SPARC64
1734 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1735                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1736 {
1737     TCGv_i32 dst, src1, src2;
1738
1739     src1 = gen_load_fpr_F(dc, rs1);
1740     src2 = gen_load_fpr_F(dc, rs2);
1741     dst = gen_dest_fpr_F(dc);
1742
1743     gen(dst, src1, src2);
1744
1745     gen_store_fpr_F(dc, rd, dst);
1746 }
1747 #endif
1748
1749 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1750                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1751 {
1752     TCGv_i64 dst, src;
1753
1754     src = gen_load_fpr_D(dc, rs);
1755     dst = gen_dest_fpr_D(dc, rd);
1756
1757     gen(dst, cpu_env, src);
1758
1759     gen_store_fpr_D(dc, rd, dst);
1760 }
1761
1762 #ifdef TARGET_SPARC64
1763 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1764                                  void (*gen)(TCGv_i64, TCGv_i64))
1765 {
1766     TCGv_i64 dst, src;
1767
1768     src = gen_load_fpr_D(dc, rs);
1769     dst = gen_dest_fpr_D(dc, rd);
1770
1771     gen(dst, src);
1772
1773     gen_store_fpr_D(dc, rd, dst);
1774 }
1775 #endif
1776
1777 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1778                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1779 {
1780     TCGv_i64 dst, src1, src2;
1781
1782     src1 = gen_load_fpr_D(dc, rs1);
1783     src2 = gen_load_fpr_D(dc, rs2);
1784     dst = gen_dest_fpr_D(dc, rd);
1785
1786     gen(dst, cpu_env, src1, src2);
1787
1788     gen_store_fpr_D(dc, rd, dst);
1789 }
1790
1791 #ifdef TARGET_SPARC64
1792 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1793                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1794 {
1795     TCGv_i64 dst, src1, src2;
1796
1797     src1 = gen_load_fpr_D(dc, rs1);
1798     src2 = gen_load_fpr_D(dc, rs2);
1799     dst = gen_dest_fpr_D(dc, rd);
1800
1801     gen(dst, src1, src2);
1802
1803     gen_store_fpr_D(dc, rd, dst);
1804 }
1805
1806 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1807                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1808 {
1809     TCGv_i64 dst, src1, src2;
1810
1811     src1 = gen_load_fpr_D(dc, rs1);
1812     src2 = gen_load_fpr_D(dc, rs2);
1813     dst = gen_dest_fpr_D(dc, rd);
1814
1815     gen(dst, cpu_gsr, src1, src2);
1816
1817     gen_store_fpr_D(dc, rd, dst);
1818 }
1819
1820 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1821                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1822 {
1823     TCGv_i64 dst, src0, src1, src2;
1824
1825     src1 = gen_load_fpr_D(dc, rs1);
1826     src2 = gen_load_fpr_D(dc, rs2);
1827     src0 = gen_load_fpr_D(dc, rd);
1828     dst = gen_dest_fpr_D(dc, rd);
1829
1830     gen(dst, src0, src1, src2);
1831
1832     gen_store_fpr_D(dc, rd, dst);
1833 }
1834 #endif
1835
1836 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1837                               void (*gen)(TCGv_ptr))
1838 {
1839     gen_op_load_fpr_QT1(QFPREG(rs));
1840
1841     gen(cpu_env);
1842
1843     gen_op_store_QT0_fpr(QFPREG(rd));
1844     gen_update_fprs_dirty(QFPREG(rd));
1845 }
1846
1847 #ifdef TARGET_SPARC64
1848 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1849                                  void (*gen)(TCGv_ptr))
1850 {
1851     gen_op_load_fpr_QT1(QFPREG(rs));
1852
1853     gen(cpu_env);
1854
1855     gen_op_store_QT0_fpr(QFPREG(rd));
1856     gen_update_fprs_dirty(QFPREG(rd));
1857 }
1858 #endif
1859
1860 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1861                                void (*gen)(TCGv_ptr))
1862 {
1863     gen_op_load_fpr_QT0(QFPREG(rs1));
1864     gen_op_load_fpr_QT1(QFPREG(rs2));
1865
1866     gen(cpu_env);
1867
1868     gen_op_store_QT0_fpr(QFPREG(rd));
1869     gen_update_fprs_dirty(QFPREG(rd));
1870 }
1871
1872 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1873                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1874 {
1875     TCGv_i64 dst;
1876     TCGv_i32 src1, src2;
1877
1878     src1 = gen_load_fpr_F(dc, rs1);
1879     src2 = gen_load_fpr_F(dc, rs2);
1880     dst = gen_dest_fpr_D(dc, rd);
1881
1882     gen(dst, cpu_env, src1, src2);
1883
1884     gen_store_fpr_D(dc, rd, dst);
1885 }
1886
1887 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1888                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1889 {
1890     TCGv_i64 src1, src2;
1891
1892     src1 = gen_load_fpr_D(dc, rs1);
1893     src2 = gen_load_fpr_D(dc, rs2);
1894
1895     gen(cpu_env, src1, src2);
1896
1897     gen_op_store_QT0_fpr(QFPREG(rd));
1898     gen_update_fprs_dirty(QFPREG(rd));
1899 }
1900
1901 #ifdef TARGET_SPARC64
1902 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1903                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1904 {
1905     TCGv_i64 dst;
1906     TCGv_i32 src;
1907
1908     src = gen_load_fpr_F(dc, rs);
1909     dst = gen_dest_fpr_D(dc, rd);
1910
1911     gen(dst, cpu_env, src);
1912
1913     gen_store_fpr_D(dc, rd, dst);
1914 }
1915 #endif
1916
1917 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1918                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1919 {
1920     TCGv_i64 dst;
1921     TCGv_i32 src;
1922
1923     src = gen_load_fpr_F(dc, rs);
1924     dst = gen_dest_fpr_D(dc, rd);
1925
1926     gen(dst, cpu_env, src);
1927
1928     gen_store_fpr_D(dc, rd, dst);
1929 }
1930
1931 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1932                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1933 {
1934     TCGv_i32 dst;
1935     TCGv_i64 src;
1936
1937     src = gen_load_fpr_D(dc, rs);
1938     dst = gen_dest_fpr_F(dc);
1939
1940     gen(dst, cpu_env, src);
1941
1942     gen_store_fpr_F(dc, rd, dst);
1943 }
1944
1945 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1946                               void (*gen)(TCGv_i32, TCGv_ptr))
1947 {
1948     TCGv_i32 dst;
1949
1950     gen_op_load_fpr_QT1(QFPREG(rs));
1951     dst = gen_dest_fpr_F(dc);
1952
1953     gen(dst, cpu_env);
1954
1955     gen_store_fpr_F(dc, rd, dst);
1956 }
1957
1958 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1959                               void (*gen)(TCGv_i64, TCGv_ptr))
1960 {
1961     TCGv_i64 dst;
1962
1963     gen_op_load_fpr_QT1(QFPREG(rs));
1964     dst = gen_dest_fpr_D(dc, rd);
1965
1966     gen(dst, cpu_env);
1967
1968     gen_store_fpr_D(dc, rd, dst);
1969 }
1970
1971 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1972                                  void (*gen)(TCGv_ptr, TCGv_i32))
1973 {
1974     TCGv_i32 src;
1975
1976     src = gen_load_fpr_F(dc, rs);
1977
1978     gen(cpu_env, src);
1979
1980     gen_op_store_QT0_fpr(QFPREG(rd));
1981     gen_update_fprs_dirty(QFPREG(rd));
1982 }
1983
1984 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1985                                  void (*gen)(TCGv_ptr, TCGv_i64))
1986 {
1987     TCGv_i64 src;
1988
1989     src = gen_load_fpr_D(dc, rs);
1990
1991     gen(cpu_env, src);
1992
1993     gen_op_store_QT0_fpr(QFPREG(rd));
1994     gen_update_fprs_dirty(QFPREG(rd));
1995 }
1996
1997 /* asi moves */
1998 #ifdef TARGET_SPARC64
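/* Resolve the ASI for an alternate-space access: with the i bit set the ASI
   is taken from the %asi register at run time (cpu_asi); otherwise the
   eight-bit ASI field in insn[12:5] becomes a translation-time constant.  */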
1999 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
2000 {
2001     int asi;
2002     TCGv_i32 r_asi;
2003
2004     if (IS_IMM) {
2005         r_asi = tcg_temp_new_i32();
2006         tcg_gen_mov_i32(r_asi, cpu_asi);
2007     } else {
2008         asi = GET_FIELD(insn, 19, 26);
2009         r_asi = tcg_const_i32(asi);
2010     }
2011     return r_asi;
2012 }
2013
2014 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2015                               int sign)
2016 {
2017     TCGv_i32 r_asi, r_size, r_sign;
2018
2019     r_asi = gen_get_asi(insn, addr);
2020     r_size = tcg_const_i32(size);
2021     r_sign = tcg_const_i32(sign);
2022     gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
2023     tcg_temp_free_i32(r_sign);
2024     tcg_temp_free_i32(r_size);
2025     tcg_temp_free_i32(r_asi);
2026 }
2027
2028 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2029 {
2030     TCGv_i32 r_asi, r_size;
2031
2032     r_asi = gen_get_asi(insn, addr);
2033     r_size = tcg_const_i32(size);
2034     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2035     tcg_temp_free_i32(r_size);
2036     tcg_temp_free_i32(r_asi);
2037 }
2038
2039 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
2040 {
2041     TCGv_i32 r_asi, r_size, r_rd;
2042
2043     r_asi = gen_get_asi(insn, addr);
2044     r_size = tcg_const_i32(size);
2045     r_rd = tcg_const_i32(rd);
2046     gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2047     tcg_temp_free_i32(r_rd);
2048     tcg_temp_free_i32(r_size);
2049     tcg_temp_free_i32(r_asi);
2050 }
2051
2052 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
2053 {
2054     TCGv_i32 r_asi, r_size, r_rd;
2055
2056     r_asi = gen_get_asi(insn, addr);
2057     r_size = tcg_const_i32(size);
2058     r_rd = tcg_const_i32(rd);
2059     gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2060     tcg_temp_free_i32(r_rd);
2061     tcg_temp_free_i32(r_size);
2062     tcg_temp_free_i32(r_asi);
2063 }
2064
2065 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2066 {
2067     TCGv_i32 r_asi, r_size, r_sign;
2068     TCGv_i64 t64 = tcg_temp_new_i64();
2069
2070     r_asi = gen_get_asi(insn, addr);
2071     r_size = tcg_const_i32(4);
2072     r_sign = tcg_const_i32(0);
2073     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2074     tcg_temp_free_i32(r_sign);
2075     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2076     tcg_temp_free_i32(r_size);
2077     tcg_temp_free_i32(r_asi);
2078     tcg_gen_trunc_i64_tl(dst, t64);
2079     tcg_temp_free_i64(t64);
2080 }
2081
2082 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2083                                 int insn, int rd)
2084 {
2085     TCGv_i32 r_asi, r_rd;
2086
2087     r_asi = gen_get_asi(insn, addr);
2088     r_rd = tcg_const_i32(rd);
2089     gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
2090     tcg_temp_free_i32(r_rd);
2091     tcg_temp_free_i32(r_asi);
2092 }
2093
2094 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2095                                 int insn, int rd)
2096 {
2097     TCGv_i32 r_asi, r_size;
2098     TCGv lo = gen_load_gpr(dc, rd + 1);
2099     TCGv_i64 t64 = tcg_temp_new_i64();
2100
2101     tcg_gen_concat_tl_i64(t64, lo, hi);
2102     r_asi = gen_get_asi(insn, addr);
2103     r_size = tcg_const_i32(8);
2104     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2105     tcg_temp_free_i32(r_size);
2106     tcg_temp_free_i32(r_asi);
2107     tcg_temp_free_i64(t64);
2108 }
2109
2110 static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
2111                                TCGv val2, int insn, int rd)
2112 {
2113     TCGv val1 = gen_load_gpr(dc, rd);
2114     TCGv dst = gen_dest_gpr(dc, rd);
2115     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2116
2117     gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
2118     tcg_temp_free_i32(r_asi);
2119     gen_store_gpr(dc, rd, dst);
2120 }
2121
2122 static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
2123                                 TCGv val2, int insn, int rd)
2124 {
2125     TCGv val1 = gen_load_gpr(dc, rd);
2126     TCGv dst = gen_dest_gpr(dc, rd);
2127     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2128
2129     gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
2130     tcg_temp_free_i32(r_asi);
2131     gen_store_gpr(dc, rd, dst);
2132 }
2133
2134 #elif !defined(CONFIG_USER_ONLY)
2135
2136 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2137                               int sign)
2138 {
2139     TCGv_i32 r_asi, r_size, r_sign;
2140     TCGv_i64 t64 = tcg_temp_new_i64();
2141
2142     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2143     r_size = tcg_const_i32(size);
2144     r_sign = tcg_const_i32(sign);
2145     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2146     tcg_temp_free_i32(r_sign);
2147     tcg_temp_free_i32(r_size);
2148     tcg_temp_free_i32(r_asi);
2149     tcg_gen_trunc_i64_tl(dst, t64);
2150     tcg_temp_free_i64(t64);
2151 }
2152
2153 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2154 {
2155     TCGv_i32 r_asi, r_size;
2156     TCGv_i64 t64 = tcg_temp_new_i64();
2157
2158     tcg_gen_extu_tl_i64(t64, src);
2159     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2160     r_size = tcg_const_i32(size);
2161     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2162     tcg_temp_free_i32(r_size);
2163     tcg_temp_free_i32(r_asi);
2164     tcg_temp_free_i64(t64);
2165 }
2166
2167 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2168 {
2169     TCGv_i32 r_asi, r_size, r_sign;
2170     TCGv_i64 r_val, t64;
2171
2172     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2173     r_size = tcg_const_i32(4);
2174     r_sign = tcg_const_i32(0);
2175     t64 = tcg_temp_new_i64();
2176     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2177     tcg_temp_free_i32(r_sign);
2178     r_val = tcg_temp_new_i64();
2179     tcg_gen_extu_tl_i64(r_val, src);
2180     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2181     tcg_temp_free_i64(r_val);
2182     tcg_temp_free_i32(r_size);
2183     tcg_temp_free_i32(r_asi);
2184     tcg_gen_trunc_i64_tl(dst, t64);
2185     tcg_temp_free_i64(t64);
2186 }
2187
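/* LDDA: the 64-bit value fetched through the ASI helper is split across the
   register pair, rd receiving bits 63:32 and rd + 1 bits 31:0, matching the
   big-endian even/odd pair layout.  */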
2188 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2189                                 int insn, int rd)
2190 {
2191     TCGv_i32 r_asi, r_size, r_sign;
2192     TCGv t;
2193     TCGv_i64 t64;
2194
2195     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2196     r_size = tcg_const_i32(8);
2197     r_sign = tcg_const_i32(0);
2198     t64 = tcg_temp_new_i64();
2199     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2200     tcg_temp_free_i32(r_sign);
2201     tcg_temp_free_i32(r_size);
2202     tcg_temp_free_i32(r_asi);
2203
2204     t = gen_dest_gpr(dc, rd + 1);
2205     tcg_gen_trunc_i64_tl(t, t64);
2206     gen_store_gpr(dc, rd + 1, t);
2207
2208     tcg_gen_shri_i64(t64, t64, 32);
2209     tcg_gen_trunc_i64_tl(hi, t64);
2210     tcg_temp_free_i64(t64);
2211     gen_store_gpr(dc, rd, hi);
2212 }
2213
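/* STDA: the register pair is written as one 64-bit access, rd in bits 63:32
   and rd + 1 in bits 31:0 (tcg_gen_concat_tl_i64 puts "hi" in the upper
   half and "lo" in the lower).  */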
2214 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2215                                 int insn, int rd)
2216 {
2217     TCGv_i32 r_asi, r_size;
2218     TCGv lo = gen_load_gpr(dc, rd + 1);
2219     TCGv_i64 t64 = tcg_temp_new_i64();
2220
2221     tcg_gen_concat_tl_i64(t64, lo, hi);
2222     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2223     r_size = tcg_const_i32(8);
2224     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2225     tcg_temp_free_i32(r_size);
2226     tcg_temp_free_i32(r_asi);
2227     tcg_temp_free_i64(t64);
2228 }
2229 #endif
2230
2231 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
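/* LDSTUB on an alternate space: load the addressed byte, then store 0xff
   back to the same location through the same ASI.  */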
2232 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2233 {
2234     TCGv_i64 r_val;
2235     TCGv_i32 r_asi, r_size;
2236
2237     gen_ld_asi(dst, addr, insn, 1, 0);
2238
2239     r_val = tcg_const_i64(0xffULL);
2240     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2241     r_size = tcg_const_i32(1);
2242     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2243     tcg_temp_free_i32(r_size);
2244     tcg_temp_free_i32(r_asi);
2245     tcg_temp_free_i64(r_val);
2246 }
2247 #endif
2248
2249 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2250 {
2251     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2252     return gen_load_gpr(dc, rs1);
2253 }
2254
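/* Second operand of a format-3 instruction: either the sign-extended 13-bit
   immediate from insn[12:0] (i bit set) or the register named by rs2 in
   insn[4:0].  */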
2255 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2256 {
2257     if (IS_IMM) { /* immediate */
2258         target_long simm = GET_FIELDs(insn, 19, 31);
2259         TCGv t = get_temp_tl(dc);
2260         tcg_gen_movi_tl(t, simm);
2261         return t;
2262     } else {      /* register */
2263         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2264         return gen_load_gpr(dc, rs2);
2265     }
2266 }
2267
2268 #ifdef TARGET_SPARC64
2269 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2270 {
2271     TCGv_i32 c32, zero, dst, s1, s2;
2272
2273     /* We have two choices here: extend the 32-bit data and use movcond_i64,
2274        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2275        the latter.  */
2276     c32 = tcg_temp_new_i32();
2277     if (cmp->is_bool) {
2278         tcg_gen_trunc_i64_i32(c32, cmp->c1);
2279     } else {
2280         TCGv_i64 c64 = tcg_temp_new_i64();
2281         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2282         tcg_gen_trunc_i64_i32(c32, c64);
2283         tcg_temp_free_i64(c64);
2284     }
2285
2286     s1 = gen_load_fpr_F(dc, rs);
2287     s2 = gen_load_fpr_F(dc, rd);
2288     dst = gen_dest_fpr_F(dc);
2289     zero = tcg_const_i32(0);
2290
2291     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2292
2293     tcg_temp_free_i32(c32);
2294     tcg_temp_free_i32(zero);
2295     gen_store_fpr_F(dc, rd, dst);
2296 }
2297
2298 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2299 {
2300     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2301     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2302                         gen_load_fpr_D(dc, rs),
2303                         gen_load_fpr_D(dc, rd));
2304     gen_store_fpr_D(dc, rd, dst);
2305 }
2306
2307 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2308 {
2309     int qd = QFPREG(rd);
2310     int qs = QFPREG(rs);
2311
2312     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2313                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2314     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2315                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2316
2317     gen_update_fprs_dirty(qd);
2318 }
2319
2320 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2321 {
2322     TCGv_i32 r_tl = tcg_temp_new_i32();
2323
2324     /* load env->tl into r_tl */
2325     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2326
2327     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be a power of 2 */
2328     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2329
2330     /* calculate offset to current trap state from env->ts, reuse r_tl */
2331     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2332     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2333
2334     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2335     {
2336         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2337         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2338         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2339         tcg_temp_free_ptr(r_tl_tmp);
2340     }
2341
2342     tcg_temp_free_i32(r_tl);
2343 }
2344
2345 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2346                      int width, bool cc, bool left)
2347 {
2348     TCGv lo1, lo2, t1, t2;
2349     uint64_t amask, tabl, tabr;
2350     int shift, imask, omask;
2351
2352     if (cc) {
2353         tcg_gen_mov_tl(cpu_cc_src, s1);
2354         tcg_gen_mov_tl(cpu_cc_src2, s2);
2355         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2356         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2357         dc->cc_op = CC_OP_SUB;
2358     }
2359
2360     /* Theory of operation: there are two tables, left and right (not to
2361        be confused with the left and right versions of the opcode).  These
2362        are indexed by the low 3 bits of the inputs.  To make things "easy",
2363        these tables are loaded into two constants, TABL and TABR below.
2364        The operation index = (input & imask) << shift calculates the index
2365        into the constant, while val = (table >> index) & omask calculates
2366        the value we're looking for.  */
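    /* Example: for width 8 with the "left" tables, an input whose low three
       bits are 2 gives index 16, so (TABL >> 16) & 0xff == 0xfc for s1 and
       (TABR >> 16) & 0xff == 0x07 for s2.  */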
2367     switch (width) {
2368     case 8:
2369         imask = 0x7;
2370         shift = 3;
2371         omask = 0xff;
2372         if (left) {
2373             tabl = 0x80c0e0f0f8fcfeffULL;
2374             tabr = 0xff7f3f1f0f070301ULL;
2375         } else {
2376             tabl = 0x0103070f1f3f7fffULL;
2377             tabr = 0xfffefcf8f0e0c080ULL;
2378         }
2379         break;
2380     case 16:
2381         imask = 0x6;
2382         shift = 1;
2383         omask = 0xf;
2384         if (left) {
2385             tabl = 0x8cef;
2386             tabr = 0xf731;
2387         } else {
2388             tabl = 0x137f;
2389             tabr = 0xfec8;
2390         }
2391         break;
2392     case 32:
2393         imask = 0x4;
2394         shift = 0;
2395         omask = 0x3;
2396         if (left) {
2397             tabl = (2 << 2) | 3;
2398             tabr = (3 << 2) | 1;
2399         } else {
2400             tabl = (1 << 2) | 3;
2401             tabr = (3 << 2) | 2;
2402         }
2403         break;
2404     default:
2405         abort();
2406     }
2407
2408     lo1 = tcg_temp_new();
2409     lo2 = tcg_temp_new();
2410     tcg_gen_andi_tl(lo1, s1, imask);
2411     tcg_gen_andi_tl(lo2, s2, imask);
2412     tcg_gen_shli_tl(lo1, lo1, shift);
2413     tcg_gen_shli_tl(lo2, lo2, shift);
2414
2415     t1 = tcg_const_tl(tabl);
2416     t2 = tcg_const_tl(tabr);
2417     tcg_gen_shr_tl(lo1, t1, lo1);
2418     tcg_gen_shr_tl(lo2, t2, lo2);
2419     tcg_gen_andi_tl(dst, lo1, omask);
2420     tcg_gen_andi_tl(lo2, lo2, omask);
2421
2422     amask = -8;
2423     if (AM_CHECK(dc)) {
2424         amask &= 0xffffffffULL;
2425     }
2426     tcg_gen_andi_tl(s1, s1, amask);
2427     tcg_gen_andi_tl(s2, s2, amask);
2428
2429     /* We want to compute
2430         dst = (s1 == s2 ? lo1 : lo1 & lo2).
2431        We've already done dst = lo1, so this reduces to
2432         dst &= (s1 == s2 ? -1 : lo2)
2433        Which we perform by
2434         lo2 |= -(s1 == s2)
2435         dst &= lo2
2436     */
2437     tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2438     tcg_gen_neg_tl(t1, t1);
2439     tcg_gen_or_tl(lo2, lo2, t1);
2440     tcg_gen_and_tl(dst, dst, lo2);
2441
2442     tcg_temp_free(lo1);
2443     tcg_temp_free(lo2);
2444     tcg_temp_free(t1);
2445     tcg_temp_free(t2);
2446 }
2447
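/* ALIGNADDRESS: dst receives the 8-byte-aligned sum (s1 + s2) & ~7, and the
   low three bits of the sum (negated when "left" is set) are deposited into
   GSR.align for use by a subsequent faligndata.  */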
2448 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2449 {
2450     TCGv tmp = tcg_temp_new();
2451
2452     tcg_gen_add_tl(tmp, s1, s2);
2453     tcg_gen_andi_tl(dst, tmp, -8);
2454     if (left) {
2455         tcg_gen_neg_tl(tmp, tmp);
2456     }
2457     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2458
2459     tcg_temp_free(tmp);
2460 }
2461
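/* FALIGNDATA: dst is the 8-byte window starting GSR.align bytes into the
   concatenation s1:s2.  For example, with GSR.align == 3 the shift below is
   24 and the result is dst = (s1 << 24) | (s2 >> 40).  */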
2462 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2463 {
2464     TCGv t1, t2, shift;
2465
2466     t1 = tcg_temp_new();
2467     t2 = tcg_temp_new();
2468     shift = tcg_temp_new();
2469
2470     tcg_gen_andi_tl(shift, gsr, 7);
2471     tcg_gen_shli_tl(shift, shift, 3);
2472     tcg_gen_shl_tl(t1, s1, shift);
2473
2474     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2475        shift of (up to 63) followed by a constant shift of 1.  */
2476     tcg_gen_xori_tl(shift, shift, 63);
2477     tcg_gen_shr_tl(t2, s2, shift);
2478     tcg_gen_shri_tl(t2, t2, 1);
2479
2480     tcg_gen_or_tl(dst, t1, t2);
2481
2482     tcg_temp_free(t1);
2483     tcg_temp_free(t2);
2484     tcg_temp_free(shift);
2485 }
2486 #endif
2487
2488 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
2489     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2490         goto illegal_insn;
2491 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2492     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2493         goto nfpu_insn;
2494
2495 /* before an instruction, dc->pc must be static */
2496 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2497 {
2498     unsigned int opc, rs1, rs2, rd;
2499     TCGv cpu_src1, cpu_src2;
2500     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2501     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2502     target_long simm;
2503
2504     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2505         tcg_gen_debug_insn_start(dc->pc);
2506     }
2507
2508     opc = GET_FIELD(insn, 0, 1);
2509     rd = GET_FIELD(insn, 2, 6);
2510
2511     switch (opc) {
2512     case 0:                     /* branches/sethi */
2513         {
2514             unsigned int xop = GET_FIELD(insn, 7, 9);
2515             int32_t target;
2516             switch (xop) {
2517 #ifdef TARGET_SPARC64
2518             case 0x1:           /* V9 BPcc */
2519                 {
2520                     int cc;
2521
2522                     target = GET_FIELD_SP(insn, 0, 18);
2523                     target = sign_extend(target, 19);
2524                     target <<= 2;
2525                     cc = GET_FIELD_SP(insn, 20, 21);
2526                     if (cc == 0)
2527                         do_branch(dc, target, insn, 0);
2528                     else if (cc == 2)
2529                         do_branch(dc, target, insn, 1);
2530                     else
2531                         goto illegal_insn;
2532                     goto jmp_insn;
2533                 }
2534             case 0x3:           /* V9 BPr */
2535                 {
2536                     target = GET_FIELD_SP(insn, 0, 13) |
2537                         (GET_FIELD_SP(insn, 20, 21) << 14);
2538                     target = sign_extend(target, 16);
2539                     target <<= 2;
2540                     cpu_src1 = get_src1(dc, insn);
2541                     do_branch_reg(dc, target, insn, cpu_src1);
2542                     goto jmp_insn;
2543                 }
2544             case 0x5:           /* V9 FBPcc */
2545                 {
2546                     int cc = GET_FIELD_SP(insn, 20, 21);
2547                     if (gen_trap_ifnofpu(dc)) {
2548                         goto jmp_insn;
2549                     }
2550                     target = GET_FIELD_SP(insn, 0, 18);
2551                     target = sign_extend(target, 19);
2552                     target <<= 2;
2553                     do_fbranch(dc, target, insn, cc);
2554                     goto jmp_insn;
2555                 }
2556 #else
2557             case 0x7:           /* CBN+x */
2558                 {
2559                     goto ncp_insn;
2560                 }
2561 #endif
2562             case 0x2:           /* BN+x */
2563                 {
2564                     target = GET_FIELD(insn, 10, 31);
2565                     target = sign_extend(target, 22);
2566                     target <<= 2;
2567                     do_branch(dc, target, insn, 0);
2568                     goto jmp_insn;
2569                 }
2570             case 0x6:           /* FBN+x */
2571                 {
2572                     if (gen_trap_ifnofpu(dc)) {
2573                         goto jmp_insn;
2574                     }
2575                     target = GET_FIELD(insn, 10, 31);
2576                     target = sign_extend(target, 22);
2577                     target <<= 2;
2578                     do_fbranch(dc, target, insn, 0);
2579                     goto jmp_insn;
2580                 }
2581             case 0x4:           /* SETHI */
2582                 /* Special-case %g0 because that's the canonical nop.  */
2583                 if (rd) {
2584                     uint32_t value = GET_FIELD(insn, 10, 31);
2585                     TCGv t = gen_dest_gpr(dc, rd);
2586                     tcg_gen_movi_tl(t, value << 10);
2587                     gen_store_gpr(dc, rd, t);
2588                 }
2589                 break;
2590             case 0x0:           /* UNIMPL */
2591             default:
2592                 goto illegal_insn;
2593             }
2594             break;
2595         }
2596         break;
2597     case 1:                     /*CALL*/
2598         {
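            /* CALL: %o7 (r15) receives the address of the CALL itself and
               PC + (disp30 << 2) becomes the new npc, so the delay slot at
               the old npc executes first; on SPARC64 the target is masked
               to 32 bits when the address mask (PSTATE.AM) is in effect.  */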
2599             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2600             TCGv o7 = gen_dest_gpr(dc, 15);
2601
2602             tcg_gen_movi_tl(o7, dc->pc);
2603             gen_store_gpr(dc, 15, o7);
2604             target += dc->pc;
2605             gen_mov_pc_npc(dc);
2606 #ifdef TARGET_SPARC64
2607             if (unlikely(AM_CHECK(dc))) {
2608                 target &= 0xffffffffULL;
2609             }
2610 #endif
2611             dc->npc = target;
2612         }
2613         goto jmp_insn;
2614     case 2:                     /* FPU & Logical Operations */
2615         {
2616             unsigned int xop = GET_FIELD(insn, 7, 12);
2617             TCGv cpu_dst = get_temp_tl(dc);
2618             TCGv cpu_tmp0;
2619
2620             if (xop == 0x3a) {  /* generate trap */
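                /* Tcc: cond == 0 is "trap never" and cond == 8 "trap always";
                   any other cond is evaluated first.  The trap number is
                   (rs1 + rs2/imm) masked by V8_TRAP_MASK, or by
                   UA2005_HTRAP_MASK for a hypervisor-capable CPU in
                   supervisor mode, and exception TT_TRAP + number is
                   raised.  */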
2621                 int cond = GET_FIELD(insn, 3, 6);
2622                 TCGv_i32 trap;
2623                 int l1 = -1, mask;
2624
2625                 if (cond == 0) {
2626                     /* Trap never.  */
2627                     break;
2628                 }
2629
2630                 save_state(dc);
2631
2632                 if (cond != 8) {
2633                     /* Conditional trap.  */
2634                     DisasCompare cmp;
2635 #ifdef TARGET_SPARC64
2636                     /* V9 icc/xcc */
2637                     int cc = GET_FIELD_SP(insn, 11, 12);
2638                     if (cc == 0) {
2639                         gen_compare(&cmp, 0, cond, dc);
2640                     } else if (cc == 2) {
2641                         gen_compare(&cmp, 1, cond, dc);
2642                     } else {
2643                         goto illegal_insn;
2644                     }
2645 #else
2646                     gen_compare(&cmp, 0, cond, dc);
2647 #endif
2648                     l1 = gen_new_label();
2649                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2650                                       cmp.c1, cmp.c2, l1);
2651                     free_compare(&cmp);
2652                 }
2653
2654                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2655                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2656
2657                 /* Don't use the normal temporaries, as they may well have
2658                    gone out of scope with the branch above.  While we're
2659                    doing that we might as well pre-truncate to 32-bit.  */
2660                 trap = tcg_temp_new_i32();
2661
2662                 rs1 = GET_FIELD_SP(insn, 14, 18);
2663                 if (IS_IMM) {
2664                     rs2 = GET_FIELD_SP(insn, 0, 6);
2665                     if (rs1 == 0) {
2666                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2667                         /* Signal that the trap value is fully constant.  */
2668                         mask = 0;
2669                     } else {
2670                         TCGv t1 = gen_load_gpr(dc, rs1);
2671                         tcg_gen_trunc_tl_i32(trap, t1);
2672                         tcg_gen_addi_i32(trap, trap, rs2);
2673                     }
2674                 } else {
2675                     TCGv t1, t2;
2676                     rs2 = GET_FIELD_SP(insn, 0, 4);
2677                     t1 = gen_load_gpr(dc, rs1);
2678                     t2 = gen_load_gpr(dc, rs2);
2679                     tcg_gen_add_tl(t1, t1, t2);
2680                     tcg_gen_trunc_tl_i32(trap, t1);
2681                 }
2682                 if (mask != 0) {
2683                     tcg_gen_andi_i32(trap, trap, mask);
2684                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
2685                 }
2686
2687                 gen_helper_raise_exception(cpu_env, trap);
2688                 tcg_temp_free_i32(trap);
2689
2690                 if (cond == 8) {
2691                     /* An unconditional trap ends the TB.  */
2692                     dc->is_br = 1;
2693                     goto jmp_insn;
2694                 } else {
2695                     /* A conditional trap falls through to the next insn.  */
2696                     gen_set_label(l1);
2697                     break;
2698                 }
2699             } else if (xop == 0x28) {
2700                 rs1 = GET_FIELD(insn, 13, 17);
2701                 switch(rs1) {
2702                 case 0: /* rdy */
2703 #ifndef TARGET_SPARC64
2704                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2705                                        manual, rdy on the microSPARC
2706                                        II */
2707                 case 0x0f:          /* stbar in the SPARCv8 manual,
2708                                        rdy on the microSPARC II */
2709                 case 0x10 ... 0x1f: /* implementation-dependent in the
2710                                        SPARCv8 manual, rdy on the
2711                                        microSPARC II */
2712                     /* Read Asr17 */
2713                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2714                         TCGv t = gen_dest_gpr(dc, rd);
2715                         /* Read Asr17 for a Leon3 monoprocessor */
2716                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2717                         gen_store_gpr(dc, rd, t);
2718                         break;
2719                     }
2720 #endif
2721                     gen_store_gpr(dc, rd, cpu_y);
2722                     break;
2723 #ifdef TARGET_SPARC64
2724                 case 0x2: /* V9 rdccr */
2725                     update_psr(dc);
2726                     gen_helper_rdccr(cpu_dst, cpu_env);
2727                     gen_store_gpr(dc, rd, cpu_dst);
2728                     break;
2729                 case 0x3: /* V9 rdasi */
2730                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2731                     gen_store_gpr(dc, rd, cpu_dst);
2732                     break;
2733                 case 0x4: /* V9 rdtick */
2734                     {
2735                         TCGv_ptr r_tickptr;
2736
2737                         r_tickptr = tcg_temp_new_ptr();
2738                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2739                                        offsetof(CPUSPARCState, tick));
2740                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2741                         tcg_temp_free_ptr(r_tickptr);
2742                         gen_store_gpr(dc, rd, cpu_dst);
2743                     }
2744                     break;
2745                 case 0x5: /* V9 rdpc */
2746                     {
2747                         TCGv t = gen_dest_gpr(dc, rd);
2748                         if (unlikely(AM_CHECK(dc))) {
2749                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2750                         } else {
2751                             tcg_gen_movi_tl(t, dc->pc);
2752                         }
2753                         gen_store_gpr(dc, rd, t);
2754                     }
2755                     break;
2756                 case 0x6: /* V9 rdfprs */
2757                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2758                     gen_store_gpr(dc, rd, cpu_dst);
2759                     break;
2760                 case 0xf: /* V9 membar */
2761                     break; /* no effect */
2762                 case 0x13: /* Graphics Status */
2763                     if (gen_trap_ifnofpu(dc)) {
2764                         goto jmp_insn;
2765                     }
2766                     gen_store_gpr(dc, rd, cpu_gsr);
2767                     break;
2768                 case 0x16: /* Softint */
2769                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2770                     gen_store_gpr(dc, rd, cpu_dst);
2771                     break;
2772                 case 0x17: /* Tick compare */
2773                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
2774                     break;
2775                 case 0x18: /* System tick */
2776                     {
2777                         TCGv_ptr r_tickptr;
2778
2779                         r_tickptr = tcg_temp_new_ptr();
2780                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2781                                        offsetof(CPUSPARCState, stick));
2782                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2783                         tcg_temp_free_ptr(r_tickptr);
2784                         gen_store_gpr(dc, rd, cpu_dst);
2785                     }
2786                     break;
2787                 case 0x19: /* System tick compare */
2788                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
2789                     break;
2790                 case 0x10: /* Performance Control */
2791                 case 0x11: /* Performance Instrumentation Counter */
2792                 case 0x12: /* Dispatch Control */
2793                 case 0x14: /* Softint set, WO */
2794                 case 0x15: /* Softint clear, WO */
2795 #endif
2796                 default:
2797                     goto illegal_insn;
2798                 }
2799 #if !defined(CONFIG_USER_ONLY)
2800             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2801 #ifndef TARGET_SPARC64
2802                 if (!supervisor(dc)) {
2803                     goto priv_insn;
2804                 }
2805                 update_psr(dc);
2806                 gen_helper_rdpsr(cpu_dst, cpu_env);
2807 #else
2808                 CHECK_IU_FEATURE(dc, HYPV);
2809                 if (!hypervisor(dc))
2810                     goto priv_insn;
2811                 rs1 = GET_FIELD(insn, 13, 17);
2812                 switch (rs1) {
2813                 case 0: // hpstate
2814                     // gen_op_rdhpstate();
2815                     break;
2816                 case 1: // htstate
2817                     // gen_op_rdhtstate();
2818                     break;
2819                 case 3: // hintp
2820                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2821                     break;
2822                 case 5: // htba
2823                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2824                     break;
2825                 case 6: // hver
2826                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2827                     break;
2828                 case 31: // hstick_cmpr
2829                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2830                     break;
2831                 default:
2832                     goto illegal_insn;
2833                 }
2834 #endif
2835                 gen_store_gpr(dc, rd, cpu_dst);
2836                 break;
2837             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2838                 if (!supervisor(dc)) {
2839                     goto priv_insn;
2840                 }
2841                 cpu_tmp0 = get_temp_tl(dc);
2842 #ifdef TARGET_SPARC64
2843                 rs1 = GET_FIELD(insn, 13, 17);
2844                 switch (rs1) {
2845                 case 0: // tpc
2846                     {
2847                         TCGv_ptr r_tsptr;
2848
2849                         r_tsptr = tcg_temp_new_ptr();
2850                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2851                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2852                                       offsetof(trap_state, tpc));
2853                         tcg_temp_free_ptr(r_tsptr);
2854                     }
2855                     break;
2856                 case 1: // tnpc
2857                     {
2858                         TCGv_ptr r_tsptr;
2859
2860                         r_tsptr = tcg_temp_new_ptr();
2861                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2862                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2863                                       offsetof(trap_state, tnpc));
2864                         tcg_temp_free_ptr(r_tsptr);
2865                     }
2866                     break;
2867                 case 2: // tstate
2868                     {
2869                         TCGv_ptr r_tsptr;
2870
2871                         r_tsptr = tcg_temp_new_ptr();
2872                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2873                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2874                                       offsetof(trap_state, tstate));
2875                         tcg_temp_free_ptr(r_tsptr);
2876                     }
2877                     break;
2878                 case 3: // tt
2879                     {
2880                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
2881
2882                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2883                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
2884                                          offsetof(trap_state, tt));
2885                         tcg_temp_free_ptr(r_tsptr);
2886                     }
2887                     break;
2888                 case 4: // tick
2889                     {
2890                         TCGv_ptr r_tickptr;
2891
2892                         r_tickptr = tcg_temp_new_ptr();
2893                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2894                                        offsetof(CPUSPARCState, tick));
2895                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2896                         tcg_temp_free_ptr(r_tickptr);
2897                     }
2898                     break;
2899                 case 5: // tba
2900                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2901                     break;
2902                 case 6: // pstate
2903                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2904                                      offsetof(CPUSPARCState, pstate));
2905                     break;
2906                 case 7: // tl
2907                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2908                                      offsetof(CPUSPARCState, tl));
2909                     break;
2910                 case 8: // pil
2911                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2912                                      offsetof(CPUSPARCState, psrpil));
2913                     break;
2914                 case 9: // cwp
2915                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
2916                     break;
2917                 case 10: // cansave
2918                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2919                                      offsetof(CPUSPARCState, cansave));
2920                     break;
2921                 case 11: // canrestore
2922                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2923                                      offsetof(CPUSPARCState, canrestore));
2924                     break;
2925                 case 12: // cleanwin
2926                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2927                                      offsetof(CPUSPARCState, cleanwin));
2928                     break;
2929                 case 13: // otherwin
2930                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2931                                      offsetof(CPUSPARCState, otherwin));
2932                     break;
2933                 case 14: // wstate
2934                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2935                                      offsetof(CPUSPARCState, wstate));
2936                     break;
2937                 case 16: // UA2005 gl
2938                     CHECK_IU_FEATURE(dc, GL);
2939                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2940                                      offsetof(CPUSPARCState, gl));
2941                     break;
2942                 case 26: // UA2005 strand status
2943                     CHECK_IU_FEATURE(dc, HYPV);
2944                     if (!hypervisor(dc))
2945                         goto priv_insn;
2946                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2947                     break;
2948                 case 31: // ver
2949                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2950                     break;
2951                 case 15: // fq
2952                 default:
2953                     goto illegal_insn;
2954                 }
2955 #else
2956                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2957 #endif
2958                 gen_store_gpr(dc, rd, cpu_tmp0);
2959                 break;
2960             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2961 #ifdef TARGET_SPARC64
2962                 save_state(dc);
2963                 gen_helper_flushw(cpu_env);
2964 #else
2965                 if (!supervisor(dc))
2966                     goto priv_insn;
2967                 gen_store_gpr(dc, rd, cpu_tbr);
2968 #endif
2969                 break;
2970 #endif
2971             } else if (xop == 0x34) {   /* FPU Operations */
2972                 if (gen_trap_ifnofpu(dc)) {
2973                     goto jmp_insn;
2974                 }
2975                 gen_op_clear_ieee_excp_and_FTT();
2976                 rs1 = GET_FIELD(insn, 13, 17);
2977                 rs2 = GET_FIELD(insn, 27, 31);
2978                 xop = GET_FIELD(insn, 18, 26);
2979                 save_state(dc);
2980                 switch (xop) {
2981                 case 0x1: /* fmovs */
2982                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2983                     gen_store_fpr_F(dc, rd, cpu_src1_32);
2984                     break;
2985                 case 0x5: /* fnegs */
2986                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2987                     break;
2988                 case 0x9: /* fabss */
2989                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2990                     break;
2991                 case 0x29: /* fsqrts */
2992                     CHECK_FPU_FEATURE(dc, FSQRT);
2993                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2994                     break;
2995                 case 0x2a: /* fsqrtd */
2996                     CHECK_FPU_FEATURE(dc, FSQRT);
2997                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2998                     break;
2999                 case 0x2b: /* fsqrtq */
3000                     CHECK_FPU_FEATURE(dc, FLOAT128);
3001                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3002                     break;
3003                 case 0x41: /* fadds */
3004                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3005                     break;
3006                 case 0x42: /* faddd */
3007                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3008                     break;
3009                 case 0x43: /* faddq */
3010                     CHECK_FPU_FEATURE(dc, FLOAT128);
3011                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3012                     break;
3013                 case 0x45: /* fsubs */
3014                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3015                     break;
3016                 case 0x46: /* fsubd */
3017                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3018                     break;
3019                 case 0x47: /* fsubq */
3020                     CHECK_FPU_FEATURE(dc, FLOAT128);
3021                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3022                     break;
3023                 case 0x49: /* fmuls */
3024                     CHECK_FPU_FEATURE(dc, FMUL);
3025                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3026                     break;
3027                 case 0x4a: /* fmuld */
3028                     CHECK_FPU_FEATURE(dc, FMUL);
3029                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3030                     break;
3031                 case 0x4b: /* fmulq */
3032                     CHECK_FPU_FEATURE(dc, FLOAT128);
3033                     CHECK_FPU_FEATURE(dc, FMUL);
3034                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3035                     break;
3036                 case 0x4d: /* fdivs */
3037                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3038                     break;
3039                 case 0x4e: /* fdivd */
3040                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3041                     break;
3042                 case 0x4f: /* fdivq */
3043                     CHECK_FPU_FEATURE(dc, FLOAT128);
3044                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3045                     break;
3046                 case 0x69: /* fsmuld */
3047                     CHECK_FPU_FEATURE(dc, FSMULD);
3048                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3049                     break;
3050                 case 0x6e: /* fdmulq */
3051                     CHECK_FPU_FEATURE(dc, FLOAT128);
3052                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3053                     break;
3054                 case 0xc4: /* fitos */
3055                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3056                     break;
3057                 case 0xc6: /* fdtos */
3058                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3059                     break;
3060                 case 0xc7: /* fqtos */
3061                     CHECK_FPU_FEATURE(dc, FLOAT128);
3062                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3063                     break;
3064                 case 0xc8: /* fitod */
3065                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3066                     break;
3067                 case 0xc9: /* fstod */
3068                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3069                     break;
3070                 case 0xcb: /* fqtod */
3071                     CHECK_FPU_FEATURE(dc, FLOAT128);
3072                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3073                     break;
3074                 case 0xcc: /* fitoq */
3075                     CHECK_FPU_FEATURE(dc, FLOAT128);
3076                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3077                     break;
3078                 case 0xcd: /* fstoq */
3079                     CHECK_FPU_FEATURE(dc, FLOAT128);
3080                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3081                     break;
3082                 case 0xce: /* fdtoq */
3083                     CHECK_FPU_FEATURE(dc, FLOAT128);
3084                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3085                     break;
3086                 case 0xd1: /* fstoi */
3087                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3088                     break;
3089                 case 0xd2: /* fdtoi */
3090                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3091                     break;
3092                 case 0xd3: /* fqtoi */
3093                     CHECK_FPU_FEATURE(dc, FLOAT128);
3094                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3095                     break;
3096 #ifdef TARGET_SPARC64
3097                 case 0x2: /* V9 fmovd */
3098                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3099                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3100                     break;
3101                 case 0x3: /* V9 fmovq */
3102                     CHECK_FPU_FEATURE(dc, FLOAT128);
3103                     gen_move_Q(rd, rs2);
3104                     break;
3105                 case 0x6: /* V9 fnegd */
3106                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3107                     break;
3108                 case 0x7: /* V9 fnegq */
3109                     CHECK_FPU_FEATURE(dc, FLOAT128);
3110                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3111                     break;
3112                 case 0xa: /* V9 fabsd */
3113                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3114                     break;
3115                 case 0xb: /* V9 fabsq */
3116                     CHECK_FPU_FEATURE(dc, FLOAT128);
3117                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3118                     break;
3119                 case 0x81: /* V9 fstox */
3120                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3121                     break;
3122                 case 0x82: /* V9 fdtox */
3123                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3124                     break;
3125                 case 0x83: /* V9 fqtox */
3126                     CHECK_FPU_FEATURE(dc, FLOAT128);
3127                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3128                     break;
3129                 case 0x84: /* V9 fxtos */
3130                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3131                     break;
3132                 case 0x88: /* V9 fxtod */
3133                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3134                     break;
3135                 case 0x8c: /* V9 fxtoq */
3136                     CHECK_FPU_FEATURE(dc, FLOAT128);
3137                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3138                     break;
3139 #endif
3140                 default:
3141                     goto illegal_insn;
3142                 }
3143             } else if (xop == 0x35) {   /* FPU Operations */
3144 #ifdef TARGET_SPARC64
3145                 int cond;
3146 #endif
3147                 if (gen_trap_ifnofpu(dc)) {
3148                     goto jmp_insn;
3149                 }
3150                 gen_op_clear_ieee_excp_and_FTT();
3151                 rs1 = GET_FIELD(insn, 13, 17);
3152                 rs2 = GET_FIELD(insn, 27, 31);
3153                 xop = GET_FIELD(insn, 18, 26);
3154                 save_state(dc);
3155
3156 #ifdef TARGET_SPARC64
3157 #define FMOVR(sz)                                                  \
3158                 do {                                               \
3159                     DisasCompare cmp;                              \
3160                     cond = GET_FIELD_SP(insn, 10, 12);             \
3161                     cpu_src1 = get_src1(dc, insn);                 \
3162                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3163                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3164                     free_compare(&cmp);                            \
3165                 } while (0)
3166
3167                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3168                     FMOVR(s);
3169                     break;
3170                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3171                     FMOVR(d);
3172                     break;
3173                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3174                     CHECK_FPU_FEATURE(dc, FLOAT128);
3175                     FMOVR(q);
3176                     break;
3177                 }
3178 #undef FMOVR
3179 #endif
3180                 switch (xop) {
3181 #ifdef TARGET_SPARC64
3182 #define FMOVCC(fcc, sz)                                                 \
3183                     do {                                                \
3184                         DisasCompare cmp;                               \
3185                         cond = GET_FIELD_SP(insn, 14, 17);              \
3186                         gen_fcompare(&cmp, fcc, cond);                  \
3187                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3188                         free_compare(&cmp);                             \
3189                     } while (0)
3190
3191                     case 0x001: /* V9 fmovscc %fcc0 */
3192                         FMOVCC(0, s);
3193                         break;
3194                     case 0x002: /* V9 fmovdcc %fcc0 */
3195                         FMOVCC(0, d);
3196                         break;
3197                     case 0x003: /* V9 fmovqcc %fcc0 */
3198                         CHECK_FPU_FEATURE(dc, FLOAT128);
3199                         FMOVCC(0, q);
3200                         break;
3201                     case 0x041: /* V9 fmovscc %fcc1 */
3202                         FMOVCC(1, s);
3203                         break;
3204                     case 0x042: /* V9 fmovdcc %fcc1 */
3205                         FMOVCC(1, d);
3206                         break;
3207                     case 0x043: /* V9 fmovqcc %fcc1 */
3208                         CHECK_FPU_FEATURE(dc, FLOAT128);
3209                         FMOVCC(1, q);
3210                         break;
3211                     case 0x081: /* V9 fmovscc %fcc2 */
3212                         FMOVCC(2, s);
3213                         break;
3214                     case 0x082: /* V9 fmovdcc %fcc2 */
3215                         FMOVCC(2, d);
3216                         break;
3217                     case 0x083: /* V9 fmovqcc %fcc2 */
3218                         CHECK_FPU_FEATURE(dc, FLOAT128);
3219                         FMOVCC(2, q);
3220                         break;
3221                     case 0x0c1: /* V9 fmovscc %fcc3 */
3222                         FMOVCC(3, s);
3223                         break;
3224                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3225                         FMOVCC(3, d);
3226                         break;
3227                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3228                         CHECK_FPU_FEATURE(dc, FLOAT128);
3229                         FMOVCC(3, q);
3230                         break;
3231 #undef FMOVCC
3232 #define FMOVCC(xcc, sz)                                                 \
3233                     do {                                                \
3234                         DisasCompare cmp;                               \
3235                         cond = GET_FIELD_SP(insn, 14, 17);              \
3236                         gen_compare(&cmp, xcc, cond, dc);               \
3237                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3238                         free_compare(&cmp);                             \
3239                     } while (0)
3240
3241                     case 0x101: /* V9 fmovscc %icc */
3242                         FMOVCC(0, s);
3243                         break;
3244                     case 0x102: /* V9 fmovdcc %icc */
3245                         FMOVCC(0, d);
3246                         break;
3247                     case 0x103: /* V9 fmovqcc %icc */
3248                         CHECK_FPU_FEATURE(dc, FLOAT128);
3249                         FMOVCC(0, q);
3250                         break;
3251                     case 0x181: /* V9 fmovscc %xcc */
3252                         FMOVCC(1, s);
3253                         break;
3254                     case 0x182: /* V9 fmovdcc %xcc */
3255                         FMOVCC(1, d);
3256                         break;
3257                     case 0x183: /* V9 fmovqcc %xcc */
3258                         CHECK_FPU_FEATURE(dc, FLOAT128);
3259                         FMOVCC(1, q);
3260                         break;
3261 #undef FMOVCC
3262 #endif
3263                     case 0x51: /* fcmps, V9 %fcc */
3264                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3265                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3266                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3267                         break;
3268                     case 0x52: /* fcmpd, V9 %fcc */
3269                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3270                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3271                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3272                         break;
3273                     case 0x53: /* fcmpq, V9 %fcc */
3274                         CHECK_FPU_FEATURE(dc, FLOAT128);
3275                         gen_op_load_fpr_QT0(QFPREG(rs1));
3276                         gen_op_load_fpr_QT1(QFPREG(rs2));
3277                         gen_op_fcmpq(rd & 3);
3278                         break;
3279                     case 0x55: /* fcmpes, V9 %fcc */
3280                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3281                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3282                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3283                         break;
3284                     case 0x56: /* fcmped, V9 %fcc */
3285                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3286                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3287                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3288                         break;
3289                     case 0x57: /* fcmpeq, V9 %fcc */
3290                         CHECK_FPU_FEATURE(dc, FLOAT128);
3291                         gen_op_load_fpr_QT0(QFPREG(rs1));
3292                         gen_op_load_fpr_QT1(QFPREG(rs2));
3293                         gen_op_fcmpeq(rd & 3);
3294                         break;
3295                     default:
3296                         goto illegal_insn;
3297                 }
3298             } else if (xop == 0x2) {
3299                 TCGv dst = gen_dest_gpr(dc, rd);
3300                 rs1 = GET_FIELD(insn, 13, 17);
3301                 if (rs1 == 0) {
3302                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3303                     if (IS_IMM) {       /* immediate */
3304                         simm = GET_FIELDs(insn, 19, 31);
3305                         tcg_gen_movi_tl(dst, simm);
3306                         gen_store_gpr(dc, rd, dst);
3307                     } else {            /* register */
3308                         rs2 = GET_FIELD(insn, 27, 31);
3309                         if (rs2 == 0) {
3310                             tcg_gen_movi_tl(dst, 0);
3311                             gen_store_gpr(dc, rd, dst);
3312                         } else {
3313                             cpu_src2 = gen_load_gpr(dc, rs2);
3314                             gen_store_gpr(dc, rd, cpu_src2);
3315                         }
3316                     }
3317                 } else {
3318                     cpu_src1 = get_src1(dc, insn);
3319                     if (IS_IMM) {       /* immediate */
3320                         simm = GET_FIELDs(insn, 19, 31);
3321                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3322                         gen_store_gpr(dc, rd, dst);
3323                     } else {            /* register */
3324                         rs2 = GET_FIELD(insn, 27, 31);
3325                         if (rs2 == 0) {
3326                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3327                             gen_store_gpr(dc, rd, cpu_src1);
3328                         } else {
3329                             cpu_src2 = gen_load_gpr(dc, rs2);
3330                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3331                             gen_store_gpr(dc, rd, dst);
3332                         }
3333                     }
3334                 }
3335 #ifdef TARGET_SPARC64
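             /* For the three shift cases below, bit 12 of the instruction is
                the V9 "x" bit: when set, the shift is the 64-bit sllx/srlx/
                srax form with a 6-bit count; when clear, it is the 32-bit
                form with a 5-bit count, and srl/sra first zero- or
                sign-extend the low 32 bits of the source. */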
3336             } else if (xop == 0x25) { /* sll, V9 sllx */
3337                 cpu_src1 = get_src1(dc, insn);
3338                 if (IS_IMM) {   /* immediate */
3339                     simm = GET_FIELDs(insn, 20, 31);
3340                     if (insn & (1 << 12)) {
3341                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3342                     } else {
3343                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3344                     }
3345                 } else {                /* register */
3346                     rs2 = GET_FIELD(insn, 27, 31);
3347                     cpu_src2 = gen_load_gpr(dc, rs2);
3348                     cpu_tmp0 = get_temp_tl(dc);
3349                     if (insn & (1 << 12)) {
3350                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3351                     } else {
3352                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3353                     }
3354                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3355                 }
3356                 gen_store_gpr(dc, rd, cpu_dst);
3357             } else if (xop == 0x26) { /* srl, V9 srlx */
3358                 cpu_src1 = get_src1(dc, insn);
3359                 if (IS_IMM) {   /* immediate */
3360                     simm = GET_FIELDs(insn, 20, 31);
3361                     if (insn & (1 << 12)) {
3362                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3363                     } else {
3364                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3365                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3366                     }
3367                 } else {                /* register */
3368                     rs2 = GET_FIELD(insn, 27, 31);
3369                     cpu_src2 = gen_load_gpr(dc, rs2);
3370                     cpu_tmp0 = get_temp_tl(dc);
3371                     if (insn & (1 << 12)) {
3372                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3373                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3374                     } else {
3375                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3376                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3377                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3378                     }
3379                 }
3380                 gen_store_gpr(dc, rd, cpu_dst);
3381             } else if (xop == 0x27) { /* sra, V9 srax */
3382                 cpu_src1 = get_src1(dc, insn);
3383                 if (IS_IMM) {   /* immediate */
3384                     simm = GET_FIELDs(insn, 20, 31);
3385                     if (insn & (1 << 12)) {
3386                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3387                     } else {
3388                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3389                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3390                     }
3391                 } else {                /* register */
3392                     rs2 = GET_FIELD(insn, 27, 31);
3393                     cpu_src2 = gen_load_gpr(dc, rs2);
3394                     cpu_tmp0 = get_temp_tl(dc);
3395                     if (insn & (1 << 12)) {
3396                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3397                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3398                     } else {
3399                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3400                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3401                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3402                     }
3403                 }
3404                 gen_store_gpr(dc, rd, cpu_dst);
3405 #endif
3406             } else if (xop < 0x36) {
3407                 if (xop < 0x20) {
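                     /* xop 0x00-0x0f are the plain ALU operations and
                        0x10-0x1f are the same operations with condition-code
                        update, hence the dispatch on (xop & ~0x10) and the
                        (xop & 0x10) tests in each case. */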
3408                     cpu_src1 = get_src1(dc, insn);
3409                     cpu_src2 = get_src2(dc, insn);
3410                     switch (xop & ~0x10) {
3411                     case 0x0: /* add */
3412                         if (xop & 0x10) {
3413                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3414                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3415                             dc->cc_op = CC_OP_ADD;
3416                         } else {
3417                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3418                         }
3419                         break;
3420                     case 0x1: /* and */
3421                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3422                         if (xop & 0x10) {
3423                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3424                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3425                             dc->cc_op = CC_OP_LOGIC;
3426                         }
3427                         break;
3428                     case 0x2: /* or */
3429                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3430                         if (xop & 0x10) {
3431                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3432                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3433                             dc->cc_op = CC_OP_LOGIC;
3434                         }
3435                         break;
3436                     case 0x3: /* xor */
3437                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3438                         if (xop & 0x10) {
3439                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3440                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3441                             dc->cc_op = CC_OP_LOGIC;
3442                         }
3443                         break;
3444                     case 0x4: /* sub */
3445                         if (xop & 0x10) {
3446                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3447                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3448                             dc->cc_op = CC_OP_SUB;
3449                         } else {
3450                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3451                         }
3452                         break;
3453                     case 0x5: /* andn */
3454                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3455                         if (xop & 0x10) {
3456                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3457                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3458                             dc->cc_op = CC_OP_LOGIC;
3459                         }
3460                         break;
3461                     case 0x6: /* orn */
3462                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3463                         if (xop & 0x10) {
3464                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3465                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3466                             dc->cc_op = CC_OP_LOGIC;
3467                         }
3468                         break;
3469                     case 0x7: /* xorn */
3470                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3471                         if (xop & 0x10) {
3472                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3473                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3474                             dc->cc_op = CC_OP_LOGIC;
3475                         }
3476                         break;
3477                     case 0x8: /* addx, V9 addc */
3478                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3479                                         (xop & 0x10));
3480                         break;
3481 #ifdef TARGET_SPARC64
3482                     case 0x9: /* V9 mulx */
3483                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3484                         break;
3485 #endif
3486                     case 0xa: /* umul */
3487                         CHECK_IU_FEATURE(dc, MUL);
3488                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3489                         if (xop & 0x10) {
3490                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3491                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3492                             dc->cc_op = CC_OP_LOGIC;
3493                         }
3494                         break;
3495                     case 0xb: /* smul */
3496                         CHECK_IU_FEATURE(dc, MUL);
3497                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3498                         if (xop & 0x10) {
3499                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3500                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3501                             dc->cc_op = CC_OP_LOGIC;
3502                         }
3503                         break;
3504                     case 0xc: /* subx, V9 subc */
3505                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3506                                         (xop & 0x10));
3507                         break;
3508 #ifdef TARGET_SPARC64
3509                     case 0xd: /* V9 udivx */
3510                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3511                         break;
3512 #endif
3513                     case 0xe: /* udiv */
3514                         CHECK_IU_FEATURE(dc, DIV);
3515                         if (xop & 0x10) {
3516                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3517                                                cpu_src2);
3518                             dc->cc_op = CC_OP_DIV;
3519                         } else {
3520                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3521                                             cpu_src2);
3522                         }
3523                         break;
3524                     case 0xf: /* sdiv */
3525                         CHECK_IU_FEATURE(dc, DIV);
3526                         if (xop & 0x10) {
3527                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3528                                                cpu_src2);
3529                             dc->cc_op = CC_OP_DIV;
3530                         } else {
3531                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3532                                             cpu_src2);
3533                         }
3534                         break;
3535                     default:
3536                         goto illegal_insn;
3537                     }
3538                     gen_store_gpr(dc, rd, cpu_dst);
3539                 } else {
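                     /* xop 0x20-0x35: tagged add/subtract, mulscc, the
                        pre-V9 32-bit shifts, the state-register writes
                        (0x30-0x33) and, on V9, the conditional moves and
                        64-bit divide/popcount. */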
3540                     cpu_src1 = get_src1(dc, insn);
3541                     cpu_src2 = get_src2(dc, insn);
3542                     switch (xop) {
3543                     case 0x20: /* taddcc */
3544                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3545                         gen_store_gpr(dc, rd, cpu_dst);
3546                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3547                         dc->cc_op = CC_OP_TADD;
3548                         break;
3549                     case 0x21: /* tsubcc */
3550                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3551                         gen_store_gpr(dc, rd, cpu_dst);
3552                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3553                         dc->cc_op = CC_OP_TSUB;
3554                         break;
3555                     case 0x22: /* taddcctv */
3556                         gen_helper_taddcctv(cpu_dst, cpu_env,
3557                                             cpu_src1, cpu_src2);
3558                         gen_store_gpr(dc, rd, cpu_dst);
3559                         dc->cc_op = CC_OP_TADDTV;
3560                         break;
3561                     case 0x23: /* tsubcctv */
3562                         gen_helper_tsubcctv(cpu_dst, cpu_env,
3563                                             cpu_src1, cpu_src2);
3564                         gen_store_gpr(dc, rd, cpu_dst);
3565                         dc->cc_op = CC_OP_TSUBTV;
3566                         break;
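                     /* mulscc performs one step of a multiply and folds the
                        current icc flags into the result, so the lazily
                        evaluated condition codes must be materialized first
                        via update_psr(). */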
3567                     case 0x24: /* mulscc */
3568                         update_psr(dc);
3569                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3570                         gen_store_gpr(dc, rd, cpu_dst);
3571                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3572                         dc->cc_op = CC_OP_ADD;
3573                         break;
3574 #ifndef TARGET_SPARC64
3575                     case 0x25:  /* sll */
3576                         if (IS_IMM) { /* immediate */
3577                             simm = GET_FIELDs(insn, 20, 31);
3578                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3579                         } else { /* register */
3580                             cpu_tmp0 = get_temp_tl(dc);
3581                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3582                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3583                         }
3584                         gen_store_gpr(dc, rd, cpu_dst);
3585                         break;
3586                     case 0x26:  /* srl */
3587                         if (IS_IMM) { /* immediate */
3588                             simm = GET_FIELDs(insn, 20, 31);
3589                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3590                         } else { /* register */
3591                             cpu_tmp0 = get_temp_tl(dc);
3592                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3593                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3594                         }
3595                         gen_store_gpr(dc, rd, cpu_dst);
3596                         break;
3597                     case 0x27:  /* sra */
3598                         if (IS_IMM) { /* immediate */
3599                             simm = GET_FIELDs(insn, 20, 31);
3600                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3601                         } else { /* register */
3602                             cpu_tmp0 = get_temp_tl(dc);
3603                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3604                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3605                         }
3606                         gen_store_gpr(dc, rd, cpu_dst);
3607                         break;
3608 #endif
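                     /* xop 0x30-0x33 write the state registers.  The wr
                        instructions architecturally store rs1 XOR operand2,
                        which is why each case below starts from a
                        tcg_gen_xor_tl of the two sources. */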
3609                     case 0x30: /* wry, V9 wr %asr */
3610                         {
3611                             cpu_tmp0 = get_temp_tl(dc);
3612                             switch(rd) {
3613                             case 0: /* wry */
3614                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3615                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3616                                 break;
3617 #ifndef TARGET_SPARC64
3618                             case 0x01 ... 0x0f: /* undefined in the
3619                                                    SPARCv8 manual and a
3620                                                    nop on the
3621                                                    microSPARC II */
3622                             case 0x10 ... 0x1f: /* implementation-dependent
3623                                                    in the SPARCv8 manual,
3624                                                    a nop on the
3625                                                    microSPARC II */
3626                                 if ((rd == 0x13) && (dc->def->features &
3627                                                      CPU_FEATURE_POWERDOWN)) {
3628                                     /* LEON3 power-down */
3629                                     gen_helper_power_down(cpu_env);
3630                                 }
3631                                 break;
3632 #else
3633                             case 0x2: /* V9 wrccr */
3634                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3635                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
3636                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3637                                 dc->cc_op = CC_OP_FLAGS;
3638                                 break;
3639                             case 0x3: /* V9 wrasi */
3640                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3641                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
3642                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_tmp0);
3643                                 break;
3644                             case 0x6: /* V9 wrfprs */
3645                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3646                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
3647                                 save_state(dc);
3648                                 gen_op_next_insn();
3649                                 tcg_gen_exit_tb(0);
3650                                 dc->is_br = 1;
3651                                 break;
3652                             case 0xf: /* V9 sir, nop if user */
3653 #if !defined(CONFIG_USER_ONLY)
3654                                 if (supervisor(dc)) {
3655                                     ; // XXX
3656                                 }
3657 #endif
3658                                 break;
3659                             case 0x13: /* Graphics Status */
3660                                 if (gen_trap_ifnofpu(dc)) {
3661                                     goto jmp_insn;
3662                                 }
3663                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3664                                 break;
3665                             case 0x14: /* Softint set */
3666                                 if (!supervisor(dc))
3667                                     goto illegal_insn;
3668                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3669                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
3670                                 break;
3671                             case 0x15: /* Softint clear */
3672                                 if (!supervisor(dc))
3673                                     goto illegal_insn;
3674                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3675                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
3676                                 break;
3677                             case 0x16: /* Softint write */
3678                                 if (!supervisor(dc))
3679                                     goto illegal_insn;
3680                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3681                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
3682                                 break;
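                                 /* ASRs 0x17-0x19 program the tick and stick
                                    timers: the xor-ed operand is written to
                                    the compare/count register and pushed to
                                    the QEMU timer through the tick_set_*
                                    helpers. */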
3683                             case 0x17: /* Tick compare */
3684 #if !defined(CONFIG_USER_ONLY)
3685                                 if (!supervisor(dc))
3686                                     goto illegal_insn;
3687 #endif
3688                                 {
3689                                     TCGv_ptr r_tickptr;
3690
3691                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3692                                                    cpu_src2);
3693                                     r_tickptr = tcg_temp_new_ptr();
3694                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3695                                                    offsetof(CPUSPARCState, tick));
3696                                     gen_helper_tick_set_limit(r_tickptr,
3697                                                               cpu_tick_cmpr);
3698                                     tcg_temp_free_ptr(r_tickptr);
3699                                 }
3700                                 break;
3701                             case 0x18: /* System tick */
3702 #if !defined(CONFIG_USER_ONLY)
3703                                 if (!supervisor(dc))
3704                                     goto illegal_insn;
3705 #endif
3706                                 {
3707                                     TCGv_ptr r_tickptr;
3708
3709                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3710                                                    cpu_src2);
3711                                     r_tickptr = tcg_temp_new_ptr();
3712                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3713                                                    offsetof(CPUSPARCState, stick));
3714                                     gen_helper_tick_set_count(r_tickptr,
3715                                                               cpu_tmp0);
3716                                     tcg_temp_free_ptr(r_tickptr);
3717                                 }
3718                                 break;
3719                             case 0x19: /* System tick compare */
3720 #if !defined(CONFIG_USER_ONLY)
3721                                 if (!supervisor(dc))
3722                                     goto illegal_insn;
3723 #endif
3724                                 {
3725                                     TCGv_ptr r_tickptr;
3726
3727                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3728                                                    cpu_src2);
3729                                     r_tickptr = tcg_temp_new_ptr();
3730                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3731                                                    offsetof(CPUSPARCState, stick));
3732                                     gen_helper_tick_set_limit(r_tickptr,
3733                                                               cpu_stick_cmpr);
3734                                     tcg_temp_free_ptr(r_tickptr);
3735                                 }
3736                                 break;
3737
3738                             case 0x10: /* Performance Control */
3739                             case 0x11: /* Performance Instrumentation
3740                                           Counter */
3741                             case 0x12: /* Dispatch Control */
3742 #endif
3743                             default:
3744                                 goto illegal_insn;
3745                             }
3746                         }
3747                         break;
3748 #if !defined(CONFIG_USER_ONLY)
3749                     case 0x31: /* wrpsr, V9 saved, restored */
3750                         {
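                                 /* On pre-V9 CPUs this is wrpsr: the write can
                                    change state the translator depends on
                                    (CWP, PIL, ET, the icc flags), so cc_op is
                                    reset to CC_OP_FLAGS and the TB is ended.
                                    On V9 the same opcode encodes
                                    saved/restored, selected by rd. */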
3751                             if (!supervisor(dc))
3752                                 goto priv_insn;
3753 #ifdef TARGET_SPARC64
3754                             switch (rd) {
3755                             case 0:
3756                                 gen_helper_saved(cpu_env);
3757                                 break;
3758                             case 1:
3759                                 gen_helper_restored(cpu_env);
3760                                 break;
3761                             case 2: /* UA2005 allclean */
3762                             case 3: /* UA2005 otherw */
3763                             case 4: /* UA2005 normalw */
3764                             case 5: /* UA2005 invalw */
3765                                 // XXX
3766                             default:
3767                                 goto illegal_insn;
3768                             }
3769 #else
3770                             cpu_tmp0 = get_temp_tl(dc);
3771                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3772                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
3773                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3774                             dc->cc_op = CC_OP_FLAGS;
3775                             save_state(dc);
3776                             gen_op_next_insn();
3777                             tcg_gen_exit_tb(0);
3778                             dc->is_br = 1;
3779 #endif
3780                         }
3781                         break;
3782                     case 0x32: /* wrwim, V9 wrpr */
3783                         {
3784                             if (!supervisor(dc))
3785                                 goto priv_insn;
3786                             cpu_tmp0 = get_temp_tl(dc);
3787                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3788 #ifdef TARGET_SPARC64
3789                             switch (rd) {
3790                             case 0: // tpc
3791                                 {
3792                                     TCGv_ptr r_tsptr;
3793
3794                                     r_tsptr = tcg_temp_new_ptr();
3795                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3796                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3797                                                   offsetof(trap_state, tpc));
3798                                     tcg_temp_free_ptr(r_tsptr);
3799                                 }
3800                                 break;
3801                             case 1: // tnpc
3802                                 {
3803                                     TCGv_ptr r_tsptr;
3804
3805                                     r_tsptr = tcg_temp_new_ptr();
3806                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3807                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3808                                                   offsetof(trap_state, tnpc));
3809                                     tcg_temp_free_ptr(r_tsptr);
3810                                 }
3811                                 break;
3812                             case 2: // tstate
3813                                 {
3814                                     TCGv_ptr r_tsptr;
3815
3816                                     r_tsptr = tcg_temp_new_ptr();
3817                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3818                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3819                                                   offsetof(trap_state,
3820                                                            tstate));
3821                                     tcg_temp_free_ptr(r_tsptr);
3822                                 }
3823                                 break;
3824                             case 3: // tt
3825                                 {
3826                                     TCGv_ptr r_tsptr;
3827
3828                                     r_tsptr = tcg_temp_new_ptr();
3829                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3830                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
3831                                                     offsetof(trap_state, tt));
3832                                     tcg_temp_free_ptr(r_tsptr);
3833                                 }
3834                                 break;
3835                             case 4: // tick
3836                                 {
3837                                     TCGv_ptr r_tickptr;
3838
3839                                     r_tickptr = tcg_temp_new_ptr();
3840                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3841                                                    offsetof(CPUSPARCState, tick));
3842                                     gen_helper_tick_set_count(r_tickptr,
3843                                                               cpu_tmp0);
3844                                     tcg_temp_free_ptr(r_tickptr);
3845                                 }
3846                                 break;
3847                             case 5: // tba
3848                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3849                                 break;
3850                             case 6: // pstate
3851                                 save_state(dc);
3852                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3853                                 dc->npc = DYNAMIC_PC;
3854                                 break;
3855                             case 7: // tl
3856                                 save_state(dc);
3857                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3858                                                offsetof(CPUSPARCState, tl));
3859                                 dc->npc = DYNAMIC_PC;
3860                                 break;
3861                             case 8: // pil
3862                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
3863                                 break;
3864                             case 9: // cwp
3865                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3866                                 break;
3867                             case 10: // cansave
3868                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3869                                                 offsetof(CPUSPARCState,
3870                                                          cansave));
3871                                 break;
3872                             case 11: // canrestore
3873                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3874                                                 offsetof(CPUSPARCState,
3875                                                          canrestore));
3876                                 break;
3877                             case 12: // cleanwin
3878                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3879                                                 offsetof(CPUSPARCState,
3880                                                          cleanwin));
3881                                 break;
3882                             case 13: // otherwin
3883                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3884                                                 offsetof(CPUSPARCState,
3885                                                          otherwin));
3886                                 break;
3887                             case 14: // wstate
3888                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3889                                                 offsetof(CPUSPARCState,
3890                                                          wstate));
3891                                 break;
3892                             case 16: // UA2005 gl
3893                                 CHECK_IU_FEATURE(dc, GL);
3894                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3895                                                 offsetof(CPUSPARCState, gl));
3896                                 break;
3897                             case 26: // UA2005 strand status
3898                                 CHECK_IU_FEATURE(dc, HYPV);
3899                                 if (!hypervisor(dc))
3900                                     goto priv_insn;
3901                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3902                                 break;
3903                             default:
3904                                 goto illegal_insn;
3905                             }
3906 #else
3907                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
3908                             if (dc->def->nwindows != 32) {
3909                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
3910                                                 (1 << dc->def->nwindows) - 1);
3911                             }
3912 #endif
3913                         }
3914                         break;
3915                     case 0x33: /* wrtbr, UA2005 wrhpr */
3916                         {
3917 #ifndef TARGET_SPARC64
3918                             if (!supervisor(dc))
3919                                 goto priv_insn;
3920                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3921 #else
3922                             CHECK_IU_FEATURE(dc, HYPV);
3923                             if (!hypervisor(dc))
3924                                 goto priv_insn;
3925                             cpu_tmp0 = get_temp_tl(dc);
3926                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3927                             switch (rd) {
3928                             case 0: // hpstate
3929                                 // XXX gen_op_wrhpstate();
3930                                 save_state(dc);
3931                                 gen_op_next_insn();
3932                                 tcg_gen_exit_tb(0);
3933                                 dc->is_br = 1;
3934                                 break;
3935                             case 1: // htstate
3936                                 // XXX gen_op_wrhtstate();
3937                                 break;
3938                             case 3: // hintp
3939                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3940                                 break;
3941                             case 5: // htba
3942                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3943                                 break;
3944                             case 31: // hstick_cmpr
3945                                 {
3946                                     TCGv_ptr r_tickptr;
3947
3948                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3949                                     r_tickptr = tcg_temp_new_ptr();
3950                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3951                                                    offsetof(CPUSPARCState, hstick));
3952                                     gen_helper_tick_set_limit(r_tickptr,
3953                                                               cpu_hstick_cmpr);
3954                                     tcg_temp_free_ptr(r_tickptr);
3955                                 }
3956                                 break;
3957                             case 6: // hver readonly
3958                             default:
3959                                 goto illegal_insn;
3960                             }
3961 #endif
3962                         }
3963                         break;
3964 #endif
3965 #ifdef TARGET_SPARC64
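                     /* movcc: bit 18 selects the integer condition codes
                        (cc field 0 = %icc, 2 = %xcc) versus %fccN.  The
                        condition is evaluated into a DisasCompare and a
                        single movcond is emitted, with the previous rd
                        value as the "false" operand. */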
3966                     case 0x2c: /* V9 movcc */
3967                         {
3968                             int cc = GET_FIELD_SP(insn, 11, 12);
3969                             int cond = GET_FIELD_SP(insn, 14, 17);
3970                             DisasCompare cmp;
3971                             TCGv dst;
3972
3973                             if (insn & (1 << 18)) {
3974                                 if (cc == 0) {
3975                                     gen_compare(&cmp, 0, cond, dc);
3976                                 } else if (cc == 2) {
3977                                     gen_compare(&cmp, 1, cond, dc);
3978                                 } else {
3979                                     goto illegal_insn;
3980                                 }
3981                             } else {
3982                                 gen_fcompare(&cmp, cc, cond);
3983                             }
3984
3985                             /* The get_src2 above loaded the normal 13-bit
3986                                immediate field, not the 11-bit field we have
3987                                in movcc.  But it did handle the reg case.  */
3988                             if (IS_IMM) {
3989                                 simm = GET_FIELD_SPs(insn, 0, 10);
3990                                 tcg_gen_movi_tl(cpu_src2, simm);
3991                             }
3992
3993                             dst = gen_load_gpr(dc, rd);
3994                             tcg_gen_movcond_tl(cmp.cond, dst,
3995                                                cmp.c1, cmp.c2,
3996                                                cpu_src2, dst);
3997                             free_compare(&cmp);
3998                             gen_store_gpr(dc, rd, dst);
3999                             break;
4000                         }
4001                     case 0x2d: /* V9 sdivx */
4002                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4003                         gen_store_gpr(dc, rd, cpu_dst);
4004                         break;
4005                     case 0x2e: /* V9 popc */
4006                         gen_helper_popc(cpu_dst, cpu_src2);
4007                         gen_store_gpr(dc, rd, cpu_dst);
4008                         break;
4009                     case 0x2f: /* V9 movr */
4010                         {
4011                             int cond = GET_FIELD_SP(insn, 10, 12);
4012                             DisasCompare cmp;
4013                             TCGv dst;
4014
4015                             gen_compare_reg(&cmp, cond, cpu_src1);
4016
4017                             /* The get_src2 above loaded the normal 13-bit
4018                                immediate field, not the 10-bit field we have
4019                                in movr.  But it did handle the reg case.  */
4020                             if (IS_IMM) {
4021                                 simm = GET_FIELD_SPs(insn, 0, 9);
4022                                 tcg_gen_movi_tl(cpu_src2, simm);
4023                             }
4024
4025                             dst = gen_load_gpr(dc, rd);
4026                             tcg_gen_movcond_tl(cmp.cond, dst,
4027                                                cmp.c1, cmp.c2,
4028                                                cpu_src2, dst);
4029                             free_compare(&cmp);
4030                             gen_store_gpr(dc, rd, dst);
4031                             break;
4032                         }
4033 #endif
4034                     default:
4035                         goto illegal_insn;
4036                     }
4037                 }
4038             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4039 #ifdef TARGET_SPARC64
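                 /* xop 0x36 is IMPDEP1, which on UltraSPARC carries the VIS
                    instructions; the opf field extracted below selects the
                    operation.  All of them require the FPU to be enabled,
                    hence the gen_trap_ifnofpu() check. */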
4040                 int opf = GET_FIELD_SP(insn, 5, 13);
4041                 rs1 = GET_FIELD(insn, 13, 17);
4042                 rs2 = GET_FIELD(insn, 27, 31);
4043                 if (gen_trap_ifnofpu(dc)) {
4044                     goto jmp_insn;
4045                 }
4046
4047                 switch (opf) {
4048                 case 0x000: /* VIS I edge8cc */
4049                     CHECK_FPU_FEATURE(dc, VIS1);
4050                     cpu_src1 = gen_load_gpr(dc, rs1);
4051                     cpu_src2 = gen_load_gpr(dc, rs2);
4052                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4053                     gen_store_gpr(dc, rd, cpu_dst);
4054                     break;
4055                 case 0x001: /* VIS II edge8n */
4056                     CHECK_FPU_FEATURE(dc, VIS2);
4057                     cpu_src1 = gen_load_gpr(dc, rs1);
4058                     cpu_src2 = gen_load_gpr(dc, rs2);
4059                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4060                     gen_store_gpr(dc, rd, cpu_dst);
4061                     break;
4062                 case 0x002: /* VIS I edge8lcc */
4063                     CHECK_FPU_FEATURE(dc, VIS1);
4064                     cpu_src1 = gen_load_gpr(dc, rs1);
4065                     cpu_src2 = gen_load_gpr(dc, rs2);
4066                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4067                     gen_store_gpr(dc, rd, cpu_dst);
4068                     break;
4069                 case 0x003: /* VIS II edge8ln */
4070                     CHECK_FPU_FEATURE(dc, VIS2);
4071                     cpu_src1 = gen_load_gpr(dc, rs1);
4072                     cpu_src2 = gen_load_gpr(dc, rs2);
4073                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4074                     gen_store_gpr(dc, rd, cpu_dst);
4075                     break;
4076                 case 0x004: /* VIS I edge16cc */
4077                     CHECK_FPU_FEATURE(dc, VIS1);
4078                     cpu_src1 = gen_load_gpr(dc, rs1);
4079                     cpu_src2 = gen_load_gpr(dc, rs2);
4080                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4081                     gen_store_gpr(dc, rd, cpu_dst);
4082                     break;
4083                 case 0x005: /* VIS II edge16n */
4084                     CHECK_FPU_FEATURE(dc, VIS2);
4085                     cpu_src1 = gen_load_gpr(dc, rs1);
4086                     cpu_src2 = gen_load_gpr(dc, rs2);
4087                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4088                     gen_store_gpr(dc, rd, cpu_dst);
4089                     break;
4090                 case 0x006: /* VIS I edge16lcc */
4091                     CHECK_FPU_FEATURE(dc, VIS1);
4092                     cpu_src1 = gen_load_gpr(dc, rs1);
4093                     cpu_src2 = gen_load_gpr(dc, rs2);
4094                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4095                     gen_store_gpr(dc, rd, cpu_dst);
4096                     break;
4097                 case 0x007: /* VIS II edge16ln */
4098                     CHECK_FPU_FEATURE(dc, VIS2);
4099                     cpu_src1 = gen_load_gpr(dc, rs1);
4100                     cpu_src2 = gen_load_gpr(dc, rs2);
4101                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4102                     gen_store_gpr(dc, rd, cpu_dst);
4103                     break;
4104                 case 0x008: /* VIS I edge32cc */
4105                     CHECK_FPU_FEATURE(dc, VIS1);
4106                     cpu_src1 = gen_load_gpr(dc, rs1);
4107                     cpu_src2 = gen_load_gpr(dc, rs2);
4108                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4109                     gen_store_gpr(dc, rd, cpu_dst);
4110                     break;
4111                 case 0x009: /* VIS II edge32n */
4112                     CHECK_FPU_FEATURE(dc, VIS2);
4113                     cpu_src1 = gen_load_gpr(dc, rs1);
4114                     cpu_src2 = gen_load_gpr(dc, rs2);
4115                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4116                     gen_store_gpr(dc, rd, cpu_dst);
4117                     break;
4118                 case 0x00a: /* VIS I edge32lcc */
4119                     CHECK_FPU_FEATURE(dc, VIS1);
4120                     cpu_src1 = gen_load_gpr(dc, rs1);
4121                     cpu_src2 = gen_load_gpr(dc, rs2);
4122                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4123                     gen_store_gpr(dc, rd, cpu_dst);
4124                     break;
4125                 case 0x00b: /* VIS II edge32ln */
4126                     CHECK_FPU_FEATURE(dc, VIS2);
4127                     cpu_src1 = gen_load_gpr(dc, rs1);
4128                     cpu_src2 = gen_load_gpr(dc, rs2);
4129                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4130                     gen_store_gpr(dc, rd, cpu_dst);
4131                     break;
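                 /* The edge* cases above compute edge masks for partial
                    stores on 8/16/32-bit elements; the trailing gen_edge()
                    arguments select whether the integer condition codes are
                    updated ("cc" forms) and whether the little-endian mask
                    is produced ("l" forms). */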
4132                 case 0x010: /* VIS I array8 */
4133                     CHECK_FPU_FEATURE(dc, VIS1);
4134                     cpu_src1 = gen_load_gpr(dc, rs1);
4135                     cpu_src2 = gen_load_gpr(dc, rs2);
4136                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4137                     gen_store_gpr(dc, rd, cpu_dst);
4138                     break;
4139                 case 0x012: /* VIS I array16 */
4140                     CHECK_FPU_FEATURE(dc, VIS1);
4141                     cpu_src1 = gen_load_gpr(dc, rs1);
4142                     cpu_src2 = gen_load_gpr(dc, rs2);
4143                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4144                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4145                     gen_store_gpr(dc, rd, cpu_dst);
4146                     break;
4147                 case 0x014: /* VIS I array32 */
4148                     CHECK_FPU_FEATURE(dc, VIS1);
4149                     cpu_src1 = gen_load_gpr(dc, rs1);
4150                     cpu_src2 = gen_load_gpr(dc, rs2);
4151                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4152                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4153                     gen_store_gpr(dc, rd, cpu_dst);
4154                     break;
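                 /* array16 and array32 are array8 scaled by the element
                    size, which is why all three share gen_helper_array8 and
                    differ only in the final shift. */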
4155                 case 0x018: /* VIS I alignaddr */
4156                     CHECK_FPU_FEATURE(dc, VIS1);
4157                     cpu_src1 = gen_load_gpr(dc, rs1);
4158                     cpu_src2 = gen_load_gpr(dc, rs2);
4159                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4160                     gen_store_gpr(dc, rd, cpu_dst);
4161                     break;
4162                 case 0x01a: /* VIS I alignaddrl */
4163                     CHECK_FPU_FEATURE(dc, VIS1);
4164                     cpu_src1 = gen_load_gpr(dc, rs1);
4165                     cpu_src2 = gen_load_gpr(dc, rs2);
4166                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4167                     gen_store_gpr(dc, rd, cpu_dst);
4168                     break;
4169                 case 0x019: /* VIS II bmask */
4170                     CHECK_FPU_FEATURE(dc, VIS2);
4171                     cpu_src1 = gen_load_gpr(dc, rs1);
4172                     cpu_src2 = gen_load_gpr(dc, rs2);
4173                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4174                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4175                     gen_store_gpr(dc, rd, cpu_dst);
4176                     break;
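                 /* alignaddr/alignaddrl record the byte alignment of
                    rs1 + rs2 in %gsr for a later faligndata, while bmask
                    deposits its result into the upper 32 bits of %gsr as
                    the permutation mask consumed by bshuffle. */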
4177                 case 0x020: /* VIS I fcmple16 */
4178                     CHECK_FPU_FEATURE(dc, VIS1);
4179                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4180                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4181                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4182                     gen_store_gpr(dc, rd, cpu_dst);
4183                     break;
4184                 case 0x022: /* VIS I fcmpne16 */
4185                     CHECK_FPU_FEATURE(dc, VIS1);
4186                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4187                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4188                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4189                     gen_store_gpr(dc, rd, cpu_dst);
4190                     break;
4191                 case 0x024: /* VIS I fcmple32 */
4192                     CHECK_FPU_FEATURE(dc, VIS1);
4193                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4194                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4195                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4196                     gen_store_gpr(dc, rd, cpu_dst);
4197                     break;
4198                 case 0x026: /* VIS I fcmpne32 */
4199                     CHECK_FPU_FEATURE(dc, VIS1);
4200                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4201                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4202                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4203                     gen_store_gpr(dc, rd, cpu_dst);
4204                     break;
4205                 case 0x028: /* VIS I fcmpgt16 */
4206                     CHECK_FPU_FEATURE(dc, VIS1);
4207                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4208                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4209                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4210                     gen_store_gpr(dc, rd, cpu_dst);
4211                     break;
4212                 case 0x02a: /* VIS I fcmpeq16 */
4213                     CHECK_FPU_FEATURE(dc, VIS1);
4214                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4215                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4216                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4217                     gen_store_gpr(dc, rd, cpu_dst);
4218                     break;
4219                 case 0x02c: /* VIS I fcmpgt32 */
4220                     CHECK_FPU_FEATURE(dc, VIS1);
4221                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4222                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4223                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4224                     gen_store_gpr(dc, rd, cpu_dst);
4225                     break;
4226                 case 0x02e: /* VIS I fcmpeq32 */
4227                     CHECK_FPU_FEATURE(dc, VIS1);
4228                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4229                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4230                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4231                     gen_store_gpr(dc, rd, cpu_dst);
4232                     break;
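                 /* Unlike the FPop compares earlier in this function, the
                    VIS fcmp* forms compare the partitioned 16- or 32-bit
                    fields of two double registers and return the result as
                    a bit mask in the integer destination register. */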
4233                 case 0x031: /* VIS I fmul8x16 */
4234                     CHECK_FPU_FEATURE(dc, VIS1);
4235                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4236                     break;
4237                 case 0x033: /* VIS I fmul8x16au */
4238                     CHECK_FPU_FEATURE(dc, VIS1);
4239                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4240                     break;
4241                 case 0x035: /* VIS I fmul8x16al */
4242                     CHECK_FPU_FEATURE(dc, VIS1);
4243                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4244                     break;
4245                 case 0x036: /* VIS I fmul8sux16 */
4246                     CHECK_FPU_FEATURE(dc, VIS1);
4247                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4248                     break;
4249                 case 0x037: /* VIS I fmul8ulx16 */
4250                     CHECK_FPU_FEATURE(dc, VIS1);
4251                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4252                     break;
4253                 case 0x038: /* VIS I fmuld8sux16 */
4254                     CHECK_FPU_FEATURE(dc, VIS1);
4255                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4256                     break;
4257                 case 0x039: /* VIS I fmuld8ulx16 */
4258                     CHECK_FPU_FEATURE(dc, VIS1);
4259                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4260                     break;
4261                 case 0x03a: /* VIS I fpack32 */
4262                     CHECK_FPU_FEATURE(dc, VIS1);
4263                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4264                     break;
4265                 case 0x03b: /* VIS I fpack16 */
4266                     CHECK_FPU_FEATURE(dc, VIS1);
4267                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4268                     cpu_dst_32 = gen_dest_fpr_F(dc);
4269                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4270                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4271                     break;
4272                 case 0x03d: /* VIS I fpackfix */
4273                     CHECK_FPU_FEATURE(dc, VIS1);
4274                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4275                     cpu_dst_32 = gen_dest_fpr_F(dc);
4276                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4277                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4278                     break;
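                 /* fpack32/fpack16/fpackfix scale and clip the partitioned
                    values according to the GSR.scale field, which is why
                    the pack helpers take cpu_gsr as an extra argument. */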
4279                 case 0x03e: /* VIS I pdist */
4280                     CHECK_FPU_FEATURE(dc, VIS1);
4281                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4282                     break;
4283                 case 0x048: /* VIS I faligndata */
4284                     CHECK_FPU_FEATURE(dc, VIS1);
4285                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4286                     break;
4287                 case 0x04b: /* VIS I fpmerge */
4288                     CHECK_FPU_FEATURE(dc, VIS1);
4289                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4290                     break;
4291                 case 0x04c: /* VIS II bshuffle */
4292                     CHECK_FPU_FEATURE(dc, VIS2);
4293                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4294                     break;
4295                 case 0x04d: /* VIS I fexpand */
4296                     CHECK_FPU_FEATURE(dc, VIS1);
4297                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4298                     break;
4299                 case 0x050: /* VIS I fpadd16 */
4300                     CHECK_FPU_FEATURE(dc, VIS1);
4301                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4302                     break;
4303                 case 0x051: /* VIS I fpadd16s */
4304                     CHECK_FPU_FEATURE(dc, VIS1);
4305                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4306                     break;
4307                 case 0x052: /* VIS I fpadd32 */
4308                     CHECK_FPU_FEATURE(dc, VIS1);
4309                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4310                     break;
4311                 case 0x053: /* VIS I fpadd32s */
4312                     CHECK_FPU_FEATURE(dc, VIS1);
4313                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4314                     break;
4315                 case 0x054: /* VIS I fpsub16 */
4316                     CHECK_FPU_FEATURE(dc, VIS1);
4317                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4318                     break;
4319                 case 0x055: /* VIS I fpsub16s */
4320                     CHECK_FPU_FEATURE(dc, VIS1);
4321                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4322                     break;
4323                 case 0x056: /* VIS I fpsub32 */
4324                     CHECK_FPU_FEATURE(dc, VIS1);
4325                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4326                     break;
4327                 case 0x057: /* VIS I fpsub32s */
4328                     CHECK_FPU_FEATURE(dc, VIS1);
4329                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4330                     break;
4331                 case 0x060: /* VIS I fzero */
4332                     CHECK_FPU_FEATURE(dc, VIS1);
4333                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4334                     tcg_gen_movi_i64(cpu_dst_64, 0);
4335                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4336                     break;
4337                 case 0x061: /* VIS I fzeros */
4338                     CHECK_FPU_FEATURE(dc, VIS1);
4339                     cpu_dst_32 = gen_dest_fpr_F(dc);
4340                     tcg_gen_movi_i32(cpu_dst_32, 0);
4341                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4342                     break;
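                 /* opf 0x060-0x07f are the VIS logical operations: the even
                    encodings operate on 64-bit registers and the odd "s"
                    forms on 32-bit ones, each mapping directly onto the
                    corresponding TCG logical op. */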
4343                 case 0x062: /* VIS I fnor */
4344                     CHECK_FPU_FEATURE(dc, VIS1);
4345                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4346                     break;
4347                 case 0x063: /* VIS I fnors */
4348                     CHECK_FPU_FEATURE(dc, VIS1);
4349                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4350                     break;
4351                 case 0x064: /* VIS I fandnot2 */
4352                     CHECK_FPU_FEATURE(dc, VIS1);
4353                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4354                     break;
4355                 case 0x065: /* VIS I fandnot2s */
4356                     CHECK_FPU_FEATURE(dc, VIS1);
4357                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4358                     break;
4359                 case 0x066: /* VIS I fnot2 */
4360                     CHECK_FPU_FEATURE(dc, VIS1);
4361                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4362                     break;
4363                 case 0x067: /* VIS I fnot2s */
4364                     CHECK_FPU_FEATURE(dc, VIS1);
4365                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4366                     break;
4367                 case 0x068: /* VIS I fandnot1 */
4368                     CHECK_FPU_FEATURE(dc, VIS1);
4369                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4370                     break;
4371                 case 0x069: /* VIS I fandnot1s */
4372                     CHECK_FPU_FEATURE(dc, VIS1);
4373                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4374                     break;
4375                 case 0x06a: /* VIS I fnot1 */
4376                     CHECK_FPU_FEATURE(dc, VIS1);
4377                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4378                     break;
4379                 case 0x06b: /* VIS I fnot1s */
4380                     CHECK_FPU_FEATURE(dc, VIS1);
4381                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4382                     break;
4383                 case 0x06c: /* VIS I fxor */
4384                     CHECK_FPU_FEATURE(dc, VIS1);
4385                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4386                     break;
4387                 case 0x06d: /* VIS I fxors */
4388                     CHECK_FPU_FEATURE(dc, VIS1);
4389                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4390                     break;
4391                 case 0x06e: /* VIS I fnand */
4392                     CHECK_FPU_FEATURE(dc, VIS1);
4393                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4394                     break;
4395                 case 0x06f: /* VIS I fnands */
4396                     CHECK_FPU_FEATURE(dc, VIS1);
4397                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4398                     break;
4399                 case 0x070: /* VIS I fand */
4400                     CHECK_FPU_FEATURE(dc, VIS1);
4401                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4402                     break;
4403                 case 0x071: /* VIS I fands */
4404                     CHECK_FPU_FEATURE(dc, VIS1);
4405                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4406                     break;
4407                 case 0x072: /* VIS I fxnor */
4408                     CHECK_FPU_FEATURE(dc, VIS1);
4409                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4410                     break;
4411                 case 0x073: /* VIS I fxnors */
4412                     CHECK_FPU_FEATURE(dc, VIS1);
4413                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4414                     break;
4415                 case 0x074: /* VIS I fsrc1 */
4416                     CHECK_FPU_FEATURE(dc, VIS1);
4417                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4418                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4419                     break;
4420                 case 0x075: /* VIS I fsrc1s */
4421                     CHECK_FPU_FEATURE(dc, VIS1);
4422                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4423                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4424                     break;
4425                 case 0x076: /* VIS I fornot2 */
4426                     CHECK_FPU_FEATURE(dc, VIS1);
4427                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4428                     break;
4429                 case 0x077: /* VIS I fornot2s */
4430                     CHECK_FPU_FEATURE(dc, VIS1);
4431                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4432                     break;
4433                 case 0x078: /* VIS I fsrc2 */
4434                     CHECK_FPU_FEATURE(dc, VIS1);
4435                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4436                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4437                     break;
4438                 case 0x079: /* VIS I fsrc2s */
4439                     CHECK_FPU_FEATURE(dc, VIS1);
4440                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4441                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4442                     break;
4443                 case 0x07a: /* VIS I fornot1 */
4444                     CHECK_FPU_FEATURE(dc, VIS1);
4445                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4446                     break;
4447                 case 0x07b: /* VIS I fornot1s */
4448                     CHECK_FPU_FEATURE(dc, VIS1);
4449                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4450                     break;
4451                 case 0x07c: /* VIS I for */
4452                     CHECK_FPU_FEATURE(dc, VIS1);
4453                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4454                     break;
4455                 case 0x07d: /* VIS I fors */
4456                     CHECK_FPU_FEATURE(dc, VIS1);
4457                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4458                     break;
4459                 case 0x07e: /* VIS I fone */
4460                     CHECK_FPU_FEATURE(dc, VIS1);
4461                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4462                     tcg_gen_movi_i64(cpu_dst_64, -1);
4463                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4464                     break;
4465                 case 0x07f: /* VIS I fones */
4466                     CHECK_FPU_FEATURE(dc, VIS1);
4467                     cpu_dst_32 = gen_dest_fpr_F(dc);
4468                     tcg_gen_movi_i32(cpu_dst_32, -1);
4469                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4470                     break;
4471                 case 0x080: /* VIS I shutdown */
4472                 case 0x081: /* VIS II siam */
4473                     // XXX
4474                     goto illegal_insn;
4475                 default:
4476                     goto illegal_insn;
4477                 }
4478 #else
4479                 goto ncp_insn;
4480 #endif
4481             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4482 #ifdef TARGET_SPARC64
4483                 goto illegal_insn;
4484 #else
4485                 goto ncp_insn;
4486 #endif
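             /* V9 "return": compute the target from rs1 plus a simm13
                immediate or rs2, pop the register window with an implicit
                restore, check 4-byte alignment of the target and jump to it
                through a dynamic npc.  */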
4487 #ifdef TARGET_SPARC64
4488             } else if (xop == 0x39) { /* V9 return */
4489                 TCGv_i32 r_const;
4490
4491                 save_state(dc);
4492                 cpu_src1 = get_src1(dc, insn);
4493                 cpu_tmp0 = get_temp_tl(dc);
4494                 if (IS_IMM) {   /* immediate */
4495                     simm = GET_FIELDs(insn, 19, 31);
4496                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4497                 } else {                /* register */
4498                     rs2 = GET_FIELD(insn, 27, 31);
4499                     if (rs2) {
4500                         cpu_src2 = gen_load_gpr(dc, rs2);
4501                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4502                     } else {
4503                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4504                     }
4505                 }
4506                 gen_helper_restore(cpu_env);
4507                 gen_mov_pc_npc(dc);
4508                 r_const = tcg_const_i32(3);
4509                 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4510                 tcg_temp_free_i32(r_const);
4511                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4512                 dc->npc = DYNAMIC_PC;
4513                 goto jmp_insn;
4514 #endif
4515             } else {
4516                 cpu_src1 = get_src1(dc, insn);
4517                 cpu_tmp0 = get_temp_tl(dc);
4518                 if (IS_IMM) {   /* immediate */
4519                     simm = GET_FIELDs(insn, 19, 31);
4520                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4521                 } else {                /* register */
4522                     rs2 = GET_FIELD(insn, 27, 31);
4523                     if (rs2) {
4524                         cpu_src2 = gen_load_gpr(dc, rs2);
4525                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4526                     } else {
4527                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4528                     }
4529                 }
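                 /* cpu_tmp0 now holds rs1 plus either a simm13 immediate or
                    rs2 (or rs1 alone when rs2 is %g0); the cases below
                    dispatch on xop: jmpl, rett, flush, save, restore and
                    V9 done/retry.  */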
4530                 switch (xop) {
4531                 case 0x38:      /* jmpl */
4532                     {
4533                         TCGv t;
4534                         TCGv_i32 r_const;
4535
4536                         t = gen_dest_gpr(dc, rd);
4537                         tcg_gen_movi_tl(t, dc->pc);
4538                         gen_store_gpr(dc, rd, t);
4539                         gen_mov_pc_npc(dc);
4540                         r_const = tcg_const_i32(3);
4541                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4542                         tcg_temp_free_i32(r_const);
4543                         gen_address_mask(dc, cpu_tmp0);
4544                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4545                         dc->npc = DYNAMIC_PC;
4546                     }
4547                     goto jmp_insn;
4548 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4549                 case 0x39:      /* rett, V9 return */
4550                     {
4551                         TCGv_i32 r_const;
4552
4553                         if (!supervisor(dc))
4554                             goto priv_insn;
4555                         gen_mov_pc_npc(dc);
4556                         r_const = tcg_const_i32(3);
4557                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4558                         tcg_temp_free_i32(r_const);
4559                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4560                         dc->npc = DYNAMIC_PC;
4561                         gen_helper_rett(cpu_env);
4562                     }
4563                     goto jmp_insn;
4564 #endif
4565                 case 0x3b: /* flush */
4566                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4567                         goto unimp_flush;
4568                     /* nop */
4569                     break;
4570                 case 0x3c:      /* save */
4571                     save_state(dc);
4572                     gen_helper_save(cpu_env);
4573                     gen_store_gpr(dc, rd, cpu_tmp0);
4574                     break;
4575                 case 0x3d:      /* restore */
4576                     save_state(dc);
4577                     gen_helper_restore(cpu_env);
4578                     gen_store_gpr(dc, rd, cpu_tmp0);
4579                     break;
4580 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
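                 /* done (rd == 0) resumes execution after the trapped
                    instruction, retry (rd == 1) re-executes it; both are
                    privileged and end the TB with a dynamic pc/npc.  */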
4581                 case 0x3e:      /* V9 done/retry */
4582                     {
4583                         switch (rd) {
4584                         case 0:
4585                             if (!supervisor(dc))
4586                                 goto priv_insn;
4587                             dc->npc = DYNAMIC_PC;
4588                             dc->pc = DYNAMIC_PC;
4589                             gen_helper_done(cpu_env);
4590                             goto jmp_insn;
4591                         case 1:
4592                             if (!supervisor(dc))
4593                                 goto priv_insn;
4594                             dc->npc = DYNAMIC_PC;
4595                             dc->pc = DYNAMIC_PC;
4596                             gen_helper_retry(cpu_env);
4597                             goto jmp_insn;
4598                         default:
4599                             goto illegal_insn;
4600                         }
4601                     }
4602                     break;
4603 #endif
4604                 default:
4605                     goto illegal_insn;
4606                 }
4607             }
4608             break;
4609         }
4610         break;
4611     case 3:                     /* load/store instructions */
4612         {
4613             unsigned int xop = GET_FIELD(insn, 7, 12);
4614             /* ??? gen_address_mask prevents us from using a source
4615                register directly.  Always generate a temporary.  */
4616             TCGv cpu_addr = get_temp_tl(dc);
4617
4618             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
4619             if (xop == 0x3c || xop == 0x3e) {
4620                 /* V9 casa/casxa: no offset */
4621             } else if (IS_IMM) {     /* immediate */
4622                 simm = GET_FIELDs(insn, 19, 31);
4623                 if (simm != 0) {
4624                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
4625                 }
4626             } else {            /* register */
4627                 rs2 = GET_FIELD(insn, 27, 31);
4628                 if (rs2 != 0) {
4629                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
4630                 }
4631             }
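             /* cpu_addr now holds rs1 plus either a simm13 immediate or rs2
                (casa/casxa take no offset).  The xop ranges below select
                integer and alternate-space loads, FP loads (0x20-0x23),
                integer stores, FP stores (0x24-0x27) and the V9
                alternate-space FP store / cas group (0x34-0x3e).  */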
4632             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4633                 (xop > 0x17 && xop <= 0x1d ) ||
4634                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4635                 TCGv cpu_val = gen_dest_gpr(dc, rd);
4636
4637                 switch (xop) {
4638                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4639                     gen_address_mask(dc, cpu_addr);
4640                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4641                     break;
4642                 case 0x1:       /* ldub, load unsigned byte */
4643                     gen_address_mask(dc, cpu_addr);
4644                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4645                     break;
4646                 case 0x2:       /* lduh, load unsigned halfword */
4647                     gen_address_mask(dc, cpu_addr);
4648                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4649                     break;
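                 /* ldd loads a 64-bit doubleword into the even/odd pair
                    rd/rd+1: with the big-endian 64-bit load, rd receives the
                    word at the lower address and rd+1 the word at the higher
                    address.  */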
4650                 case 0x3:       /* ldd, load double word */
4651                     if (rd & 1)
4652                         goto illegal_insn;
4653                     else {
4654                         TCGv_i32 r_const;
4655                         TCGv_i64 t64;
4656
4657                         save_state(dc);
4658                         r_const = tcg_const_i32(7);
4659                         /* XXX remove alignment check */
4660                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4661                         tcg_temp_free_i32(r_const);
4662                         gen_address_mask(dc, cpu_addr);
4663                         t64 = tcg_temp_new_i64();
4664                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4665                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4666                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4667                         gen_store_gpr(dc, rd + 1, cpu_val);
4668                         tcg_gen_shri_i64(t64, t64, 32);
4669                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4670                         tcg_temp_free_i64(t64);
4671                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4672                     }
4673                     break;
4674                 case 0x9:       /* ldsb, load signed byte */
4675                     gen_address_mask(dc, cpu_addr);
4676                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4677                     break;
4678                 case 0xa:       /* ldsh, load signed halfword */
4679                     gen_address_mask(dc, cpu_addr);
4680                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4681                     break;
4682                 case 0xd:       /* ldstub -- XXX: should be atomic */
4683                     {
4684                         TCGv r_const;
4685
4686                         gen_address_mask(dc, cpu_addr);
4687                         tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4688                         r_const = tcg_const_tl(0xff);
4689                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4690                         tcg_temp_free(r_const);
4691                     }
4692                     break;
4693                 case 0x0f:
4694                     /* swap, swap register with memory. Should also be atomic */
4695                     {
4696                         TCGv t0 = get_temp_tl(dc);
4697                         CHECK_IU_FEATURE(dc, SWAP);
4698                         cpu_src1 = gen_load_gpr(dc, rd);
4699                         gen_address_mask(dc, cpu_addr);
4700                         tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4701                         tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4702                         tcg_gen_mov_tl(cpu_val, t0);
4703                     }
4704                     break;
4705 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4706                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4707 #ifndef TARGET_SPARC64
4708                     if (IS_IMM)
4709                         goto illegal_insn;
4710                     if (!supervisor(dc))
4711                         goto priv_insn;
4712 #endif
4713                     save_state(dc);
4714                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4715                     break;
4716                 case 0x11:      /* lduba, load unsigned byte alternate */
4717 #ifndef TARGET_SPARC64
4718                     if (IS_IMM)
4719                         goto illegal_insn;
4720                     if (!supervisor(dc))
4721                         goto priv_insn;
4722 #endif
4723                     save_state(dc);
4724                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4725                     break;
4726                 case 0x12:      /* lduha, load unsigned halfword alternate */
4727 #ifndef TARGET_SPARC64
4728                     if (IS_IMM)
4729                         goto illegal_insn;
4730                     if (!supervisor(dc))
4731                         goto priv_insn;
4732 #endif
4733                     save_state(dc);
4734                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4735                     break;
4736                 case 0x13:      /* ldda, load double word alternate */
4737 #ifndef TARGET_SPARC64
4738                     if (IS_IMM)
4739                         goto illegal_insn;
4740                     if (!supervisor(dc))
4741                         goto priv_insn;
4742 #endif
4743                     if (rd & 1)
4744                         goto illegal_insn;
4745                     save_state(dc);
4746                     gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4747                     goto skip_move;
4748                 case 0x19:      /* ldsba, load signed byte alternate */
4749 #ifndef TARGET_SPARC64
4750                     if (IS_IMM)
4751                         goto illegal_insn;
4752                     if (!supervisor(dc))
4753                         goto priv_insn;
4754 #endif
4755                     save_state(dc);
4756                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4757                     break;
4758                 case 0x1a:      /* ldsha, load signed halfword alternate */
4759 #ifndef TARGET_SPARC64
4760                     if (IS_IMM)
4761                         goto illegal_insn;
4762                     if (!supervisor(dc))
4763                         goto priv_insn;
4764 #endif
4765                     save_state(dc);
4766                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4767                     break;
4768                 case 0x1d:      /* ldstuba -- XXX: should be atomic */
4769 #ifndef TARGET_SPARC64
4770                     if (IS_IMM)
4771                         goto illegal_insn;
4772                     if (!supervisor(dc))
4773                         goto priv_insn;
4774 #endif
4775                     save_state(dc);
4776                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4777                     break;
4778                 case 0x1f:      /* swapa, swap reg with alternate memory.
4779                                    Should also be atomic */
4780                     CHECK_IU_FEATURE(dc, SWAP);
4781 #ifndef TARGET_SPARC64
4782                     if (IS_IMM)
4783                         goto illegal_insn;
4784                     if (!supervisor(dc))
4785                         goto priv_insn;
4786 #endif
4787                     save_state(dc);
4788                     cpu_src1 = gen_load_gpr(dc, rd);
4789                     gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4790                     break;
4791
4792 #ifndef TARGET_SPARC64
4793                 case 0x30: /* ldc */
4794                 case 0x31: /* ldcsr */
4795                 case 0x33: /* lddc */
4796                     goto ncp_insn;
4797 #endif
4798 #endif
4799 #ifdef TARGET_SPARC64
4800                 case 0x08: /* V9 ldsw */
4801                     gen_address_mask(dc, cpu_addr);
4802                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4803                     break;
4804                 case 0x0b: /* V9 ldx */
4805                     gen_address_mask(dc, cpu_addr);
4806                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4807                     break;
4808                 case 0x18: /* V9 ldswa */
4809                     save_state(dc);
4810                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4811                     break;
4812                 case 0x1b: /* V9 ldxa */
4813                     save_state(dc);
4814                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4815                     break;
4816                 case 0x2d: /* V9 prefetch, no effect */
4817                     goto skip_move;
4818                 case 0x30: /* V9 ldfa */
4819                     if (gen_trap_ifnofpu(dc)) {
4820                         goto jmp_insn;
4821                     }
4822                     save_state(dc);
4823                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4824                     gen_update_fprs_dirty(rd);
4825                     goto skip_move;
4826                 case 0x33: /* V9 lddfa */
4827                     if (gen_trap_ifnofpu(dc)) {
4828                         goto jmp_insn;
4829                     }
4830                     save_state(dc);
4831                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4832                     gen_update_fprs_dirty(DFPREG(rd));
4833                     goto skip_move;
4834                 case 0x3d: /* V9 prefetcha, no effect */
4835                     goto skip_move;
4836                 case 0x32: /* V9 ldqfa */
4837                     CHECK_FPU_FEATURE(dc, FLOAT128);
4838                     if (gen_trap_ifnofpu(dc)) {
4839                         goto jmp_insn;
4840                     }
4841                     save_state(dc);
4842                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4843                     gen_update_fprs_dirty(QFPREG(rd));
4844                     goto skip_move;
4845 #endif
4846                 default:
4847                     goto illegal_insn;
4848                 }
4849                 gen_store_gpr(dc, rd, cpu_val);
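                 /* Cases that write their destinations themselves (ldda, the
                    FP alternate-space loads and the prefetches) bypass the
                    common gen_store_gpr() above via skip_move.  */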
4850 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4851             skip_move: ;
4852 #endif
4853             } else if (xop >= 0x20 && xop < 0x24) {
4854                 TCGv t0;
4855
4856                 if (gen_trap_ifnofpu(dc)) {
4857                     goto jmp_insn;
4858                 }
4859                 save_state(dc);
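                 /* xop 0x20-0x23 are the FP loads: ldf, ldfsr (V9 ldxfsr),
                    ldqf and lddf.  */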
4860                 switch (xop) {
4861                 case 0x20:      /* ldf, load fpreg */
4862                     gen_address_mask(dc, cpu_addr);
4863                     t0 = get_temp_tl(dc);
4864                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4865                     cpu_dst_32 = gen_dest_fpr_F(dc);
4866                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4867                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4868                     break;
4869                 case 0x21:      /* ldfsr, V9 ldxfsr */
4870 #ifdef TARGET_SPARC64
4871                     gen_address_mask(dc, cpu_addr);
4872                     if (rd == 1) {
4873                         TCGv_i64 t64 = tcg_temp_new_i64();
4874                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4875                         gen_helper_ldxfsr(cpu_env, t64);
4876                         tcg_temp_free_i64(t64);
4877                         break;
4878                     }
4879 #endif
4880                     cpu_dst_32 = get_temp_i32(dc);
4881                     t0 = get_temp_tl(dc);
4882                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4883                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4884                     gen_helper_ldfsr(cpu_env, cpu_dst_32);
4885                     break;
4886                 case 0x22:      /* ldqf, load quad fpreg */
4887                     {
4888                         TCGv_i32 r_const;
4889
4890                         CHECK_FPU_FEATURE(dc, FLOAT128);
4891                         r_const = tcg_const_i32(dc->mem_idx);
4892                         gen_address_mask(dc, cpu_addr);
4893                         gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4894                         tcg_temp_free_i32(r_const);
4895                         gen_op_store_QT0_fpr(QFPREG(rd));
4896                         gen_update_fprs_dirty(QFPREG(rd));
4897                     }
4898                     break;
4899                 case 0x23:      /* lddf, load double fpreg */
4900                     gen_address_mask(dc, cpu_addr);
4901                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4902                     tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4903                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4904                     break;
4905                 default:
4906                     goto illegal_insn;
4907                 }
4908             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4909                        xop == 0xe || xop == 0x1e) {
4910                 TCGv cpu_val = gen_load_gpr(dc, rd);
4911
4912                 switch (xop) {
4913                 case 0x4: /* st, store word */
4914                     gen_address_mask(dc, cpu_addr);
4915                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4916                     break;
4917                 case 0x5: /* stb, store byte */
4918                     gen_address_mask(dc, cpu_addr);
4919                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4920                     break;
4921                 case 0x6: /* sth, store halfword */
4922                     gen_address_mask(dc, cpu_addr);
4923                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4924                     break;
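                 /* std stores the even/odd pair rd/rd+1 as a single 64-bit
                    access: rd supplies the word at the lower address, rd+1
                    the word at the higher address.  */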
4925                 case 0x7: /* std, store double word */
4926                     if (rd & 1)
4927                         goto illegal_insn;
4928                     else {
4929                         TCGv_i32 r_const;
4930                         TCGv_i64 t64;
4931                         TCGv lo;
4932
4933                         save_state(dc);
4934                         gen_address_mask(dc, cpu_addr);
4935                         r_const = tcg_const_i32(7);
4936                         /* XXX remove alignment check */
4937                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4938                         tcg_temp_free_i32(r_const);
4939                         lo = gen_load_gpr(dc, rd + 1);
4940
4941                         t64 = tcg_temp_new_i64();
4942                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
4943                         tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
4944                         tcg_temp_free_i64(t64);
4945                     }
4946                     break;
4947 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4948                 case 0x14: /* sta, V9 stwa, store word alternate */
4949 #ifndef TARGET_SPARC64
4950                     if (IS_IMM)
4951                         goto illegal_insn;
4952                     if (!supervisor(dc))
4953                         goto priv_insn;
4954 #endif
4955                     save_state(dc);
4956                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4957                     dc->npc = DYNAMIC_PC;
4958                     break;
4959                 case 0x15: /* stba, store byte alternate */
4960 #ifndef TARGET_SPARC64
4961                     if (IS_IMM)
4962                         goto illegal_insn;
4963                     if (!supervisor(dc))
4964                         goto priv_insn;
4965 #endif
4966                     save_state(dc);
4967                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4968                     dc->npc = DYNAMIC_PC;
4969                     break;
4970                 case 0x16: /* stha, store halfword alternate */
4971 #ifndef TARGET_SPARC64
4972                     if (IS_IMM)
4973                         goto illegal_insn;
4974                     if (!supervisor(dc))
4975                         goto priv_insn;
4976 #endif
4977                     save_state(dc);
4978                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4979                     dc->npc = DYNAMIC_PC;
4980                     break;
4981                 case 0x17: /* stda, store double word alternate */
4982 #ifndef TARGET_SPARC64
4983                     if (IS_IMM)
4984                         goto illegal_insn;
4985                     if (!supervisor(dc))
4986                         goto priv_insn;
4987 #endif
4988                     if (rd & 1)
4989                         goto illegal_insn;
4990                     else {
4991                         save_state(dc);
4992                         gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
4993                     }
4994                     break;
4995 #endif
4996 #ifdef TARGET_SPARC64
4997                 case 0x0e: /* V9 stx */
4998                     gen_address_mask(dc, cpu_addr);
4999                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5000                     break;
5001                 case 0x1e: /* V9 stxa */
5002                     save_state(dc);
5003                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
5004                     dc->npc = DYNAMIC_PC;
5005                     break;
5006 #endif
5007                 default:
5008                     goto illegal_insn;
5009                 }
5010             } else if (xop > 0x23 && xop < 0x28) {
5011                 if (gen_trap_ifnofpu(dc)) {
5012                     goto jmp_insn;
5013                 }
5014                 save_state(dc);
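                 /* xop 0x24-0x27 are the FP stores: stf, stfsr (V9 stxfsr),
                    stqf (stdfq, the FP-queue store, on pre-V9) and stdf.  */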
5015                 switch (xop) {
5016                 case 0x24: /* stf, store fpreg */
5017                     {
5018                         TCGv t = get_temp_tl(dc);
5019                         gen_address_mask(dc, cpu_addr);
5020                         cpu_src1_32 = gen_load_fpr_F(dc, rd);
5021                         tcg_gen_ext_i32_tl(t, cpu_src1_32);
5022                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5023                     }
5024                     break;
5025                 case 0x25: /* stfsr, V9 stxfsr */
5026                     {
5027                         TCGv t = get_temp_tl(dc);
5028
5029                         tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
5030 #ifdef TARGET_SPARC64
5031                         gen_address_mask(dc, cpu_addr);
5032                         if (rd == 1) {
5033                             tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
5034                             break;
5035                         }
5036 #endif
5037                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5038                     }
5039                     break;
5040                 case 0x26:
5041 #ifdef TARGET_SPARC64
5042                     /* V9 stqf, store quad fpreg */
5043                     {
5044                         TCGv_i32 r_const;
5045
5046                         CHECK_FPU_FEATURE(dc, FLOAT128);
5047                         gen_op_load_fpr_QT0(QFPREG(rd));
5048                         r_const = tcg_const_i32(dc->mem_idx);
5049                         gen_address_mask(dc, cpu_addr);
5050                         gen_helper_stqf(cpu_env, cpu_addr, r_const);
5051                         tcg_temp_free_i32(r_const);
5052                     }
5053                     break;
5054 #else /* !TARGET_SPARC64 */
5055                     /* stdfq, store floating point queue */
5056 #if defined(CONFIG_USER_ONLY)
5057                     goto illegal_insn;
5058 #else
5059                     if (!supervisor(dc))
5060                         goto priv_insn;
5061                     if (gen_trap_ifnofpu(dc)) {
5062                         goto jmp_insn;
5063                     }
5064                     goto nfq_insn;
5065 #endif
5066 #endif
5067                 case 0x27: /* stdf, store double fpreg */
5068                     gen_address_mask(dc, cpu_addr);
5069                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5070                     tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5071                     break;
5072                 default:
5073                     goto illegal_insn;
5074                 }
5075             } else if (xop > 0x33 && xop < 0x3f) {
5076                 save_state(dc);
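                 /* xop 0x34-0x3e: V9 alternate-space FP stores and
                    casa/casxa; on 32-bit SPARC 0x34-0x37 are the coprocessor
                    stores and raise TT_NCP_INSN.  */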
5077                 switch (xop) {
5078 #ifdef TARGET_SPARC64
5079                 case 0x34: /* V9 stfa */
5080                     if (gen_trap_ifnofpu(dc)) {
5081                         goto jmp_insn;
5082                     }
5083                     gen_stf_asi(cpu_addr, insn, 4, rd);
5084                     break;
5085                 case 0x36: /* V9 stqfa */
5086                     {
5087                         TCGv_i32 r_const;
5088
5089                         CHECK_FPU_FEATURE(dc, FLOAT128);
5090                         if (gen_trap_ifnofpu(dc)) {
5091                             goto jmp_insn;
5092                         }
5093                         r_const = tcg_const_i32(7);
5094                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
5095                         tcg_temp_free_i32(r_const);
5096                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5097                     }
5098                     break;
5099                 case 0x37: /* V9 stdfa */
5100                     if (gen_trap_ifnofpu(dc)) {
5101                         goto jmp_insn;
5102                     }
5103                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5104                     break;
5105                 case 0x3c: /* V9 casa */
5106                     rs2 = GET_FIELD(insn, 27, 31);
5107                     cpu_src2 = gen_load_gpr(dc, rs2);
5108                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5109                     break;
5110                 case 0x3e: /* V9 casxa */
5111                     rs2 = GET_FIELD(insn, 27, 31);
5112                     cpu_src2 = gen_load_gpr(dc, rs2);
5113                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5114                     break;
5115 #else
5116                 case 0x34: /* stc */
5117                 case 0x35: /* stcsr */
5118                 case 0x36: /* stdcq */
5119                 case 0x37: /* stdc */
5120                     goto ncp_insn;
5121 #endif
5122                 default:
5123                     goto illegal_insn;
5124                 }
5125             } else {
5126                 goto illegal_insn;
5127             }
5128         }
5129         break;
5130     }
5131     /* default case for non-jump instructions */
5132     if (dc->npc == DYNAMIC_PC) {
5133         dc->pc = DYNAMIC_PC;
5134         gen_op_next_insn();
5135     } else if (dc->npc == JUMP_PC) {
5136         /* we can do a static jump */
5137         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5138         dc->is_br = 1;
5139     } else {
5140         dc->pc = dc->npc;
5141         dc->npc = dc->npc + 4;
5142     }
5143  jmp_insn:
5144     goto egress;
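     /* The exception exits below raise the corresponding trap
        (TT_ILL_INSN, TT_UNIMP_FLUSH, TT_PRIV_INSN, TT_NCP_INSN) or an FP
        exception (nfpu_insn/nfq_insn) and mark the TB as finished.  */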
5145  illegal_insn:
5146     {
5147         TCGv_i32 r_const;
5148
5149         save_state(dc);
5150         r_const = tcg_const_i32(TT_ILL_INSN);
5151         gen_helper_raise_exception(cpu_env, r_const);
5152         tcg_temp_free_i32(r_const);
5153         dc->is_br = 1;
5154     }
5155     goto egress;
5156  unimp_flush:
5157     {
5158         TCGv_i32 r_const;
5159
5160         save_state(dc);
5161         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5162         gen_helper_raise_exception(cpu_env, r_const);
5163         tcg_temp_free_i32(r_const);
5164         dc->is_br = 1;
5165     }
5166     goto egress;
5167 #if !defined(CONFIG_USER_ONLY)
5168  priv_insn:
5169     {
5170         TCGv_i32 r_const;
5171
5172         save_state(dc);
5173         r_const = tcg_const_i32(TT_PRIV_INSN);
5174         gen_helper_raise_exception(cpu_env, r_const);
5175         tcg_temp_free_i32(r_const);
5176         dc->is_br = 1;
5177     }
5178     goto egress;
5179 #endif
5180  nfpu_insn:
5181     save_state(dc);
5182     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5183     dc->is_br = 1;
5184     goto egress;
5185 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5186  nfq_insn:
5187     save_state(dc);
5188     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5189     dc->is_br = 1;
5190     goto egress;
5191 #endif
5192 #ifndef TARGET_SPARC64
5193  ncp_insn:
5194     {
5195         TCGv_i32 r_const;
5196
5197         save_state(dc);
5198         r_const = tcg_const_i32(TT_NCP_INSN);
5199         gen_helper_raise_exception(cpu_env, r_const);
5200         tcg_temp_free_i32(r_const);
5201         dc->is_br = 1;
5202     }
5203     goto egress;
5204 #endif
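     /* Free the per-instruction temporaries handed out by get_temp_i32()
        and get_temp_tl() while translating this instruction.  */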
5205  egress:
5206     if (dc->n_t32 != 0) {
5207         int i;
5208         for (i = dc->n_t32 - 1; i >= 0; --i) {
5209             tcg_temp_free_i32(dc->t32[i]);
5210         }
5211         dc->n_t32 = 0;
5212     }
5213     if (dc->n_ttl != 0) {
5214         int i;
5215         for (i = dc->n_ttl - 1; i >= 0; --i) {
5216             tcg_temp_free(dc->ttl[i]);
5217         }
5218         dc->n_ttl = 0;
5219     }
5220 }
5221
5222 static inline void gen_intermediate_code_internal(SPARCCPU *cpu,
5223                                                   TranslationBlock *tb,
5224                                                   bool spc)
5225 {
5226     CPUSPARCState *env = &cpu->env;
5227     target_ulong pc_start, last_pc;
5228     uint16_t *gen_opc_end;
5229     DisasContext dc1, *dc = &dc1;
5230     CPUBreakpoint *bp;
5231     int j, lj = -1;
5232     int num_insns;
5233     int max_insns;
5234     unsigned int insn;
5235
5236     memset(dc, 0, sizeof(DisasContext));
5237     dc->tb = tb;
5238     pc_start = tb->pc;
5239     dc->pc = pc_start;
5240     last_pc = dc->pc;
5241     dc->npc = (target_ulong) tb->cs_base;
5242     dc->cc_op = CC_OP_DYNAMIC;
5243     dc->mem_idx = cpu_mmu_index(env);
5244     dc->def = env->def;
5245     dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5246     dc->address_mask_32bit = tb_am_enabled(tb->flags);
5247     dc->singlestep = (env->singlestep_enabled || singlestep);
5248     gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
5249
5250     num_insns = 0;
5251     max_insns = tb->cflags & CF_COUNT_MASK;
5252     if (max_insns == 0)
5253         max_insns = CF_COUNT_MASK;
5254     gen_tb_start();
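     /* Translate one instruction at a time until a branch ends the TB, the
        PC stops being sequential, a page boundary or single-step is hit, or
        the opcode buffer / icount limit is reached.  */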
5255     do {
5256         if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5257             QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5258                 if (bp->pc == dc->pc) {
5259                     if (dc->pc != pc_start)
5260                         save_state(dc);
5261                     gen_helper_debug(cpu_env);
5262                     tcg_gen_exit_tb(0);
5263                     dc->is_br = 1;
5264                     goto exit_gen_loop;
5265                 }
5266             }
5267         }
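         /* In search-pc mode, record each guest instruction's pc, npc and
            icount at its position in the TCG op stream so that
            restore_state_to_opc() can recover the guest state.  */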
5268         if (spc) {
5269             qemu_log("Search PC...\n");
5270             j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
5271             if (lj < j) {
5272                 lj++;
5273                 while (lj < j)
5274                     tcg_ctx.gen_opc_instr_start[lj++] = 0;
5275                 tcg_ctx.gen_opc_pc[lj] = dc->pc;
5276                 gen_opc_npc[lj] = dc->npc;
5277                 tcg_ctx.gen_opc_instr_start[lj] = 1;
5278                 tcg_ctx.gen_opc_icount[lj] = num_insns;
5279             }
5280         }
5281         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5282             gen_io_start();
5283         last_pc = dc->pc;
5284         insn = cpu_ldl_code(env, dc->pc);
5285
5286         disas_sparc_insn(dc, insn);
5287         num_insns++;
5288
5289         if (dc->is_br)
5290             break;
5291         /* if the next PC is not sequential, stop translation now */
5292         if (dc->pc != (last_pc + 4))
5293             break;
5294         /* if we reach a page boundary, we stop generation so that the
5295            PC of a TT_TFAULT exception is always in the right page */
5296         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5297             break;
5298         /* in single-step mode, we generate only one instruction and
5299            generate an exception */
5300         if (dc->singlestep) {
5301             break;
5302         }
5303     } while ((tcg_ctx.gen_opc_ptr < gen_opc_end) &&
5304              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5305              num_insns < max_insns);
5306
5307  exit_gen_loop:
5308     if (tb->cflags & CF_LAST_IO) {
5309         gen_io_end();
5310     }
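     /* If translation stopped without a branch, chain directly to the next
        TB when both pc and npc are static; otherwise store them and exit to
        the main loop.  */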
5311     if (!dc->is_br) {
5312         if (dc->pc != DYNAMIC_PC &&
5313             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5314             /* static PC and NPC: we can use direct chaining */
5315             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5316         } else {
5317             if (dc->pc != DYNAMIC_PC) {
5318                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5319             }
5320             save_npc(dc);
5321             tcg_gen_exit_tb(0);
5322         }
5323     }
5324     gen_tb_end(tb, num_insns);
5325     *tcg_ctx.gen_opc_ptr = INDEX_op_end;
5326     if (spc) {
5327         j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
5328         lj++;
5329         while (lj <= j)
5330             tcg_ctx.gen_opc_instr_start[lj++] = 0;
5331 #if 0
5332         log_page_dump();
5333 #endif
5334         gen_opc_jump_pc[0] = dc->jump_pc[0];
5335         gen_opc_jump_pc[1] = dc->jump_pc[1];
5336     } else {
5337         tb->size = last_pc + 4 - pc_start;
5338         tb->icount = num_insns;
5339     }
5340 #ifdef DEBUG_DISAS
5341     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5342         qemu_log("--------------\n");
5343         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5344         log_target_disas(env, pc_start, last_pc + 4 - pc_start, 0);
5345         qemu_log("\n");
5346     }
5347 #endif
5348 }
5349
5350 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5351 {
5352     gen_intermediate_code_internal(sparc_env_get_cpu(env), tb, false);
5353 }
5354
5355 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5356 {
5357     gen_intermediate_code_internal(sparc_env_get_cpu(env), tb, true);
5358 }
5359
5360 void gen_intermediate_code_init(CPUSPARCState *env)
5361 {
5362     unsigned int i;
5363     static int inited;
5364     static const char * const gregnames[8] = {
5365         NULL, // g0 not used
5366         "g1",
5367         "g2",
5368         "g3",
5369         "g4",
5370         "g5",
5371         "g6",
5372         "g7",
5373     };
5374     static const char * const fregnames[32] = {
5375         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5376         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5377         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5378         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5379     };
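     /* Each cpu_fpr[i] is a 64-bit global covering the single-precision
        pair f(2*i)/f(2*i+1), hence the even-numbered register names.  */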
5380
5381     /* init various static tables */
5382     if (!inited) {
5383         inited = 1;
5384
5385         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5386         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5387                                              offsetof(CPUSPARCState, regwptr),
5388                                              "regwptr");
5389 #ifdef TARGET_SPARC64
5390         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5391                                          "xcc");
5392         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5393                                          "asi");
5394         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5395                                           "fprs");
5396         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5397                                      "gsr");
5398         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5399                                            offsetof(CPUSPARCState, tick_cmpr),
5400                                            "tick_cmpr");
5401         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5402                                             offsetof(CPUSPARCState, stick_cmpr),
5403                                             "stick_cmpr");
5404         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5405                                              offsetof(CPUSPARCState, hstick_cmpr),
5406                                              "hstick_cmpr");
5407         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5408                                        "hintp");
5409         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5410                                       "htba");
5411         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5412                                       "hver");
5413         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5414                                      offsetof(CPUSPARCState, ssr), "ssr");
5415         cpu_ver = tcg_global_mem_new(TCG_AREG0,
5416                                      offsetof(CPUSPARCState, version), "ver");
5417         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5418                                              offsetof(CPUSPARCState, softint),
5419                                              "softint");
5420 #else
5421         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5422                                      "wim");
5423 #endif
5424         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5425                                       "cond");
5426         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5427                                         "cc_src");
5428         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5429                                          offsetof(CPUSPARCState, cc_src2),
5430                                          "cc_src2");
5431         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5432                                         "cc_dst");
5433         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5434                                            "cc_op");
5435         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5436                                          "psr");
5437         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5438                                      "fsr");
5439         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5440                                     "pc");
5441         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5442                                      "npc");
5443         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5444 #ifndef CONFIG_USER_ONLY
5445         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5446                                      "tbr");
5447 #endif
5448         for (i = 1; i < 8; i++) {
5449             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5450                                               offsetof(CPUSPARCState, gregs[i]),
5451                                               gregnames[i]);
5452         }
5453         for (i = 0; i < TARGET_DPREGS; i++) {
5454             cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5455                                                 offsetof(CPUSPARCState, fpr[i]),
5456                                                 fregnames[i]);
5457         }
5458
5459         /* register helpers */
5460
5461 #define GEN_HELPER 2
5462 #include "helper.h"
5463     }
5464 }
5465
5466 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5467 {
5468     target_ulong npc;
5469     env->pc = tcg_ctx.gen_opc_pc[pc_pos];
5470     npc = gen_opc_npc[pc_pos];
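     /* gen_opc_npc[] holds either a concrete npc value or one of the
        markers DYNAMIC_PC (1) and JUMP_PC (2).  */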
5471     if (npc == 1) {
5472         /* dynamic NPC: already stored */
5473     } else if (npc == 2) {
5474         /* jump PC: use 'cond' and the jump targets of the translation */
5475         if (env->cond) {
5476             env->npc = gen_opc_jump_pc[0];
5477         } else {
5478             env->npc = gen_opc_jump_pc[1];
5479         }
5480     } else {
5481         env->npc = npc;
5482     }
5483 }