1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <[email protected]>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31
32 #define GEN_HELPER 1
33 #include "helper.h"
34
35 #define DEBUG_DISAS
36
37 #define DYNAMIC_PC  1 /* dynamic pc value */
38 #define JUMP_PC     2 /* dynamic pc value which takes only two values
39                          according to jump_pc[T2] */
40
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* local register indexes (only used inside old micro ops) */
62 static TCGv cpu_tmp0;
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
70
71 #include "gen-icount.h"
72
73 typedef struct DisasContext {
74     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
75     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77     int is_br;
78     int mem_idx;
79     int fpu_enabled;
80     int address_mask_32bit;
81     int singlestep;
82     uint32_t cc_op;  /* current CC operation */
83     struct TranslationBlock *tb;
84     sparc_def_t *def;
85     TCGv_i32 t32[3];
86     TCGv ttl[5];
87     int n_t32;
88     int n_ttl;
89 } DisasContext;
90
91 typedef struct {
92     TCGCond cond;
93     bool is_bool;
94     bool g1, g2;
95     TCGv c1, c2;
96 } DisasCompare;
97
98 // This macro uses non-native bit order (bit 0 is the most-significant bit)
99 #define GET_FIELD(X, FROM, TO)                                  \
100     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
101
102 // This macro uses the bit order of the manuals, i.e. bit 0 is 2^0 (the LSB)
103 #define GET_FIELD_SP(X, FROM, TO)               \
104     GET_FIELD(X, 31 - (TO), 31 - (FROM))
105
106 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
107 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
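/* Worked example (illustrative): GET_FIELD(insn, 0, 1) evaluates to
   (insn >> 30) & 3, the "op" field in the two most-significant bits,
   while GET_FIELD_SP(insn, 25, 29) equals GET_FIELD(insn, 2, 6), i.e.
   (insn >> 25) & 0x1f, the "rd" field in bits 29..25. */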
108
109 #ifdef TARGET_SPARC64
110 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
111 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
112 #else
113 #define DFPREG(r) (r & 0x1e)
114 #define QFPREG(r) (r & 0x1c)
115 #endif
116
117 #define UA2005_HTRAP_MASK 0xff
118 #define V8_TRAP_MASK 0x7f
119
120 static int sign_extend(int x, int len)
121 {
122     len = 32 - len;
123     return (x << len) >> len;
124 }
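/* e.g. sign_extend(0x1fff, 13) == -1: the value is shifted up so that
   its top bit lands in bit 31, then arithmetically shifted back down. */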
125
126 #define IS_IMM (insn & (1<<13))
127
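/* On SPARC64 this sets the FPRS dirty bits: bit 0 (FPRS.DL) for the
   lower registers (rd < 32) and bit 1 (FPRS.DU) for the upper ones. */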
128 static inline void gen_update_fprs_dirty(int rd)
129 {
130 #if defined(TARGET_SPARC64)
131     tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
132 #endif
133 }
134
135 /* floating point registers moves */
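/* Each 64-bit element of cpu_fpr[] packs two single-precision registers:
   the even-numbered register in the upper 32 bits and the odd-numbered
   one in the lower 32 bits. */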
136 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
137 {
138 #if TCG_TARGET_REG_BITS == 32
139     if (src & 1) {
140         return TCGV_LOW(cpu_fpr[src / 2]);
141     } else {
142         return TCGV_HIGH(cpu_fpr[src / 2]);
143     }
144 #else
145     if (src & 1) {
146         return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
147     } else {
148         TCGv_i32 ret = tcg_temp_new_i32();
149         TCGv_i64 t = tcg_temp_new_i64();
150
151         tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
152         tcg_gen_trunc_i64_i32(ret, t);
153         tcg_temp_free_i64(t);
154
155         dc->t32[dc->n_t32++] = ret;
156         assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));
157
158         return ret;
159     }
160 #endif
161 }
162
163 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
164 {
165 #if TCG_TARGET_REG_BITS == 32
166     if (dst & 1) {
167         tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
168     } else {
169         tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
170     }
171 #else
172     TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
173     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
174                         (dst & 1 ? 0 : 32), 32);
175 #endif
176     gen_update_fprs_dirty(dst);
177 }
178
179 static TCGv_i32 gen_dest_fpr_F(void)
180 {
181     return cpu_tmp32;
182 }
183
184 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
185 {
186     src = DFPREG(src);
187     return cpu_fpr[src / 2];
188 }
189
190 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
191 {
192     dst = DFPREG(dst);
193     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
194     gen_update_fprs_dirty(dst);
195 }
196
197 static TCGv_i64 gen_dest_fpr_D(void)
198 {
199     return cpu_tmp64;
200 }
201
202 static void gen_op_load_fpr_QT0(unsigned int src)
203 {
204     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
205                    offsetof(CPU_QuadU, ll.upper));
206     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
207                    offsetof(CPU_QuadU, ll.lower));
208 }
209
210 static void gen_op_load_fpr_QT1(unsigned int src)
211 {
212     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
213                    offsetof(CPU_QuadU, ll.upper));
214     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
215                    offsetof(CPU_QuadU, ll.lower));
216 }
217
218 static void gen_op_store_QT0_fpr(unsigned int dst)
219 {
220     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
221                    offsetof(CPU_QuadU, ll.upper));
222     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
223                    offsetof(CPU_QuadU, ll.lower));
224 }
225
226 #ifdef TARGET_SPARC64
227 static void gen_move_Q(unsigned int rd, unsigned int rs)
228 {
229     rd = QFPREG(rd);
230     rs = QFPREG(rs);
231
232     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
233     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
234     gen_update_fprs_dirty(rd);
235 }
236 #endif
237
238 /* moves */
239 #ifdef CONFIG_USER_ONLY
240 #define supervisor(dc) 0
241 #ifdef TARGET_SPARC64
242 #define hypervisor(dc) 0
243 #endif
244 #else
245 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
246 #ifdef TARGET_SPARC64
247 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
248 #else
249 #endif
250 #endif
251
252 #ifdef TARGET_SPARC64
253 #ifndef TARGET_ABI32
254 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
255 #else
256 #define AM_CHECK(dc) (1)
257 #endif
258 #endif
259
260 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
261 {
262 #ifdef TARGET_SPARC64
263     if (AM_CHECK(dc))
264         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
265 #endif
266 }
267
268 static inline TCGv get_temp_tl(DisasContext *dc)
269 {
270     TCGv t;
271     assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
272     dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
273     return t;
274 }
275
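/* General-purpose register access.  %g0..%g7 live in the TCG globals
   cpu_gregs[]; the current window's %o/%l/%i registers (reg >= 8) are
   reached through cpu_regwptr; %g0 reads as zero and writes to it are
   discarded.  Illustrative use: translating "add %o1, %o2, %o3" would
   load regs 9 and 10 with gen_load_gpr() and store the sum to reg 11
   with gen_store_gpr(). */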
276 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
277 {
278     if (reg == 0 || reg >= 8) {
279         TCGv t = get_temp_tl(dc);
280         if (reg == 0) {
281             tcg_gen_movi_tl(t, 0);
282         } else {
283             tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
284         }
285         return t;
286     } else {
287         return cpu_gregs[reg];
288     }
289 }
290
291 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
292 {
293     if (reg > 0) {
294         if (reg < 8) {
295             tcg_gen_mov_tl(cpu_gregs[reg], v);
296         } else {
297             tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
298         }
299     }
300 }
301
302 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
303 {
304     if (reg == 0 || reg >= 8) {
305         return get_temp_tl(dc);
306     } else {
307         return cpu_gregs[reg];
308     }
309 }
310
311 static inline void gen_movl_reg_TN(int reg, TCGv tn)
312 {
313     if (reg == 0)
314         tcg_gen_movi_tl(tn, 0);
315     else if (reg < 8)
316         tcg_gen_mov_tl(tn, cpu_gregs[reg]);
317     else {
318         tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
319     }
320 }
321
322 static inline void gen_movl_TN_reg(int reg, TCGv tn)
323 {
324     if (reg == 0)
325         return;
326     else if (reg < 8)
327         tcg_gen_mov_tl(cpu_gregs[reg], tn);
328     else {
329         tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
330     }
331 }
332
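/* Direct-jump TB chaining: when both pc and npc stay on the same guest
   page as this TB and we are not single-stepping, tcg_gen_goto_tb emits
   a patchable jump and tcg_gen_exit_tb returns the TB pointer plus the
   jump-slot index so the execution loop can link the blocks. */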
333 static inline void gen_goto_tb(DisasContext *s, int tb_num,
334                                target_ulong pc, target_ulong npc)
335 {
336     TranslationBlock *tb;
337
338     tb = s->tb;
339     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
340         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
341         !s->singlestep)  {
342         /* jump to same page: we can use a direct jump */
343         tcg_gen_goto_tb(tb_num);
344         tcg_gen_movi_tl(cpu_pc, pc);
345         tcg_gen_movi_tl(cpu_npc, npc);
346         tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
347     } else {
348         /* jump to another page: currently not optimized */
349         tcg_gen_movi_tl(cpu_pc, pc);
350         tcg_gen_movi_tl(cpu_npc, npc);
351         tcg_gen_exit_tb(0);
352     }
353 }
354
355 // XXX suboptimal
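// Extract one condition-code bit (N, Z, V or C) from a 32-bit PSR image
// into bit 0 of a target-long destination.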
356 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
357 {
358     tcg_gen_extu_i32_tl(reg, src);
359     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
360     tcg_gen_andi_tl(reg, reg, 0x1);
361 }
362
363 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
364 {
365     tcg_gen_extu_i32_tl(reg, src);
366     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
367     tcg_gen_andi_tl(reg, reg, 0x1);
368 }
369
370 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
371 {
372     tcg_gen_extu_i32_tl(reg, src);
373     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
374     tcg_gen_andi_tl(reg, reg, 0x1);
375 }
376
377 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
378 {
379     tcg_gen_extu_i32_tl(reg, src);
380     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
381     tcg_gen_andi_tl(reg, reg, 0x1);
382 }
383
384 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
385 {
386     tcg_gen_mov_tl(cpu_cc_src, src1);
387     tcg_gen_movi_tl(cpu_cc_src2, src2);
388     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
389     tcg_gen_mov_tl(dst, cpu_cc_dst);
390 }
391
392 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
393 {
394     tcg_gen_mov_tl(cpu_cc_src, src1);
395     tcg_gen_mov_tl(cpu_cc_src2, src2);
396     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
397     tcg_gen_mov_tl(dst, cpu_cc_dst);
398 }
399
400 static TCGv_i32 gen_add32_carry32(void)
401 {
402     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
403
404     /* Carry is computed from a previous add: (dst < src)  */
405 #if TARGET_LONG_BITS == 64
406     cc_src1_32 = tcg_temp_new_i32();
407     cc_src2_32 = tcg_temp_new_i32();
408     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
409     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
410 #else
411     cc_src1_32 = cpu_cc_dst;
412     cc_src2_32 = cpu_cc_src;
413 #endif
414
415     carry_32 = tcg_temp_new_i32();
416     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
417
418 #if TARGET_LONG_BITS == 64
419     tcg_temp_free_i32(cc_src1_32);
420     tcg_temp_free_i32(cc_src2_32);
421 #endif
422
423     return carry_32;
424 }
425
426 static TCGv_i32 gen_sub32_carry32(void)
427 {
428     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
429
430     /* Carry is computed from a previous borrow: (src1 < src2)  */
431 #if TARGET_LONG_BITS == 64
432     cc_src1_32 = tcg_temp_new_i32();
433     cc_src2_32 = tcg_temp_new_i32();
434     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
435     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
436 #else
437     cc_src1_32 = cpu_cc_src;
438     cc_src2_32 = cpu_cc_src2;
439 #endif
440
441     carry_32 = tcg_temp_new_i32();
442     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
443
444 #if TARGET_LONG_BITS == 64
445     tcg_temp_free_i32(cc_src1_32);
446     tcg_temp_free_i32(cc_src2_32);
447 #endif
448
449     return carry_32;
450 }
451
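/* ADDX (add with carry-in).  The way the carry is obtained depends on
   how the flags were last computed (dc->cc_op): for recent ADD/SUB
   results it is recomputed inline, otherwise the compute_C_icc helper is
   called.  gen_op_subx_int below uses the same scheme for SUBX. */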
452 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
453                             TCGv src2, int update_cc)
454 {
455     TCGv_i32 carry_32;
456     TCGv carry;
457
458     switch (dc->cc_op) {
459     case CC_OP_DIV:
460     case CC_OP_LOGIC:
461         /* Carry is known to be zero.  Fall back to plain ADD.  */
462         if (update_cc) {
463             gen_op_add_cc(dst, src1, src2);
464         } else {
465             tcg_gen_add_tl(dst, src1, src2);
466         }
467         return;
468
469     case CC_OP_ADD:
470     case CC_OP_TADD:
471     case CC_OP_TADDTV:
472 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
473         {
474             /* For 32-bit hosts, we can re-use the host's hardware carry
475                generation by using an ADD2 opcode.  We discard the low
476                part of the output.  Ideally we'd combine this operation
477                with the add that generated the carry in the first place.  */
478             TCGv dst_low = tcg_temp_new();
479             tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
480                             cpu_cc_src, src1, cpu_cc_src2, src2);
481             tcg_temp_free(dst_low);
482             goto add_done;
483         }
484 #endif
485         carry_32 = gen_add32_carry32();
486         break;
487
488     case CC_OP_SUB:
489     case CC_OP_TSUB:
490     case CC_OP_TSUBTV:
491         carry_32 = gen_sub32_carry32();
492         break;
493
494     default:
495         /* We need external help to produce the carry.  */
496         carry_32 = tcg_temp_new_i32();
497         gen_helper_compute_C_icc(carry_32, cpu_env);
498         break;
499     }
500
501 #if TARGET_LONG_BITS == 64
502     carry = tcg_temp_new();
503     tcg_gen_extu_i32_i64(carry, carry_32);
504 #else
505     carry = carry_32;
506 #endif
507
508     tcg_gen_add_tl(dst, src1, src2);
509     tcg_gen_add_tl(dst, dst, carry);
510
511     tcg_temp_free_i32(carry_32);
512 #if TARGET_LONG_BITS == 64
513     tcg_temp_free(carry);
514 #endif
515
516 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
517  add_done:
518 #endif
519     if (update_cc) {
520         tcg_gen_mov_tl(cpu_cc_src, src1);
521         tcg_gen_mov_tl(cpu_cc_src2, src2);
522         tcg_gen_mov_tl(cpu_cc_dst, dst);
523         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
524         dc->cc_op = CC_OP_ADDX;
525     }
526 }
527
528 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
529 {
530     tcg_gen_mov_tl(cpu_cc_src, src1);
531     tcg_gen_movi_tl(cpu_cc_src2, src2);
532     if (src2 == 0) {
533         tcg_gen_mov_tl(cpu_cc_dst, src1);
534         tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
535         dc->cc_op = CC_OP_LOGIC;
536     } else {
537         tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
538         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
539         dc->cc_op = CC_OP_SUB;
540     }
541     tcg_gen_mov_tl(dst, cpu_cc_dst);
542 }
543
544 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
545 {
546     tcg_gen_mov_tl(cpu_cc_src, src1);
547     tcg_gen_mov_tl(cpu_cc_src2, src2);
548     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
549     tcg_gen_mov_tl(dst, cpu_cc_dst);
550 }
551
552 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
553                             TCGv src2, int update_cc)
554 {
555     TCGv_i32 carry_32;
556     TCGv carry;
557
558     switch (dc->cc_op) {
559     case CC_OP_DIV:
560     case CC_OP_LOGIC:
561         /* Carry is known to be zero.  Fall back to plain SUB.  */
562         if (update_cc) {
563             gen_op_sub_cc(dst, src1, src2);
564         } else {
565             tcg_gen_sub_tl(dst, src1, src2);
566         }
567         return;
568
569     case CC_OP_ADD:
570     case CC_OP_TADD:
571     case CC_OP_TADDTV:
572         carry_32 = gen_add32_carry32();
573         break;
574
575     case CC_OP_SUB:
576     case CC_OP_TSUB:
577     case CC_OP_TSUBTV:
578 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
579         {
580             /* For 32-bit hosts, we can re-use the host's hardware carry
581                generation by using a SUB2 opcode.  We discard the low
582                part of the output.  Ideally we'd combine this operation
583                with the subtraction that generated the carry in the first place.  */
584             TCGv dst_low = tcg_temp_new();
585             tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
586                             cpu_cc_src, src1, cpu_cc_src2, src2);
587             tcg_temp_free(dst_low);
588             goto sub_done;
589         }
590 #endif
591         carry_32 = gen_sub32_carry32();
592         break;
593
594     default:
595         /* We need external help to produce the carry.  */
596         carry_32 = tcg_temp_new_i32();
597         gen_helper_compute_C_icc(carry_32, cpu_env);
598         break;
599     }
600
601 #if TARGET_LONG_BITS == 64
602     carry = tcg_temp_new();
603     tcg_gen_extu_i32_i64(carry, carry_32);
604 #else
605     carry = carry_32;
606 #endif
607
608     tcg_gen_sub_tl(dst, src1, src2);
609     tcg_gen_sub_tl(dst, dst, carry);
610
611     tcg_temp_free_i32(carry_32);
612 #if TARGET_LONG_BITS == 64
613     tcg_temp_free(carry);
614 #endif
615
616 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
617  sub_done:
618 #endif
619     if (update_cc) {
620         tcg_gen_mov_tl(cpu_cc_src, src1);
621         tcg_gen_mov_tl(cpu_cc_src2, src2);
622         tcg_gen_mov_tl(cpu_cc_dst, dst);
623         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
624         dc->cc_op = CC_OP_SUBX;
625     }
626 }
627
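/* MULScc: one step of the SPARC V8 multiply-step algorithm.  The addend
   is src2 when the low bit of %y is set and zero otherwise; the low bit
   of src1 shifts into the top of %y, and (N ^ V) shifts into the top of
   the partial product before the conditional add sets the flags. */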
628 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
629 {
630     TCGv r_temp, zero;
631
632     r_temp = tcg_temp_new();
633
634     /* old op:
635     if (!(env->y & 1))
636         T1 = 0;
637     */
638     zero = tcg_const_tl(0);
639     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
640     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
641     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
642     tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
643                        zero, cpu_cc_src2);
644     tcg_temp_free(zero);
645
646     // b2 = T0 & 1;
647     // env->y = (b2 << 31) | (env->y >> 1);
648     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
649     tcg_gen_shli_tl(r_temp, r_temp, 31);
650     tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
651     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
652     tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
653     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
654
655     // b1 = N ^ V;
656     gen_mov_reg_N(cpu_tmp0, cpu_psr);
657     gen_mov_reg_V(r_temp, cpu_psr);
658     tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
659     tcg_temp_free(r_temp);
660
661     // T0 = (b1 << 31) | (T0 >> 1);
662     // src1 = T0;
663     tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
664     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
665     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
666
667     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
668
669     tcg_gen_mov_tl(dst, cpu_cc_dst);
670 }
671
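/* UMUL/SMUL: 32 x 32 -> 64-bit multiply.  The high 32 bits of the
   product are written to %y; dst receives the product truncated to the
   target word size (the full 64-bit value on sparc64). */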
672 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
673 {
674     TCGv_i32 r_src1, r_src2;
675     TCGv_i64 r_temp, r_temp2;
676
677     r_src1 = tcg_temp_new_i32();
678     r_src2 = tcg_temp_new_i32();
679
680     tcg_gen_trunc_tl_i32(r_src1, src1);
681     tcg_gen_trunc_tl_i32(r_src2, src2);
682
683     r_temp = tcg_temp_new_i64();
684     r_temp2 = tcg_temp_new_i64();
685
686     if (sign_ext) {
687         tcg_gen_ext_i32_i64(r_temp, r_src2);
688         tcg_gen_ext_i32_i64(r_temp2, r_src1);
689     } else {
690         tcg_gen_extu_i32_i64(r_temp, r_src2);
691         tcg_gen_extu_i32_i64(r_temp2, r_src1);
692     }
693
694     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
695
696     tcg_gen_shri_i64(r_temp, r_temp2, 32);
697     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
698     tcg_temp_free_i64(r_temp);
699     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
700
701     tcg_gen_trunc_i64_tl(dst, r_temp2);
702
703     tcg_temp_free_i64(r_temp2);
704
705     tcg_temp_free_i32(r_src1);
706     tcg_temp_free_i32(r_src2);
707 }
708
709 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
710 {
711     /* zero-extend truncated operands before multiplication */
712     gen_op_multiply(dst, src1, src2, 0);
713 }
714
715 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
716 {
717     /* sign-extend truncated operands before multiplication */
718     gen_op_multiply(dst, src1, src2, 1);
719 }
720
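// Boolean evaluation of the sixteen integer branch conditions from a
// 32-bit PSR image; each helper leaves 0 or 1 in dst.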
721 // 1
722 static inline void gen_op_eval_ba(TCGv dst)
723 {
724     tcg_gen_movi_tl(dst, 1);
725 }
726
727 // Z
728 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
729 {
730     gen_mov_reg_Z(dst, src);
731 }
732
733 // Z | (N ^ V)
734 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
735 {
736     gen_mov_reg_N(cpu_tmp0, src);
737     gen_mov_reg_V(dst, src);
738     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
739     gen_mov_reg_Z(cpu_tmp0, src);
740     tcg_gen_or_tl(dst, dst, cpu_tmp0);
741 }
742
743 // N ^ V
744 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
745 {
746     gen_mov_reg_V(cpu_tmp0, src);
747     gen_mov_reg_N(dst, src);
748     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
749 }
750
751 // C | Z
752 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
753 {
754     gen_mov_reg_Z(cpu_tmp0, src);
755     gen_mov_reg_C(dst, src);
756     tcg_gen_or_tl(dst, dst, cpu_tmp0);
757 }
758
759 // C
760 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
761 {
762     gen_mov_reg_C(dst, src);
763 }
764
765 // V
766 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
767 {
768     gen_mov_reg_V(dst, src);
769 }
770
771 // 0
772 static inline void gen_op_eval_bn(TCGv dst)
773 {
774     tcg_gen_movi_tl(dst, 0);
775 }
776
777 // N
778 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
779 {
780     gen_mov_reg_N(dst, src);
781 }
782
783 // !Z
784 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
785 {
786     gen_mov_reg_Z(dst, src);
787     tcg_gen_xori_tl(dst, dst, 0x1);
788 }
789
790 // !(Z | (N ^ V))
791 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
792 {
793     gen_mov_reg_N(cpu_tmp0, src);
794     gen_mov_reg_V(dst, src);
795     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
796     gen_mov_reg_Z(cpu_tmp0, src);
797     tcg_gen_or_tl(dst, dst, cpu_tmp0);
798     tcg_gen_xori_tl(dst, dst, 0x1);
799 }
800
801 // !(N ^ V)
802 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
803 {
804     gen_mov_reg_V(cpu_tmp0, src);
805     gen_mov_reg_N(dst, src);
806     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
807     tcg_gen_xori_tl(dst, dst, 0x1);
808 }
809
810 // !(C | Z)
811 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
812 {
813     gen_mov_reg_Z(cpu_tmp0, src);
814     gen_mov_reg_C(dst, src);
815     tcg_gen_or_tl(dst, dst, cpu_tmp0);
816     tcg_gen_xori_tl(dst, dst, 0x1);
817 }
818
819 // !C
820 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
821 {
822     gen_mov_reg_C(dst, src);
823     tcg_gen_xori_tl(dst, dst, 0x1);
824 }
825
826 // !N
827 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
828 {
829     gen_mov_reg_N(dst, src);
830     tcg_gen_xori_tl(dst, dst, 0x1);
831 }
832
833 // !V
834 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
835 {
836     gen_mov_reg_V(dst, src);
837     tcg_gen_xori_tl(dst, dst, 0x1);
838 }
839
840 /*
841   FPSR bit field FCC1 | FCC0:
842    0 =
843    1 <
844    2 >
845    3 unordered
846 */
847 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
848                                     unsigned int fcc_offset)
849 {
850     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
851     tcg_gen_andi_tl(reg, reg, 0x1);
852 }
853
854 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
855                                     unsigned int fcc_offset)
856 {
857     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
858     tcg_gen_andi_tl(reg, reg, 0x1);
859 }
860
861 // !0: FCC0 | FCC1
862 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
863                                     unsigned int fcc_offset)
864 {
865     gen_mov_reg_FCC0(dst, src, fcc_offset);
866     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
867     tcg_gen_or_tl(dst, dst, cpu_tmp0);
868 }
869
870 // 1 or 2: FCC0 ^ FCC1
871 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
872                                     unsigned int fcc_offset)
873 {
874     gen_mov_reg_FCC0(dst, src, fcc_offset);
875     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
876     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
877 }
878
879 // 1 or 3: FCC0
880 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
881                                     unsigned int fcc_offset)
882 {
883     gen_mov_reg_FCC0(dst, src, fcc_offset);
884 }
885
886 // 1: FCC0 & !FCC1
887 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
888                                     unsigned int fcc_offset)
889 {
890     gen_mov_reg_FCC0(dst, src, fcc_offset);
891     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
892     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
893     tcg_gen_and_tl(dst, dst, cpu_tmp0);
894 }
895
896 // 2 or 3: FCC1
897 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
898                                     unsigned int fcc_offset)
899 {
900     gen_mov_reg_FCC1(dst, src, fcc_offset);
901 }
902
903 // 2: !FCC0 & FCC1
904 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
905                                     unsigned int fcc_offset)
906 {
907     gen_mov_reg_FCC0(dst, src, fcc_offset);
908     tcg_gen_xori_tl(dst, dst, 0x1);
909     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
910     tcg_gen_and_tl(dst, dst, cpu_tmp0);
911 }
912
913 // 3: FCC0 & FCC1
914 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
915                                     unsigned int fcc_offset)
916 {
917     gen_mov_reg_FCC0(dst, src, fcc_offset);
918     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
919     tcg_gen_and_tl(dst, dst, cpu_tmp0);
920 }
921
922 // 0: !(FCC0 | FCC1)
923 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
924                                     unsigned int fcc_offset)
925 {
926     gen_mov_reg_FCC0(dst, src, fcc_offset);
927     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
928     tcg_gen_or_tl(dst, dst, cpu_tmp0);
929     tcg_gen_xori_tl(dst, dst, 0x1);
930 }
931
932 // 0 or 3: !(FCC0 ^ FCC1)
933 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
934                                     unsigned int fcc_offset)
935 {
936     gen_mov_reg_FCC0(dst, src, fcc_offset);
937     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
938     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
939     tcg_gen_xori_tl(dst, dst, 0x1);
940 }
941
942 // 0 or 2: !FCC0
943 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
944                                     unsigned int fcc_offset)
945 {
946     gen_mov_reg_FCC0(dst, src, fcc_offset);
947     tcg_gen_xori_tl(dst, dst, 0x1);
948 }
949
950 // !1: !(FCC0 & !FCC1)
951 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
952                                     unsigned int fcc_offset)
953 {
954     gen_mov_reg_FCC0(dst, src, fcc_offset);
955     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
956     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
957     tcg_gen_and_tl(dst, dst, cpu_tmp0);
958     tcg_gen_xori_tl(dst, dst, 0x1);
959 }
960
961 // 0 or 1: !FCC1
962 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
963                                     unsigned int fcc_offset)
964 {
965     gen_mov_reg_FCC1(dst, src, fcc_offset);
966     tcg_gen_xori_tl(dst, dst, 0x1);
967 }
968
969 // !2: !(!FCC0 & FCC1)
970 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
971                                     unsigned int fcc_offset)
972 {
973     gen_mov_reg_FCC0(dst, src, fcc_offset);
974     tcg_gen_xori_tl(dst, dst, 0x1);
975     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
976     tcg_gen_and_tl(dst, dst, cpu_tmp0);
977     tcg_gen_xori_tl(dst, dst, 0x1);
978 }
979
980 // !3: !(FCC0 & FCC1)
981 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
982                                     unsigned int fcc_offset)
983 {
984     gen_mov_reg_FCC0(dst, src, fcc_offset);
985     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
986     tcg_gen_and_tl(dst, dst, cpu_tmp0);
987     tcg_gen_xori_tl(dst, dst, 0x1);
988 }
989
990 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
991                                target_ulong pc2, TCGv r_cond)
992 {
993     int l1;
994
995     l1 = gen_new_label();
996
997     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
998
999     gen_goto_tb(dc, 0, pc1, pc1 + 4);
1000
1001     gen_set_label(l1);
1002     gen_goto_tb(dc, 1, pc2, pc2 + 4);
1003 }
1004
1005 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1006                                 target_ulong pc2, TCGv r_cond)
1007 {
1008     int l1;
1009
1010     l1 = gen_new_label();
1011
1012     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1013
1014     gen_goto_tb(dc, 0, pc2, pc1);
1015
1016     gen_set_label(l1);
1017     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1018 }
1019
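/* Resolve a pending JUMP_PC: select between the two candidate npc values
   recorded in dc->jump_pc[] according to the condition previously
   evaluated into cpu_cond. */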
1020 static inline void gen_generic_branch(DisasContext *dc)
1021 {
1022     TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
1023     TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
1024     TCGv zero = tcg_const_tl(0);
1025
1026     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
1027
1028     tcg_temp_free(npc0);
1029     tcg_temp_free(npc1);
1030     tcg_temp_free(zero);
1031 }
1032
1033 /* call this function before using the condition register as it may
1034    have been set for a jump */
1035 static inline void flush_cond(DisasContext *dc)
1036 {
1037     if (dc->npc == JUMP_PC) {
1038         gen_generic_branch(dc);
1039         dc->npc = DYNAMIC_PC;
1040     }
1041 }
1042
1043 static inline void save_npc(DisasContext *dc)
1044 {
1045     if (dc->npc == JUMP_PC) {
1046         gen_generic_branch(dc);
1047         dc->npc = DYNAMIC_PC;
1048     } else if (dc->npc != DYNAMIC_PC) {
1049         tcg_gen_movi_tl(cpu_npc, dc->npc);
1050     }
1051 }
1052
1053 static inline void update_psr(DisasContext *dc)
1054 {
1055     if (dc->cc_op != CC_OP_FLAGS) {
1056         dc->cc_op = CC_OP_FLAGS;
1057         gen_helper_compute_psr(cpu_env);
1058     }
1059 }
1060
1061 static inline void save_state(DisasContext *dc)
1062 {
1063     tcg_gen_movi_tl(cpu_pc, dc->pc);
1064     save_npc(dc);
1065 }
1066
1067 static inline void gen_mov_pc_npc(DisasContext *dc)
1068 {
1069     if (dc->npc == JUMP_PC) {
1070         gen_generic_branch(dc);
1071         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1072         dc->pc = DYNAMIC_PC;
1073     } else if (dc->npc == DYNAMIC_PC) {
1074         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1075         dc->pc = DYNAMIC_PC;
1076     } else {
1077         dc->pc = dc->npc;
1078     }
1079 }
1080
1081 static inline void gen_op_next_insn(void)
1082 {
1083     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1084     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1085 }
1086
1087 static void free_compare(DisasCompare *cmp)
1088 {
1089     if (!cmp->g1) {
1090         tcg_temp_free(cmp->c1);
1091     }
1092     if (!cmp->g2) {
1093         tcg_temp_free(cmp->c2);
1094     }
1095 }
1096
1097 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1098                         DisasContext *dc)
1099 {
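    /* After a SUBcc the flags describe the relation between cc_src and
       cc_src2, so most conditions map directly onto a TCG comparison of
       the two operands.  -1 marks the conditions handled separately
       below: neg/pos test cc_dst against zero, the overflow conditions
       fall back to the flags helper. */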
1100     static int subcc_cond[16] = {
1101         TCG_COND_NEVER,
1102         TCG_COND_EQ,
1103         TCG_COND_LE,
1104         TCG_COND_LT,
1105         TCG_COND_LEU,
1106         TCG_COND_LTU,
1107         -1, /* neg */
1108         -1, /* overflow */
1109         TCG_COND_ALWAYS,
1110         TCG_COND_NE,
1111         TCG_COND_GT,
1112         TCG_COND_GE,
1113         TCG_COND_GTU,
1114         TCG_COND_GEU,
1115         -1, /* pos */
1116         -1, /* no overflow */
1117     };
1118
1119     static int logic_cond[16] = {
1120         TCG_COND_NEVER,
1121         TCG_COND_EQ,     /* eq:  Z */
1122         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1123         TCG_COND_LT,     /* lt:  N ^ V -> N */
1124         TCG_COND_EQ,     /* leu: C | Z -> Z */
1125         TCG_COND_NEVER,  /* ltu: C -> 0 */
1126         TCG_COND_LT,     /* neg: N */
1127         TCG_COND_NEVER,  /* vs:  V -> 0 */
1128         TCG_COND_ALWAYS,
1129         TCG_COND_NE,     /* ne:  !Z */
1130         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1131         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1132         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1133         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1134         TCG_COND_GE,     /* pos: !N */
1135         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1136     };
1137
1138     TCGv_i32 r_src;
1139     TCGv r_dst;
1140
1141 #ifdef TARGET_SPARC64
1142     if (xcc) {
1143         r_src = cpu_xcc;
1144     } else {
1145         r_src = cpu_psr;
1146     }
1147 #else
1148     r_src = cpu_psr;
1149 #endif
1150
1151     switch (dc->cc_op) {
1152     case CC_OP_LOGIC:
1153         cmp->cond = logic_cond[cond];
1154     do_compare_dst_0:
1155         cmp->is_bool = false;
1156         cmp->g2 = false;
1157         cmp->c2 = tcg_const_tl(0);
1158 #ifdef TARGET_SPARC64
1159         if (!xcc) {
1160             cmp->g1 = false;
1161             cmp->c1 = tcg_temp_new();
1162             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1163             break;
1164         }
1165 #endif
1166         cmp->g1 = true;
1167         cmp->c1 = cpu_cc_dst;
1168         break;
1169
1170     case CC_OP_SUB:
1171         switch (cond) {
1172         case 6:  /* neg */
1173         case 14: /* pos */
1174             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1175             goto do_compare_dst_0;
1176
1177         case 7: /* overflow */
1178         case 15: /* !overflow */
1179             goto do_dynamic;
1180
1181         default:
1182             cmp->cond = subcc_cond[cond];
1183             cmp->is_bool = false;
1184 #ifdef TARGET_SPARC64
1185             if (!xcc) {
1186                 /* Note that sign-extension works for unsigned compares as
1187                    long as both operands are sign-extended.  */
1188                 cmp->g1 = cmp->g2 = false;
1189                 cmp->c1 = tcg_temp_new();
1190                 cmp->c2 = tcg_temp_new();
1191                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1192                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1193                 break;
1194             }
1195 #endif
1196             cmp->g1 = cmp->g2 = true;
1197             cmp->c1 = cpu_cc_src;
1198             cmp->c2 = cpu_cc_src2;
1199             break;
1200         }
1201         break;
1202
1203     default:
1204     do_dynamic:
1205         gen_helper_compute_psr(cpu_env);
1206         dc->cc_op = CC_OP_FLAGS;
1207         /* FALLTHRU */
1208
1209     case CC_OP_FLAGS:
1210         /* We're going to generate a boolean result.  */
1211         cmp->cond = TCG_COND_NE;
1212         cmp->is_bool = true;
1213         cmp->g1 = cmp->g2 = false;
1214         cmp->c1 = r_dst = tcg_temp_new();
1215         cmp->c2 = tcg_const_tl(0);
1216
1217         switch (cond) {
1218         case 0x0:
1219             gen_op_eval_bn(r_dst);
1220             break;
1221         case 0x1:
1222             gen_op_eval_be(r_dst, r_src);
1223             break;
1224         case 0x2:
1225             gen_op_eval_ble(r_dst, r_src);
1226             break;
1227         case 0x3:
1228             gen_op_eval_bl(r_dst, r_src);
1229             break;
1230         case 0x4:
1231             gen_op_eval_bleu(r_dst, r_src);
1232             break;
1233         case 0x5:
1234             gen_op_eval_bcs(r_dst, r_src);
1235             break;
1236         case 0x6:
1237             gen_op_eval_bneg(r_dst, r_src);
1238             break;
1239         case 0x7:
1240             gen_op_eval_bvs(r_dst, r_src);
1241             break;
1242         case 0x8:
1243             gen_op_eval_ba(r_dst);
1244             break;
1245         case 0x9:
1246             gen_op_eval_bne(r_dst, r_src);
1247             break;
1248         case 0xa:
1249             gen_op_eval_bg(r_dst, r_src);
1250             break;
1251         case 0xb:
1252             gen_op_eval_bge(r_dst, r_src);
1253             break;
1254         case 0xc:
1255             gen_op_eval_bgu(r_dst, r_src);
1256             break;
1257         case 0xd:
1258             gen_op_eval_bcc(r_dst, r_src);
1259             break;
1260         case 0xe:
1261             gen_op_eval_bpos(r_dst, r_src);
1262             break;
1263         case 0xf:
1264             gen_op_eval_bvc(r_dst, r_src);
1265             break;
1266         }
1267         break;
1268     }
1269 }
1270
1271 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1272 {
1273     unsigned int offset;
1274     TCGv r_dst;
1275
1276     /* For now we still generate a straight boolean result.  */
1277     cmp->cond = TCG_COND_NE;
1278     cmp->is_bool = true;
1279     cmp->g1 = cmp->g2 = false;
1280     cmp->c1 = r_dst = tcg_temp_new();
1281     cmp->c2 = tcg_const_tl(0);
1282
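    /* offset is the distance from the fcc0 field (FSR bits 11:10) to the
       selected condition field; on SPARC64 fcc1..fcc3 sit at FSR bits
       33:32, 35:34 and 37:36. */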
1283     switch (cc) {
1284     default:
1285     case 0x0:
1286         offset = 0;
1287         break;
1288     case 0x1:
1289         offset = 32 - 10;
1290         break;
1291     case 0x2:
1292         offset = 34 - 10;
1293         break;
1294     case 0x3:
1295         offset = 36 - 10;
1296         break;
1297     }
1298
1299     switch (cond) {
1300     case 0x0:
1301         gen_op_eval_bn(r_dst);
1302         break;
1303     case 0x1:
1304         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1305         break;
1306     case 0x2:
1307         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1308         break;
1309     case 0x3:
1310         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1311         break;
1312     case 0x4:
1313         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1314         break;
1315     case 0x5:
1316         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1317         break;
1318     case 0x6:
1319         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1320         break;
1321     case 0x7:
1322         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1323         break;
1324     case 0x8:
1325         gen_op_eval_ba(r_dst);
1326         break;
1327     case 0x9:
1328         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1329         break;
1330     case 0xa:
1331         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1332         break;
1333     case 0xb:
1334         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1335         break;
1336     case 0xc:
1337         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1338         break;
1339     case 0xd:
1340         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1341         break;
1342     case 0xe:
1343         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1344         break;
1345     case 0xf:
1346         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1347         break;
1348     }
1349 }
1350
1351 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1352                      DisasContext *dc)
1353 {
1354     DisasCompare cmp;
1355     gen_compare(&cmp, cc, cond, dc);
1356
1357     /* The interface is to return a boolean in r_dst.  */
1358     if (cmp.is_bool) {
1359         tcg_gen_mov_tl(r_dst, cmp.c1);
1360     } else {
1361         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1362     }
1363
1364     free_compare(&cmp);
1365 }
1366
1367 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1368 {
1369     DisasCompare cmp;
1370     gen_fcompare(&cmp, cc, cond);
1371
1372     /* The interface is to return a boolean in r_dst.  */
1373     if (cmp.is_bool) {
1374         tcg_gen_mov_tl(r_dst, cmp.c1);
1375     } else {
1376         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1377     }
1378
1379     free_compare(&cmp);
1380 }
1381
1382 #ifdef TARGET_SPARC64
1383 // Inverted logic
1384 static const int gen_tcg_cond_reg[8] = {
1385     -1,
1386     TCG_COND_NE,
1387     TCG_COND_GT,
1388     TCG_COND_GE,
1389     -1,
1390     TCG_COND_EQ,
1391     TCG_COND_LE,
1392     TCG_COND_LT,
1393 };
1394
1395 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1396 {
1397     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1398     cmp->is_bool = false;
1399     cmp->g1 = true;
1400     cmp->g2 = false;
1401     cmp->c1 = r_src;
1402     cmp->c2 = tcg_const_tl(0);
1403 }
1404
1405 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1406 {
1407     DisasCompare cmp;
1408     gen_compare_reg(&cmp, cond, r_src);
1409
1410     /* The interface is to return a boolean in r_dst.  */
1411     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1412
1413     free_compare(&cmp);
1414 }
1415 #endif
1416
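/* Conditional branch translation.  cond 0 is "branch never" and cond 8
   "branch always"; with the annul bit set these skip or annul the delay
   slot by adjusting pc/npc directly.  A real conditional branch records
   the two candidate npc values in dc->jump_pc[] and marks npc as JUMP_PC
   to be resolved once the condition value is needed. */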
1417 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1418 {
1419     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1420     target_ulong target = dc->pc + offset;
1421
1422 #ifdef TARGET_SPARC64
1423     if (unlikely(AM_CHECK(dc))) {
1424         target &= 0xffffffffULL;
1425     }
1426 #endif
1427     if (cond == 0x0) {
1428         /* unconditional not taken */
1429         if (a) {
1430             dc->pc = dc->npc + 4;
1431             dc->npc = dc->pc + 4;
1432         } else {
1433             dc->pc = dc->npc;
1434             dc->npc = dc->pc + 4;
1435         }
1436     } else if (cond == 0x8) {
1437         /* unconditional taken */
1438         if (a) {
1439             dc->pc = target;
1440             dc->npc = dc->pc + 4;
1441         } else {
1442             dc->pc = dc->npc;
1443             dc->npc = target;
1444             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1445         }
1446     } else {
1447         flush_cond(dc);
1448         gen_cond(cpu_cond, cc, cond, dc);
1449         if (a) {
1450             gen_branch_a(dc, target, dc->npc, cpu_cond);
1451             dc->is_br = 1;
1452         } else {
1453             dc->pc = dc->npc;
1454             dc->jump_pc[0] = target;
1455             if (unlikely(dc->npc == DYNAMIC_PC)) {
1456                 dc->jump_pc[1] = DYNAMIC_PC;
1457                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1458             } else {
1459                 dc->jump_pc[1] = dc->npc + 4;
1460                 dc->npc = JUMP_PC;
1461             }
1462         }
1463     }
1464 }
1465
1466 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1467 {
1468     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1469     target_ulong target = dc->pc + offset;
1470
1471 #ifdef TARGET_SPARC64
1472     if (unlikely(AM_CHECK(dc))) {
1473         target &= 0xffffffffULL;
1474     }
1475 #endif
1476     if (cond == 0x0) {
1477         /* unconditional not taken */
1478         if (a) {
1479             dc->pc = dc->npc + 4;
1480             dc->npc = dc->pc + 4;
1481         } else {
1482             dc->pc = dc->npc;
1483             dc->npc = dc->pc + 4;
1484         }
1485     } else if (cond == 0x8) {
1486         /* unconditional taken */
1487         if (a) {
1488             dc->pc = target;
1489             dc->npc = dc->pc + 4;
1490         } else {
1491             dc->pc = dc->npc;
1492             dc->npc = target;
1493             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1494         }
1495     } else {
1496         flush_cond(dc);
1497         gen_fcond(cpu_cond, cc, cond);
1498         if (a) {
1499             gen_branch_a(dc, target, dc->npc, cpu_cond);
1500             dc->is_br = 1;
1501         } else {
1502             dc->pc = dc->npc;
1503             dc->jump_pc[0] = target;
1504             if (unlikely(dc->npc == DYNAMIC_PC)) {
1505                 dc->jump_pc[1] = DYNAMIC_PC;
1506                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1507             } else {
1508                 dc->jump_pc[1] = dc->npc + 4;
1509                 dc->npc = JUMP_PC;
1510             }
1511         }
1512     }
1513 }
1514
1515 #ifdef TARGET_SPARC64
1516 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1517                           TCGv r_reg)
1518 {
1519     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1520     target_ulong target = dc->pc + offset;
1521
1522     if (unlikely(AM_CHECK(dc))) {
1523         target &= 0xffffffffULL;
1524     }
1525     flush_cond(dc);
1526     gen_cond_reg(cpu_cond, cond, r_reg);
1527     if (a) {
1528         gen_branch_a(dc, target, dc->npc, cpu_cond);
1529         dc->is_br = 1;
1530     } else {
1531         dc->pc = dc->npc;
1532         dc->jump_pc[0] = target;
1533         if (unlikely(dc->npc == DYNAMIC_PC)) {
1534             dc->jump_pc[1] = DYNAMIC_PC;
1535             tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1536         } else {
1537             dc->jump_pc[1] = dc->npc + 4;
1538             dc->npc = JUMP_PC;
1539         }
1540     }
1541 }
1542
1543 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1544 {
1545     switch (fccno) {
1546     case 0:
1547         gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1548         break;
1549     case 1:
1550         gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1551         break;
1552     case 2:
1553         gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1554         break;
1555     case 3:
1556         gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1557         break;
1558     }
1559 }
1560
1561 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1562 {
1563     switch (fccno) {
1564     case 0:
1565         gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1566         break;
1567     case 1:
1568         gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1569         break;
1570     case 2:
1571         gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1572         break;
1573     case 3:
1574         gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1575         break;
1576     }
1577 }
1578
1579 static inline void gen_op_fcmpq(int fccno)
1580 {
1581     switch (fccno) {
1582     case 0:
1583         gen_helper_fcmpq(cpu_env);
1584         break;
1585     case 1:
1586         gen_helper_fcmpq_fcc1(cpu_env);
1587         break;
1588     case 2:
1589         gen_helper_fcmpq_fcc2(cpu_env);
1590         break;
1591     case 3:
1592         gen_helper_fcmpq_fcc3(cpu_env);
1593         break;
1594     }
1595 }
1596
1597 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1598 {
1599     switch (fccno) {
1600     case 0:
1601         gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1602         break;
1603     case 1:
1604         gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1605         break;
1606     case 2:
1607         gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1608         break;
1609     case 3:
1610         gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1611         break;
1612     }
1613 }
1614
1615 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1616 {
1617     switch (fccno) {
1618     case 0:
1619         gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1620         break;
1621     case 1:
1622         gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1623         break;
1624     case 2:
1625         gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1626         break;
1627     case 3:
1628         gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1629         break;
1630     }
1631 }
1632
1633 static inline void gen_op_fcmpeq(int fccno)
1634 {
1635     switch (fccno) {
1636     case 0:
1637         gen_helper_fcmpeq(cpu_env);
1638         break;
1639     case 1:
1640         gen_helper_fcmpeq_fcc1(cpu_env);
1641         break;
1642     case 2:
1643         gen_helper_fcmpeq_fcc2(cpu_env);
1644         break;
1645     case 3:
1646         gen_helper_fcmpeq_fcc3(cpu_env);
1647         break;
1648     }
1649 }
1650
1651 #else
1652
1653 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1654 {
1655     gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1656 }
1657
1658 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1659 {
1660     gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1661 }
1662
1663 static inline void gen_op_fcmpq(int fccno)
1664 {
1665     gen_helper_fcmpq(cpu_env);
1666 }
1667
1668 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1669 {
1670     gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1671 }
1672
1673 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1674 {
1675     gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1676 }
1677
1678 static inline void gen_op_fcmpeq(int fccno)
1679 {
1680     gen_helper_fcmpeq(cpu_env);
1681 }
1682 #endif
1683
1684 static inline void gen_op_fpexception_im(int fsr_flags)
1685 {
1686     TCGv_i32 r_const;
1687
1688     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1689     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1690     r_const = tcg_const_i32(TT_FP_EXCP);
1691     gen_helper_raise_exception(cpu_env, r_const);
1692     tcg_temp_free_i32(r_const);
1693 }
1694
1695 static int gen_trap_ifnofpu(DisasContext *dc)
1696 {
1697 #if !defined(CONFIG_USER_ONLY)
1698     if (!dc->fpu_enabled) {
1699         TCGv_i32 r_const;
1700
1701         save_state(dc);
1702         r_const = tcg_const_i32(TT_NFPU_INSN);
1703         gen_helper_raise_exception(cpu_env, r_const);
1704         tcg_temp_free_i32(r_const);
1705         dc->is_br = 1;
1706         return 1;
1707     }
1708 #endif
1709     return 0;
1710 }
1711
1712 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1713 {
1714     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1715 }
1716
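/* Helpers for translating FP operations.  The suffix letters give the
   operand precisions (F = single, D = double, Q = quad).  The plain
   gen_fop_* variants pass cpu_env so the helper can update the FSR and
   raise IEEE exceptions; the gen_ne_fop_* variants wrap the operations
   that do not raise them ("ne" presumably standing for non-excepting). */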
1717 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1718                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1719 {
1720     TCGv_i32 dst, src;
1721
1722     src = gen_load_fpr_F(dc, rs);
1723     dst = gen_dest_fpr_F();
1724
1725     gen(dst, cpu_env, src);
1726
1727     gen_store_fpr_F(dc, rd, dst);
1728 }
1729
1730 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1731                                  void (*gen)(TCGv_i32, TCGv_i32))
1732 {
1733     TCGv_i32 dst, src;
1734
1735     src = gen_load_fpr_F(dc, rs);
1736     dst = gen_dest_fpr_F();
1737
1738     gen(dst, src);
1739
1740     gen_store_fpr_F(dc, rd, dst);
1741 }
1742
1743 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1744                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1745 {
1746     TCGv_i32 dst, src1, src2;
1747
1748     src1 = gen_load_fpr_F(dc, rs1);
1749     src2 = gen_load_fpr_F(dc, rs2);
1750     dst = gen_dest_fpr_F();
1751
1752     gen(dst, cpu_env, src1, src2);
1753
1754     gen_store_fpr_F(dc, rd, dst);
1755 }
1756
1757 #ifdef TARGET_SPARC64
1758 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1759                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1760 {
1761     TCGv_i32 dst, src1, src2;
1762
1763     src1 = gen_load_fpr_F(dc, rs1);
1764     src2 = gen_load_fpr_F(dc, rs2);
1765     dst = gen_dest_fpr_F();
1766
1767     gen(dst, src1, src2);
1768
1769     gen_store_fpr_F(dc, rd, dst);
1770 }
1771 #endif
1772
1773 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1774                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1775 {
1776     TCGv_i64 dst, src;
1777
1778     src = gen_load_fpr_D(dc, rs);
1779     dst = gen_dest_fpr_D();
1780
1781     gen(dst, cpu_env, src);
1782
1783     gen_store_fpr_D(dc, rd, dst);
1784 }
1785
1786 #ifdef TARGET_SPARC64
1787 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1788                                  void (*gen)(TCGv_i64, TCGv_i64))
1789 {
1790     TCGv_i64 dst, src;
1791
1792     src = gen_load_fpr_D(dc, rs);
1793     dst = gen_dest_fpr_D();
1794
1795     gen(dst, src);
1796
1797     gen_store_fpr_D(dc, rd, dst);
1798 }
1799 #endif
1800
1801 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1802                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1803 {
1804     TCGv_i64 dst, src1, src2;
1805
1806     src1 = gen_load_fpr_D(dc, rs1);
1807     src2 = gen_load_fpr_D(dc, rs2);
1808     dst = gen_dest_fpr_D();
1809
1810     gen(dst, cpu_env, src1, src2);
1811
1812     gen_store_fpr_D(dc, rd, dst);
1813 }
1814
1815 #ifdef TARGET_SPARC64
1816 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1817                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1818 {
1819     TCGv_i64 dst, src1, src2;
1820
1821     src1 = gen_load_fpr_D(dc, rs1);
1822     src2 = gen_load_fpr_D(dc, rs2);
1823     dst = gen_dest_fpr_D();
1824
1825     gen(dst, src1, src2);
1826
1827     gen_store_fpr_D(dc, rd, dst);
1828 }
1829
1830 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1831                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1832 {
1833     TCGv_i64 dst, src1, src2;
1834
1835     src1 = gen_load_fpr_D(dc, rs1);
1836     src2 = gen_load_fpr_D(dc, rs2);
1837     dst = gen_dest_fpr_D();
1838
1839     gen(dst, cpu_gsr, src1, src2);
1840
1841     gen_store_fpr_D(dc, rd, dst);
1842 }
1843
1844 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1845                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1846 {
1847     TCGv_i64 dst, src0, src1, src2;
1848
1849     src1 = gen_load_fpr_D(dc, rs1);
1850     src2 = gen_load_fpr_D(dc, rs2);
1851     src0 = gen_load_fpr_D(dc, rd);
1852     dst = gen_dest_fpr_D();
1853
1854     gen(dst, src0, src1, src2);
1855
1856     gen_store_fpr_D(dc, rd, dst);
1857 }
1858 #endif
1859
1860 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1861                               void (*gen)(TCGv_ptr))
1862 {
1863     gen_op_load_fpr_QT1(QFPREG(rs));
1864
1865     gen(cpu_env);
1866
1867     gen_op_store_QT0_fpr(QFPREG(rd));
1868     gen_update_fprs_dirty(QFPREG(rd));
1869 }
1870
1871 #ifdef TARGET_SPARC64
1872 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1873                                  void (*gen)(TCGv_ptr))
1874 {
1875     gen_op_load_fpr_QT1(QFPREG(rs));
1876
1877     gen(cpu_env);
1878
1879     gen_op_store_QT0_fpr(QFPREG(rd));
1880     gen_update_fprs_dirty(QFPREG(rd));
1881 }
1882 #endif
1883
1884 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1885                                void (*gen)(TCGv_ptr))
1886 {
1887     gen_op_load_fpr_QT0(QFPREG(rs1));
1888     gen_op_load_fpr_QT1(QFPREG(rs2));
1889
1890     gen(cpu_env);
1891
1892     gen_op_store_QT0_fpr(QFPREG(rd));
1893     gen_update_fprs_dirty(QFPREG(rd));
1894 }
1895
1896 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1897                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1898 {
1899     TCGv_i64 dst;
1900     TCGv_i32 src1, src2;
1901
1902     src1 = gen_load_fpr_F(dc, rs1);
1903     src2 = gen_load_fpr_F(dc, rs2);
1904     dst = gen_dest_fpr_D();
1905
1906     gen(dst, cpu_env, src1, src2);
1907
1908     gen_store_fpr_D(dc, rd, dst);
1909 }
1910
1911 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1912                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1913 {
1914     TCGv_i64 src1, src2;
1915
1916     src1 = gen_load_fpr_D(dc, rs1);
1917     src2 = gen_load_fpr_D(dc, rs2);
1918
1919     gen(cpu_env, src1, src2);
1920
1921     gen_op_store_QT0_fpr(QFPREG(rd));
1922     gen_update_fprs_dirty(QFPREG(rd));
1923 }
1924
1925 #ifdef TARGET_SPARC64
1926 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1927                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1928 {
1929     TCGv_i64 dst;
1930     TCGv_i32 src;
1931
1932     src = gen_load_fpr_F(dc, rs);
1933     dst = gen_dest_fpr_D();
1934
1935     gen(dst, cpu_env, src);
1936
1937     gen_store_fpr_D(dc, rd, dst);
1938 }
1939 #endif
1940
1941 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1942                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1943 {
1944     TCGv_i64 dst;
1945     TCGv_i32 src;
1946
1947     src = gen_load_fpr_F(dc, rs);
1948     dst = gen_dest_fpr_D();
1949
1950     gen(dst, cpu_env, src);
1951
1952     gen_store_fpr_D(dc, rd, dst);
1953 }
1954
1955 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1956                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1957 {
1958     TCGv_i32 dst;
1959     TCGv_i64 src;
1960
1961     src = gen_load_fpr_D(dc, rs);
1962     dst = gen_dest_fpr_F();
1963
1964     gen(dst, cpu_env, src);
1965
1966     gen_store_fpr_F(dc, rd, dst);
1967 }
1968
1969 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1970                               void (*gen)(TCGv_i32, TCGv_ptr))
1971 {
1972     TCGv_i32 dst;
1973
1974     gen_op_load_fpr_QT1(QFPREG(rs));
1975     dst = gen_dest_fpr_F();
1976
1977     gen(dst, cpu_env);
1978
1979     gen_store_fpr_F(dc, rd, dst);
1980 }
1981
1982 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1983                               void (*gen)(TCGv_i64, TCGv_ptr))
1984 {
1985     TCGv_i64 dst;
1986
1987     gen_op_load_fpr_QT1(QFPREG(rs));
1988     dst = gen_dest_fpr_D();
1989
1990     gen(dst, cpu_env);
1991
1992     gen_store_fpr_D(dc, rd, dst);
1993 }
1994
1995 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1996                                  void (*gen)(TCGv_ptr, TCGv_i32))
1997 {
1998     TCGv_i32 src;
1999
2000     src = gen_load_fpr_F(dc, rs);
2001
2002     gen(cpu_env, src);
2003
2004     gen_op_store_QT0_fpr(QFPREG(rd));
2005     gen_update_fprs_dirty(QFPREG(rd));
2006 }
2007
2008 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
2009                                  void (*gen)(TCGv_ptr, TCGv_i64))
2010 {
2011     TCGv_i64 src;
2012
2013     src = gen_load_fpr_D(dc, rs);
2014
2015     gen(cpu_env, src);
2016
2017     gen_op_store_QT0_fpr(QFPREG(rd));
2018     gen_update_fprs_dirty(QFPREG(rd));
2019 }
2020
2021 /* asi moves */
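/* In the TARGET_SPARC64 versions below, an instruction in the immediate
   form (i bit set) takes its ASI from the current %asi register, while the
   register form encodes the ASI in the instruction itself; the 32-bit
   system-emulation versions further down only decode the instruction's
   asi field.  */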
2022 #ifdef TARGET_SPARC64
2023 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
2024 {
2025     int asi;
2026     TCGv_i32 r_asi;
2027
2028     if (IS_IMM) {
2029         r_asi = tcg_temp_new_i32();
2030         tcg_gen_mov_i32(r_asi, cpu_asi);
2031     } else {
2032         asi = GET_FIELD(insn, 19, 26);
2033         r_asi = tcg_const_i32(asi);
2034     }
2035     return r_asi;
2036 }
2037
2038 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2039                               int sign)
2040 {
2041     TCGv_i32 r_asi, r_size, r_sign;
2042
2043     r_asi = gen_get_asi(insn, addr);
2044     r_size = tcg_const_i32(size);
2045     r_sign = tcg_const_i32(sign);
2046     gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
2047     tcg_temp_free_i32(r_sign);
2048     tcg_temp_free_i32(r_size);
2049     tcg_temp_free_i32(r_asi);
2050 }
2051
2052 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2053 {
2054     TCGv_i32 r_asi, r_size;
2055
2056     r_asi = gen_get_asi(insn, addr);
2057     r_size = tcg_const_i32(size);
2058     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2059     tcg_temp_free_i32(r_size);
2060     tcg_temp_free_i32(r_asi);
2061 }
2062
2063 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
2064 {
2065     TCGv_i32 r_asi, r_size, r_rd;
2066
2067     r_asi = gen_get_asi(insn, addr);
2068     r_size = tcg_const_i32(size);
2069     r_rd = tcg_const_i32(rd);
2070     gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2071     tcg_temp_free_i32(r_rd);
2072     tcg_temp_free_i32(r_size);
2073     tcg_temp_free_i32(r_asi);
2074 }
2075
2076 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
2077 {
2078     TCGv_i32 r_asi, r_size, r_rd;
2079
2080     r_asi = gen_get_asi(insn, addr);
2081     r_size = tcg_const_i32(size);
2082     r_rd = tcg_const_i32(rd);
2083     gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2084     tcg_temp_free_i32(r_rd);
2085     tcg_temp_free_i32(r_size);
2086     tcg_temp_free_i32(r_asi);
2087 }
2088
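/* SWAP with an alternate space is emulated as an ASI load of the old word
   into cpu_tmp64 followed by an ASI store of src to the same address; the
   loaded value is then truncated into dst.  The load/store pair is not
   atomic at this level, which presumably relies on TCG's single-threaded
   execution of guest code.  */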
2089 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2090 {
2091     TCGv_i32 r_asi, r_size, r_sign;
2092
2093     r_asi = gen_get_asi(insn, addr);
2094     r_size = tcg_const_i32(4);
2095     r_sign = tcg_const_i32(0);
2096     gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2097     tcg_temp_free_i32(r_sign);
2098     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2099     tcg_temp_free_i32(r_size);
2100     tcg_temp_free_i32(r_asi);
2101     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2102 }
2103
2104 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2105                                 int insn, int rd)
2106 {
2107     TCGv_i32 r_asi, r_rd;
2108
2109     r_asi = gen_get_asi(insn, addr);
2110     r_rd = tcg_const_i32(rd);
2111     gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
2112     tcg_temp_free_i32(r_rd);
2113     tcg_temp_free_i32(r_asi);
2114 }
2115
2116 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2117                                 int insn, int rd)
2118 {
2119     TCGv_i32 r_asi, r_size;
2120     TCGv lo = gen_load_gpr(dc, rd + 1);
2121
2122     tcg_gen_concat_tl_i64(cpu_tmp64, lo, hi);
2123     r_asi = gen_get_asi(insn, addr);
2124     r_size = tcg_const_i32(8);
2125     gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2126     tcg_temp_free_i32(r_size);
2127     tcg_temp_free_i32(r_asi);
2128 }
2129
2130 static inline void gen_cas_asi(DisasContext *dc, TCGv dst, TCGv addr,
2131                                TCGv val2, int insn, int rd)
2132 {
2133     TCGv r_val1 = gen_load_gpr(dc, rd);
2134     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2135
2136     gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
2137     tcg_temp_free_i32(r_asi);
2138 }
2139
2140 static inline void gen_casx_asi(DisasContext *dc, TCGv dst, TCGv addr,
2141                                 TCGv val2, int insn, int rd)
2142 {
2143     TCGv r_val1 = gen_load_gpr(dc, rd);
2144     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2145
2146     gen_helper_casx_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
2147     tcg_temp_free_i32(r_asi);
2148 }
2149
2150 #elif !defined(CONFIG_USER_ONLY)
2151
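/* 32-bit system emulation: the ld/st helpers always operate on 64-bit
   values, so data is staged in cpu_tmp64 and truncated or extended to the
   target register width around each call.  */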
2152 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2153                               int sign)
2154 {
2155     TCGv_i32 r_asi, r_size, r_sign;
2156
2157     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2158     r_size = tcg_const_i32(size);
2159     r_sign = tcg_const_i32(sign);
2160     gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2161     tcg_temp_free(r_sign);
2162     tcg_temp_free(r_size);
2163     tcg_temp_free(r_asi);
2164     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2165 }
2166
2167 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2168 {
2169     TCGv_i32 r_asi, r_size;
2170
2171     tcg_gen_extu_tl_i64(cpu_tmp64, src);
2172     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2173     r_size = tcg_const_i32(size);
2174     gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2175     tcg_temp_free(r_size);
2176     tcg_temp_free(r_asi);
2177 }
2178
2179 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2180 {
2181     TCGv_i32 r_asi, r_size, r_sign;
2182     TCGv_i64 r_val;
2183
2184     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2185     r_size = tcg_const_i32(4);
2186     r_sign = tcg_const_i32(0);
2187     gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2188     tcg_temp_free(r_sign);
2189     r_val = tcg_temp_new_i64();
2190     tcg_gen_extu_tl_i64(r_val, src);
2191     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2192     tcg_temp_free_i64(r_val);
2193     tcg_temp_free(r_size);
2194     tcg_temp_free(r_asi);
2195     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2196 }
2197
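/* LDDA: the 64-bit value fetched from the alternate space is split across
   the even/odd register pair -- rd receives the high word and rd + 1 the
   low word.  */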
2198 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2199                                 int insn, int rd)
2200 {
2201     TCGv_i32 r_asi, r_size, r_sign;
2202     TCGv t;
2203
2204     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2205     r_size = tcg_const_i32(8);
2206     r_sign = tcg_const_i32(0);
2207     gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2208     tcg_temp_free(r_sign);
2209     tcg_temp_free(r_size);
2210     tcg_temp_free(r_asi);
2211
2212     t = gen_dest_gpr(dc, rd + 1);
2213     tcg_gen_trunc_i64_tl(t, cpu_tmp64);
2214     gen_store_gpr(dc, rd + 1, t);
2215
2216     tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2217     tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2218     gen_store_gpr(dc, rd, hi);
2219 }
2220
2221 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2222                                 int insn, int rd)
2223 {
2224     TCGv_i32 r_asi, r_size;
2225     TCGv lo = gen_load_gpr(dc, rd + 1);
2226
2227     tcg_gen_concat_tl_i64(cpu_tmp64, lo, hi);
2228     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2229     r_size = tcg_const_i32(8);
2230     gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2231     tcg_temp_free(r_size);
2232     tcg_temp_free(r_asi);
2233 }
2234 #endif
2235
2236 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
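/* LDSTUB is modelled as an ordinary ASI byte load followed by an ASI store
   of 0xff (the architectural "set all bits" value) to the same address; as
   with the swap helpers above, no atomicity is provided at this level.  */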
2237 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2238 {
2239     TCGv_i64 r_val;
2240     TCGv_i32 r_asi, r_size;
2241
2242     gen_ld_asi(dst, addr, insn, 1, 0);
2243
2244     r_val = tcg_const_i64(0xffULL);
2245     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2246     r_size = tcg_const_i32(1);
2247     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2248     tcg_temp_free_i32(r_size);
2249     tcg_temp_free_i32(r_asi);
2250     tcg_temp_free_i64(r_val);
2251 }
2252 #endif
2253
2254 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2255 {
2256     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2257     return gen_load_gpr(dc, rs1);
2258 }
2259
2260 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2261 {
2262     if (IS_IMM) { /* immediate */
2263         target_long simm = GET_FIELDs(insn, 19, 31);
2264         TCGv t = get_temp_tl(dc);
2265         tcg_gen_movi_tl(t, simm);
2266         return t;
2267     } else {      /* register */
2268         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2269         return gen_load_gpr(dc, rs2);
2270     }
2271 }
2272
2273 #ifdef TARGET_SPARC64
2274 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2275 {
2276     TCGv_i32 c32, zero, dst, s1, s2;
2277
2278     /* We have two choices here: extend the 32-bit data and use movcond_i64,
2279        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2280        the latter.  */
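    /* Folding to 32 bits means the movcond operates directly on the
       single-precision values; only the condition needs narrowing, instead
       of widening both sources and the destination.  */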
2281     c32 = tcg_temp_new_i32();
2282     if (cmp->is_bool) {
2283         tcg_gen_trunc_i64_i32(c32, cmp->c1);
2284     } else {
2285         TCGv_i64 c64 = tcg_temp_new_i64();
2286         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2287         tcg_gen_trunc_i64_i32(c32, c64);
2288         tcg_temp_free_i64(c64);
2289     }
2290
2291     s1 = gen_load_fpr_F(dc, rs);
2292     s2 = gen_load_fpr_F(dc, rd);
2293     dst = gen_dest_fpr_F();
2294     zero = tcg_const_i32(0);
2295
2296     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2297
2298     tcg_temp_free_i32(c32);
2299     tcg_temp_free_i32(zero);
2300     gen_store_fpr_F(dc, rd, dst);
2301 }
2302
2303 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2304 {
2305     TCGv_i64 dst = gen_dest_fpr_D();
2306     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2307                         gen_load_fpr_D(dc, rs),
2308                         gen_load_fpr_D(dc, rd));
2309     gen_store_fpr_D(dc, rd, dst);
2310 }
2311
2312 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2313 {
2314     int qd = QFPREG(rd);
2315     int qs = QFPREG(rs);
2316
2317     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2318                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2319     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2320                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2321
2322     gen_update_fprs_dirty(qd);
2323 }
2324
2325 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2326 {
2327     TCGv_i32 r_tl = tcg_temp_new_i32();
2328
2329     /* load env->tl into r_tl */
2330     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2331
2332     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be a power of 2 */
2333     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2334
2335     /* calculate offset to current trap state from env->ts, reuse r_tl */
2336     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2337     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2338
2339     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2340     {
2341         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2342         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2343         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2344         tcg_temp_free_ptr(r_tl_tmp);
2345     }
2346
2347     tcg_temp_free_i32(r_tl);
2348 }
2349
2350 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2351                      int width, bool cc, bool left)
2352 {
2353     TCGv lo1, lo2, t1, t2;
2354     uint64_t amask, tabl, tabr;
2355     int shift, imask, omask;
2356
2357     if (cc) {
2358         tcg_gen_mov_tl(cpu_cc_src, s1);
2359         tcg_gen_mov_tl(cpu_cc_src2, s2);
2360         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2361         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2362         dc->cc_op = CC_OP_SUB;
2363     }
2364
2365     /* Theory of operation: there are two tables, left and right (not to
2366        be confused with the left and right versions of the opcode).  These
2367        are indexed by the low 3 bits of the inputs.  To make things "easy",
2368        these tables are loaded into two constants, TABL and TABR below.
2369        The operation index = (input & imask) << shift calculates the index
2370        into the constant, while val = (table >> index) & omask calculates
2371        the value we're looking for.  */
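    /* Worked example (width 8, left): an s1 whose low bits are 5 gives
       index = 5 << 3 = 40, and (tabl >> 40) & 0xff = 0xe0, i.e. the top
       three bits of the 8-bit mask.  */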
2372     switch (width) {
2373     case 8:
2374         imask = 0x7;
2375         shift = 3;
2376         omask = 0xff;
2377         if (left) {
2378             tabl = 0x80c0e0f0f8fcfeffULL;
2379             tabr = 0xff7f3f1f0f070301ULL;
2380         } else {
2381             tabl = 0x0103070f1f3f7fffULL;
2382             tabr = 0xfffefcf8f0e0c080ULL;
2383         }
2384         break;
2385     case 16:
2386         imask = 0x6;
2387         shift = 1;
2388         omask = 0xf;
2389         if (left) {
2390             tabl = 0x8cef;
2391             tabr = 0xf731;
2392         } else {
2393             tabl = 0x137f;
2394             tabr = 0xfec8;
2395         }
2396         break;
2397     case 32:
2398         imask = 0x4;
2399         shift = 0;
2400         omask = 0x3;
2401         if (left) {
2402             tabl = (2 << 2) | 3;
2403             tabr = (3 << 2) | 1;
2404         } else {
2405             tabl = (1 << 2) | 3;
2406             tabr = (3 << 2) | 2;
2407         }
2408         break;
2409     default:
2410         abort();
2411     }
2412
2413     lo1 = tcg_temp_new();
2414     lo2 = tcg_temp_new();
2415     tcg_gen_andi_tl(lo1, s1, imask);
2416     tcg_gen_andi_tl(lo2, s2, imask);
2417     tcg_gen_shli_tl(lo1, lo1, shift);
2418     tcg_gen_shli_tl(lo2, lo2, shift);
2419
2420     t1 = tcg_const_tl(tabl);
2421     t2 = tcg_const_tl(tabr);
2422     tcg_gen_shr_tl(lo1, t1, lo1);
2423     tcg_gen_shr_tl(lo2, t2, lo2);
2424     tcg_gen_andi_tl(dst, lo1, omask);
2425     tcg_gen_andi_tl(lo2, lo2, omask);
2426
2427     amask = -8;
2428     if (AM_CHECK(dc)) {
2429         amask &= 0xffffffffULL;
2430     }
2431     tcg_gen_andi_tl(s1, s1, amask);
2432     tcg_gen_andi_tl(s2, s2, amask);
2433
2434     /* We want to compute
2435         dst = (s1 == s2 ? lo1 : lo1 & lo2).
2436        We've already done dst = lo1, so this reduces to
2437         dst &= (s1 == s2 ? -1 : lo2)
2438        Which we perform by
2439         lo2 |= -(s1 == s2)
2440         dst &= lo2
2441     */
2442     tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2443     tcg_gen_neg_tl(t1, t1);
2444     tcg_gen_or_tl(lo2, lo2, t1);
2445     tcg_gen_and_tl(dst, dst, lo2);
2446
2447     tcg_temp_free(lo1);
2448     tcg_temp_free(lo2);
2449     tcg_temp_free(t1);
2450     tcg_temp_free(t2);
2451 }
2452
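/* ALIGNADDRESS: dst = (s1 + s2) & ~7, and the low three bits of the sum
   (negated when "left" is set) are deposited into the low bits of %gsr,
   where gen_faligndata below reads them back as the byte offset.  */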
2453 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2454 {
2455     TCGv tmp = tcg_temp_new();
2456
2457     tcg_gen_add_tl(tmp, s1, s2);
2458     tcg_gen_andi_tl(dst, tmp, -8);
2459     if (left) {
2460         tcg_gen_neg_tl(tmp, tmp);
2461     }
2462     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2463
2464     tcg_temp_free(tmp);
2465 }
2466
2467 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2468 {
2469     TCGv t1, t2, shift;
2470
2471     t1 = tcg_temp_new();
2472     t2 = tcg_temp_new();
2473     shift = tcg_temp_new();
2474
2475     tcg_gen_andi_tl(shift, gsr, 7);
2476     tcg_gen_shli_tl(shift, shift, 3);
2477     tcg_gen_shl_tl(t1, s1, shift);
2478
2479     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2480        shift of (up to 63) followed by a constant shift of 1.  */
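    /* For example, with GSR.align = 3 the bit shift is 24; 24 ^ 63 = 39,
       so (s2 >> 39) >> 1 == s2 >> 40 == s2 >> (64 - 24), as required.  */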
2481     tcg_gen_xori_tl(shift, shift, 63);
2482     tcg_gen_shr_tl(t2, s2, shift);
2483     tcg_gen_shri_tl(t2, t2, 1);
2484
2485     tcg_gen_or_tl(dst, t1, t2);
2486
2487     tcg_temp_free(t1);
2488     tcg_temp_free(t2);
2489     tcg_temp_free(shift);
2490 }
2491 #endif
2492
2493 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
2494     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2495         goto illegal_insn;
2496 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2497     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2498         goto nfpu_insn;
2499
2500 /* before an instruction, dc->pc must be static */
2501 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2502 {
2503     unsigned int opc, rs1, rs2, rd;
2504     TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2505     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2506     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2507     target_long simm;
2508
2509     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2510         tcg_gen_debug_insn_start(dc->pc);
2511     }
2512
2513     opc = GET_FIELD(insn, 0, 1);
2514
2515     rd = GET_FIELD(insn, 2, 6);
2516
2517     cpu_tmp1 = cpu_src1 = tcg_temp_new();
2518     cpu_tmp2 = cpu_src2 = tcg_temp_new();
2519
2520     switch (opc) {
2521     case 0:                     /* branches/sethi */
2522         {
2523             unsigned int xop = GET_FIELD(insn, 7, 9);
2524             int32_t target;
2525             switch (xop) {
2526 #ifdef TARGET_SPARC64
2527             case 0x1:           /* V9 BPcc */
2528                 {
2529                     int cc;
2530
2531                     target = GET_FIELD_SP(insn, 0, 18);
2532                     target = sign_extend(target, 19);
2533                     target <<= 2;
2534                     cc = GET_FIELD_SP(insn, 20, 21);
2535                     if (cc == 0)
2536                         do_branch(dc, target, insn, 0);
2537                     else if (cc == 2)
2538                         do_branch(dc, target, insn, 1);
2539                     else
2540                         goto illegal_insn;
2541                     goto jmp_insn;
2542                 }
2543             case 0x3:           /* V9 BPr */
2544                 {
2545                     target = GET_FIELD_SP(insn, 0, 13) |
2546                         (GET_FIELD_SP(insn, 20, 21) << 14);
2547                     target = sign_extend(target, 16);
2548                     target <<= 2;
2549                     cpu_src1 = get_src1(dc, insn);
2550                     do_branch_reg(dc, target, insn, cpu_src1);
2551                     goto jmp_insn;
2552                 }
2553             case 0x5:           /* V9 FBPcc */
2554                 {
2555                     int cc = GET_FIELD_SP(insn, 20, 21);
2556                     if (gen_trap_ifnofpu(dc)) {
2557                         goto jmp_insn;
2558                     }
2559                     target = GET_FIELD_SP(insn, 0, 18);
2560                     target = sign_extend(target, 19);
2561                     target <<= 2;
2562                     do_fbranch(dc, target, insn, cc);
2563                     goto jmp_insn;
2564                 }
2565 #else
2566             case 0x7:           /* CBN+x */
2567                 {
2568                     goto ncp_insn;
2569                 }
2570 #endif
2571             case 0x2:           /* BN+x */
2572                 {
2573                     target = GET_FIELD(insn, 10, 31);
2574                     target = sign_extend(target, 22);
2575                     target <<= 2;
2576                     do_branch(dc, target, insn, 0);
2577                     goto jmp_insn;
2578                 }
2579             case 0x6:           /* FBN+x */
2580                 {
2581                     if (gen_trap_ifnofpu(dc)) {
2582                         goto jmp_insn;
2583                     }
2584                     target = GET_FIELD(insn, 10, 31);
2585                     target = sign_extend(target, 22);
2586                     target <<= 2;
2587                     do_fbranch(dc, target, insn, 0);
2588                     goto jmp_insn;
2589                 }
2590             case 0x4:           /* SETHI */
2591                 /* Special-case %g0 because that's the canonical nop.  */
2592                 if (rd) {
2593                     uint32_t value = GET_FIELD(insn, 10, 31);
2594                     TCGv t = gen_dest_gpr(dc, rd);
2595                     tcg_gen_movi_tl(t, value << 10);
2596                     gen_store_gpr(dc, rd, t);
2597                 }
2598                 break;
2599             case 0x0:           /* UNIMPL */
2600             default:
2601                 goto illegal_insn;
2602             }
2603             break;
2604         }
2605         break;
2606     case 1:                     /* CALL */
2607         {
2608             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2609             TCGv o7 = gen_dest_gpr(dc, 15);
2610
2611             tcg_gen_movi_tl(o7, dc->pc);
2612             gen_store_gpr(dc, 15, o7);
2613             target += dc->pc;
2614             gen_mov_pc_npc(dc);
2615 #ifdef TARGET_SPARC64
2616             if (unlikely(AM_CHECK(dc))) {
2617                 target &= 0xffffffffULL;
2618             }
2619 #endif
2620             dc->npc = target;
2621         }
2622         goto jmp_insn;
2623     case 2:                     /* FPU & Logical Operations */
2624         {
2625             unsigned int xop = GET_FIELD(insn, 7, 12);
2626             if (xop == 0x3a) {  /* generate trap */
2627                 int cond = GET_FIELD(insn, 3, 6);
2628                 TCGv_i32 trap;
2629                 int l1 = -1, mask;
2630
2631                 if (cond == 0) {
2632                     /* Trap never.  */
2633                     break;
2634                 }
2635
2636                 save_state(dc);
2637
2638                 if (cond != 8) {
2639                     /* Conditional trap.  */
2640                     DisasCompare cmp;
2641 #ifdef TARGET_SPARC64
2642                     /* V9 icc/xcc */
2643                     int cc = GET_FIELD_SP(insn, 11, 12);
2644                     if (cc == 0) {
2645                         gen_compare(&cmp, 0, cond, dc);
2646                     } else if (cc == 2) {
2647                         gen_compare(&cmp, 1, cond, dc);
2648                     } else {
2649                         goto illegal_insn;
2650                     }
2651 #else
2652                     gen_compare(&cmp, 0, cond, dc);
2653 #endif
2654                     l1 = gen_new_label();
2655                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2656                                       cmp.c1, cmp.c2, l1);
2657                     free_compare(&cmp);
2658                 }
2659
2660                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2661                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2662
2663                 /* Don't use the normal temporaries, as they may well have
2664                    gone out of scope with the branch above.  While we're
2665                    doing that we might as well pre-truncate to 32-bit.  */
2666                 trap = tcg_temp_new_i32();
2667
2668                 rs1 = GET_FIELD_SP(insn, 14, 18);
2669                 if (IS_IMM) {
2670                     rs2 = GET_FIELD_SP(insn, 0, 6);
2671                     if (rs1 == 0) {
2672                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2673                         /* Signal that the trap value is fully constant.  */
2674                         mask = 0;
2675                     } else {
2676                         TCGv t1 = gen_load_gpr(dc, rs1);
2677                         tcg_gen_trunc_tl_i32(trap, t1);
2678                         tcg_gen_addi_i32(trap, trap, rs2);
2679                     }
2680                 } else {
2681                     TCGv t1, t2;
2682                     rs2 = GET_FIELD_SP(insn, 0, 4);
2683                     t1 = gen_load_gpr(dc, rs1);
2684                     t2 = gen_load_gpr(dc, rs2);
2685                     tcg_gen_add_tl(t1, t1, t2);
2686                     tcg_gen_trunc_tl_i32(trap, t1);
2687                 }
2688                 if (mask != 0) {
2689                     tcg_gen_andi_i32(trap, trap, mask);
2690                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
2691                 }
2692
2693                 gen_helper_raise_exception(cpu_env, trap);
2694                 tcg_temp_free_i32(trap);
2695
2696                 if (cond == 8) {
2697                     /* An unconditional trap ends the TB.  */
2698                     dc->is_br = 1;
2699                     goto jmp_insn;
2700                 } else {
2701                     /* A conditional trap falls through to the next insn.  */
2702                     gen_set_label(l1);
2703                     break;
2704                 }
2705             } else if (xop == 0x28) {
2706                 rs1 = GET_FIELD(insn, 13, 17);
2707                 switch(rs1) {
2708                 case 0: /* rdy */
2709 #ifndef TARGET_SPARC64
2710                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2711                                        manual, rdy on the microSPARC
2712                                        II */
2713                 case 0x0f:          /* stbar in the SPARCv8 manual,
2714                                        rdy on the microSPARC II */
2715                 case 0x10 ... 0x1f: /* implementation-dependent in the
2716                                        SPARCv8 manual, rdy on the
2717                                        microSPARC II */
2718                     /* Read Asr17 */
2719                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2720                         TCGv t = gen_dest_gpr(dc, rd);
2721                         /* Read Asr17 for a Leon3 monoprocessor */
2722                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2723                         gen_store_gpr(dc, rd, t);
2724                         break;
2725                     }
2726 #endif
2727                     gen_store_gpr(dc, rd, cpu_y);
2728                     break;
2729 #ifdef TARGET_SPARC64
2730                 case 0x2: /* V9 rdccr */
2731                     update_psr(dc);
2732                     gen_helper_rdccr(cpu_dst, cpu_env);
2733                     gen_store_gpr(dc, rd, cpu_dst);
2734                     break;
2735                 case 0x3: /* V9 rdasi */
2736                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2737                     gen_store_gpr(dc, rd, cpu_dst);
2738                     break;
2739                 case 0x4: /* V9 rdtick */
2740                     {
2741                         TCGv_ptr r_tickptr;
2742
2743                         r_tickptr = tcg_temp_new_ptr();
2744                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2745                                        offsetof(CPUSPARCState, tick));
2746                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2747                         tcg_temp_free_ptr(r_tickptr);
2748                         gen_store_gpr(dc, rd, cpu_dst);
2749                     }
2750                     break;
2751                 case 0x5: /* V9 rdpc */
2752                     {
2753                         TCGv t = gen_dest_gpr(dc, rd);
2754                         if (unlikely(AM_CHECK(dc))) {
2755                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2756                         } else {
2757                             tcg_gen_movi_tl(t, dc->pc);
2758                         }
2759                         gen_store_gpr(dc, rd, t);
2760                     }
2761                     break;
2762                 case 0x6: /* V9 rdfprs */
2763                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2764                     gen_store_gpr(dc, rd, cpu_dst);
2765                     break;
2766                 case 0xf: /* V9 membar */
2767                     break; /* no effect */
2768                 case 0x13: /* Graphics Status */
2769                     if (gen_trap_ifnofpu(dc)) {
2770                         goto jmp_insn;
2771                     }
2772                     gen_store_gpr(dc, rd, cpu_gsr);
2773                     break;
2774                 case 0x16: /* Softint */
2775                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2776                     gen_store_gpr(dc, rd, cpu_dst);
2777                     break;
2778                 case 0x17: /* Tick compare */
2779                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
2780                     break;
2781                 case 0x18: /* System tick */
2782                     {
2783                         TCGv_ptr r_tickptr;
2784
2785                         r_tickptr = tcg_temp_new_ptr();
2786                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2787                                        offsetof(CPUSPARCState, stick));
2788                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2789                         tcg_temp_free_ptr(r_tickptr);
2790                         gen_store_gpr(dc, rd, cpu_dst);
2791                     }
2792                     break;
2793                 case 0x19: /* System tick compare */
2794                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
2795                     break;
2796                 case 0x10: /* Performance Control */
2797                 case 0x11: /* Performance Instrumentation Counter */
2798                 case 0x12: /* Dispatch Control */
2799                 case 0x14: /* Softint set, WO */
2800                 case 0x15: /* Softint clear, WO */
2801 #endif
2802                 default:
2803                     goto illegal_insn;
2804                 }
2805 #if !defined(CONFIG_USER_ONLY)
2806             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2807 #ifndef TARGET_SPARC64
2808                 if (!supervisor(dc)) {
2809                     goto priv_insn;
2810                 }
2811                 update_psr(dc);
2812                 gen_helper_rdpsr(cpu_dst, cpu_env);
2813 #else
2814                 CHECK_IU_FEATURE(dc, HYPV);
2815                 if (!hypervisor(dc))
2816                     goto priv_insn;
2817                 rs1 = GET_FIELD(insn, 13, 17);
2818                 switch (rs1) {
2819                 case 0: // hpstate
2820                     // gen_op_rdhpstate();
2821                     break;
2822                 case 1: // htstate
2823                     // gen_op_rdhtstate();
2824                     break;
2825                 case 3: // hintp
2826                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2827                     break;
2828                 case 5: // htba
2829                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2830                     break;
2831                 case 6: // hver
2832                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2833                     break;
2834                 case 31: // hstick_cmpr
2835                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2836                     break;
2837                 default:
2838                     goto illegal_insn;
2839                 }
2840 #endif
2841                 gen_store_gpr(dc, rd, cpu_dst);
2842                 break;
2843             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2844                 if (!supervisor(dc))
2845                     goto priv_insn;
2846 #ifdef TARGET_SPARC64
2847                 rs1 = GET_FIELD(insn, 13, 17);
2848                 switch (rs1) {
2849                 case 0: // tpc
2850                     {
2851                         TCGv_ptr r_tsptr;
2852
2853                         r_tsptr = tcg_temp_new_ptr();
2854                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2855                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2856                                       offsetof(trap_state, tpc));
2857                         tcg_temp_free_ptr(r_tsptr);
2858                     }
2859                     break;
2860                 case 1: // tnpc
2861                     {
2862                         TCGv_ptr r_tsptr;
2863
2864                         r_tsptr = tcg_temp_new_ptr();
2865                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2866                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2867                                       offsetof(trap_state, tnpc));
2868                         tcg_temp_free_ptr(r_tsptr);
2869                     }
2870                     break;
2871                 case 2: // tstate
2872                     {
2873                         TCGv_ptr r_tsptr;
2874
2875                         r_tsptr = tcg_temp_new_ptr();
2876                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2877                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2878                                       offsetof(trap_state, tstate));
2879                         tcg_temp_free_ptr(r_tsptr);
2880                     }
2881                     break;
2882                 case 3: // tt
2883                     {
2884                         TCGv_ptr r_tsptr;
2885
2886                         r_tsptr = tcg_temp_new_ptr();
2887                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2888                         tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2889                                        offsetof(trap_state, tt));
2890                         tcg_temp_free_ptr(r_tsptr);
2891                         tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2892                     }
2893                     break;
2894                 case 4: // tick
2895                     {
2896                         TCGv_ptr r_tickptr;
2897
2898                         r_tickptr = tcg_temp_new_ptr();
2899                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2900                                        offsetof(CPUSPARCState, tick));
2901                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2902                         tcg_temp_free_ptr(r_tickptr);
2903                     }
2904                     break;
2905                 case 5: // tba
2906                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2907                     break;
2908                 case 6: // pstate
2909                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2910                                    offsetof(CPUSPARCState, pstate));
2911                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2912                     break;
2913                 case 7: // tl
2914                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2915                                    offsetof(CPUSPARCState, tl));
2916                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2917                     break;
2918                 case 8: // pil
2919                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2920                                    offsetof(CPUSPARCState, psrpil));
2921                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2922                     break;
2923                 case 9: // cwp
2924                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
2925                     break;
2926                 case 10: // cansave
2927                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2928                                    offsetof(CPUSPARCState, cansave));
2929                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2930                     break;
2931                 case 11: // canrestore
2932                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2933                                    offsetof(CPUSPARCState, canrestore));
2934                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2935                     break;
2936                 case 12: // cleanwin
2937                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2938                                    offsetof(CPUSPARCState, cleanwin));
2939                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2940                     break;
2941                 case 13: // otherwin
2942                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2943                                    offsetof(CPUSPARCState, otherwin));
2944                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2945                     break;
2946                 case 14: // wstate
2947                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2948                                    offsetof(CPUSPARCState, wstate));
2949                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2950                     break;
2951                 case 16: // UA2005 gl
2952                     CHECK_IU_FEATURE(dc, GL);
2953                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2954                                    offsetof(CPUSPARCState, gl));
2955                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2956                     break;
2957                 case 26: // UA2005 strand status
2958                     CHECK_IU_FEATURE(dc, HYPV);
2959                     if (!hypervisor(dc))
2960                         goto priv_insn;
2961                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2962                     break;
2963                 case 31: // ver
2964                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2965                     break;
2966                 case 15: // fq
2967                 default:
2968                     goto illegal_insn;
2969                 }
2970 #else
2971                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2972 #endif
2973                 gen_store_gpr(dc, rd, cpu_tmp0);
2974                 break;
2975             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2976 #ifdef TARGET_SPARC64
2977                 save_state(dc);
2978                 gen_helper_flushw(cpu_env);
2979 #else
2980                 if (!supervisor(dc))
2981                     goto priv_insn;
2982                 gen_store_gpr(dc, rd, cpu_tbr);
2983 #endif
2984                 break;
2985 #endif
2986             } else if (xop == 0x34) {   /* FPU Operations */
2987                 if (gen_trap_ifnofpu(dc)) {
2988                     goto jmp_insn;
2989                 }
2990                 gen_op_clear_ieee_excp_and_FTT();
2991                 rs1 = GET_FIELD(insn, 13, 17);
2992                 rs2 = GET_FIELD(insn, 27, 31);
2993                 xop = GET_FIELD(insn, 18, 26);
2994                 save_state(dc);
2995                 switch (xop) {
2996                 case 0x1: /* fmovs */
2997                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2998                     gen_store_fpr_F(dc, rd, cpu_src1_32);
2999                     break;
3000                 case 0x5: /* fnegs */
3001                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3002                     break;
3003                 case 0x9: /* fabss */
3004                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3005                     break;
3006                 case 0x29: /* fsqrts */
3007                     CHECK_FPU_FEATURE(dc, FSQRT);
3008                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3009                     break;
3010                 case 0x2a: /* fsqrtd */
3011                     CHECK_FPU_FEATURE(dc, FSQRT);
3012                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3013                     break;
3014                 case 0x2b: /* fsqrtq */
3015                     CHECK_FPU_FEATURE(dc, FLOAT128);
3016                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3017                     break;
3018                 case 0x41: /* fadds */
3019                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3020                     break;
3021                 case 0x42: /* faddd */
3022                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3023                     break;
3024                 case 0x43: /* faddq */
3025                     CHECK_FPU_FEATURE(dc, FLOAT128);
3026                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3027                     break;
3028                 case 0x45: /* fsubs */
3029                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3030                     break;
3031                 case 0x46: /* fsubd */
3032                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3033                     break;
3034                 case 0x47: /* fsubq */
3035                     CHECK_FPU_FEATURE(dc, FLOAT128);
3036                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3037                     break;
3038                 case 0x49: /* fmuls */
3039                     CHECK_FPU_FEATURE(dc, FMUL);
3040                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3041                     break;
3042                 case 0x4a: /* fmuld */
3043                     CHECK_FPU_FEATURE(dc, FMUL);
3044                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3045                     break;
3046                 case 0x4b: /* fmulq */
3047                     CHECK_FPU_FEATURE(dc, FLOAT128);
3048                     CHECK_FPU_FEATURE(dc, FMUL);
3049                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3050                     break;
3051                 case 0x4d: /* fdivs */
3052                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3053                     break;
3054                 case 0x4e: /* fdivd */
3055                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3056                     break;
3057                 case 0x4f: /* fdivq */
3058                     CHECK_FPU_FEATURE(dc, FLOAT128);
3059                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3060                     break;
3061                 case 0x69: /* fsmuld */
3062                     CHECK_FPU_FEATURE(dc, FSMULD);
3063                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3064                     break;
3065                 case 0x6e: /* fdmulq */
3066                     CHECK_FPU_FEATURE(dc, FLOAT128);
3067                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3068                     break;
3069                 case 0xc4: /* fitos */
3070                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3071                     break;
3072                 case 0xc6: /* fdtos */
3073                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3074                     break;
3075                 case 0xc7: /* fqtos */
3076                     CHECK_FPU_FEATURE(dc, FLOAT128);
3077                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3078                     break;
3079                 case 0xc8: /* fitod */
3080                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3081                     break;
3082                 case 0xc9: /* fstod */
3083                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3084                     break;
3085                 case 0xcb: /* fqtod */
3086                     CHECK_FPU_FEATURE(dc, FLOAT128);
3087                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3088                     break;
3089                 case 0xcc: /* fitoq */
3090                     CHECK_FPU_FEATURE(dc, FLOAT128);
3091                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3092                     break;
3093                 case 0xcd: /* fstoq */
3094                     CHECK_FPU_FEATURE(dc, FLOAT128);
3095                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3096                     break;
3097                 case 0xce: /* fdtoq */
3098                     CHECK_FPU_FEATURE(dc, FLOAT128);
3099                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3100                     break;
3101                 case 0xd1: /* fstoi */
3102                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3103                     break;
3104                 case 0xd2: /* fdtoi */
3105                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3106                     break;
3107                 case 0xd3: /* fqtoi */
3108                     CHECK_FPU_FEATURE(dc, FLOAT128);
3109                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3110                     break;
3111 #ifdef TARGET_SPARC64
3112                 case 0x2: /* V9 fmovd */
3113                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3114                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3115                     break;
3116                 case 0x3: /* V9 fmovq */
3117                     CHECK_FPU_FEATURE(dc, FLOAT128);
3118                     gen_move_Q(rd, rs2);
3119                     break;
3120                 case 0x6: /* V9 fnegd */
3121                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3122                     break;
3123                 case 0x7: /* V9 fnegq */
3124                     CHECK_FPU_FEATURE(dc, FLOAT128);
3125                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3126                     break;
3127                 case 0xa: /* V9 fabsd */
3128                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3129                     break;
3130                 case 0xb: /* V9 fabsq */
3131                     CHECK_FPU_FEATURE(dc, FLOAT128);
3132                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3133                     break;
3134                 case 0x81: /* V9 fstox */
3135                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3136                     break;
3137                 case 0x82: /* V9 fdtox */
3138                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3139                     break;
3140                 case 0x83: /* V9 fqtox */
3141                     CHECK_FPU_FEATURE(dc, FLOAT128);
3142                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3143                     break;
3144                 case 0x84: /* V9 fxtos */
3145                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3146                     break;
3147                 case 0x88: /* V9 fxtod */
3148                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3149                     break;
3150                 case 0x8c: /* V9 fxtoq */
3151                     CHECK_FPU_FEATURE(dc, FLOAT128);
3152                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3153                     break;
3154 #endif
3155                 default:
3156                     goto illegal_insn;
3157                 }
3158             } else if (xop == 0x35) {   /* FPU Operations */
3159 #ifdef TARGET_SPARC64
3160                 int cond;
3161 #endif
3162                 if (gen_trap_ifnofpu(dc)) {
3163                     goto jmp_insn;
3164                 }
3165                 gen_op_clear_ieee_excp_and_FTT();
3166                 rs1 = GET_FIELD(insn, 13, 17);
3167                 rs2 = GET_FIELD(insn, 27, 31);
3168                 xop = GET_FIELD(insn, 18, 26);
3169                 save_state(dc);
3170
3171 #ifdef TARGET_SPARC64
3172 #define FMOVR(sz)                                                  \
3173                 do {                                               \
3174                     DisasCompare cmp;                              \
3175                     cond = GET_FIELD_SP(insn, 14, 17);             \
3176                     cpu_src1 = get_src1(dc, insn);                 \
3177                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3178                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3179                     free_compare(&cmp);                            \
3180                 } while (0)
3181
3182                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3183                     FMOVR(s);
3184                     break;
3185                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3186                     FMOVR(d);
3187                     break;
3188                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3189                     CHECK_FPU_FEATURE(dc, FLOAT128);
3190                     FMOVR(q);
3191                     break;
3192                 }
3193 #undef FMOVR
3194 #endif
3195                 switch (xop) {
3196 #ifdef TARGET_SPARC64
3197 #define FMOVCC(fcc, sz)                                                 \
3198                     do {                                                \
3199                         DisasCompare cmp;                               \
3200                         cond = GET_FIELD_SP(insn, 14, 17);              \
3201                         gen_fcompare(&cmp, fcc, cond);                  \
3202                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3203                         free_compare(&cmp);                             \
3204                     } while (0)
3205
3206                     case 0x001: /* V9 fmovscc %fcc0 */
3207                         FMOVCC(0, s);
3208                         break;
3209                     case 0x002: /* V9 fmovdcc %fcc0 */
3210                         FMOVCC(0, d);
3211                         break;
3212                     case 0x003: /* V9 fmovqcc %fcc0 */
3213                         CHECK_FPU_FEATURE(dc, FLOAT128);
3214                         FMOVCC(0, q);
3215                         break;
3216                     case 0x041: /* V9 fmovscc %fcc1 */
3217                         FMOVCC(1, s);
3218                         break;
3219                     case 0x042: /* V9 fmovdcc %fcc1 */
3220                         FMOVCC(1, d);
3221                         break;
3222                     case 0x043: /* V9 fmovqcc %fcc1 */
3223                         CHECK_FPU_FEATURE(dc, FLOAT128);
3224                         FMOVCC(1, q);
3225                         break;
3226                     case 0x081: /* V9 fmovscc %fcc2 */
3227                         FMOVCC(2, s);
3228                         break;
3229                     case 0x082: /* V9 fmovdcc %fcc2 */
3230                         FMOVCC(2, d);
3231                         break;
3232                     case 0x083: /* V9 fmovqcc %fcc2 */
3233                         CHECK_FPU_FEATURE(dc, FLOAT128);
3234                         FMOVCC(2, q);
3235                         break;
3236                     case 0x0c1: /* V9 fmovscc %fcc3 */
3237                         FMOVCC(3, s);
3238                         break;
3239                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3240                         FMOVCC(3, d);
3241                         break;
3242                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3243                         CHECK_FPU_FEATURE(dc, FLOAT128);
3244                         FMOVCC(3, q);
3245                         break;
3246 #undef FMOVCC
3247 #define FMOVCC(xcc, sz)                                                 \
3248                     do {                                                \
3249                         DisasCompare cmp;                               \
3250                         cond = GET_FIELD_SP(insn, 14, 17);              \
3251                         gen_compare(&cmp, xcc, cond, dc);               \
3252                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3253                         free_compare(&cmp);                             \
3254                     } while (0)
3255
3256                     case 0x101: /* V9 fmovscc %icc */
3257                         FMOVCC(0, s);
3258                         break;
3259                     case 0x102: /* V9 fmovdcc %icc */
3260                         FMOVCC(0, d);
3261                         break;
3262                     case 0x103: /* V9 fmovqcc %icc */
3263                         CHECK_FPU_FEATURE(dc, FLOAT128);
3264                         FMOVCC(0, q);
3265                         break;
3266                     case 0x181: /* V9 fmovscc %xcc */
3267                         FMOVCC(1, s);
3268                         break;
3269                     case 0x182: /* V9 fmovdcc %xcc */
3270                         FMOVCC(1, d);
3271                         break;
3272                     case 0x183: /* V9 fmovqcc %xcc */
3273                         CHECK_FPU_FEATURE(dc, FLOAT128);
3274                         FMOVCC(1, q);
3275                         break;
3276 #undef FMOVCC
3277 #endif
3278                     case 0x51: /* fcmps, V9 %fcc */
3279                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3280                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3281                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3282                         break;
3283                     case 0x52: /* fcmpd, V9 %fcc */
3284                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3285                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3286                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3287                         break;
3288                     case 0x53: /* fcmpq, V9 %fcc */
3289                         CHECK_FPU_FEATURE(dc, FLOAT128);
3290                         gen_op_load_fpr_QT0(QFPREG(rs1));
3291                         gen_op_load_fpr_QT1(QFPREG(rs2));
3292                         gen_op_fcmpq(rd & 3);
3293                         break;
3294                     case 0x55: /* fcmpes, V9 %fcc */
3295                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3296                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3297                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3298                         break;
3299                     case 0x56: /* fcmped, V9 %fcc */
3300                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3301                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3302                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3303                         break;
3304                     case 0x57: /* fcmpeq, V9 %fcc */
3305                         CHECK_FPU_FEATURE(dc, FLOAT128);
3306                         gen_op_load_fpr_QT0(QFPREG(rs1));
3307                         gen_op_load_fpr_QT1(QFPREG(rs2));
3308                         gen_op_fcmpeq(rd & 3);
3309                         break;
3310                     default:
3311                         goto illegal_insn;
3312                 }
3313             } else if (xop == 0x2) {
3314                 TCGv dst = gen_dest_gpr(dc, rd);
3315                 rs1 = GET_FIELD(insn, 13, 17);
3316                 if (rs1 == 0) {
3317                     /* clr/mov shortcut: or %g0, x, y -> mov x, y */
3318                     if (IS_IMM) {       /* immediate */
3319                         simm = GET_FIELDs(insn, 19, 31);
3320                         tcg_gen_movi_tl(dst, simm);
3321                         gen_store_gpr(dc, rd, dst);
3322                     } else {            /* register */
3323                         rs2 = GET_FIELD(insn, 27, 31);
3324                         if (rs2 == 0) {
3325                             tcg_gen_movi_tl(dst, 0);
3326                             gen_store_gpr(dc, rd, dst);
3327                         } else {
3328                             cpu_src2 = gen_load_gpr(dc, rs2);
3329                             gen_store_gpr(dc, rd, cpu_src2);
3330                         }
3331                     }
3332                 } else {
3333                     cpu_src1 = get_src1(dc, insn);
3334                     if (IS_IMM) {       /* immediate */
3335                         simm = GET_FIELDs(insn, 19, 31);
3336                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3337                         gen_store_gpr(dc, rd, dst);
3338                     } else {            /* register */
3339                         rs2 = GET_FIELD(insn, 27, 31);
3340                         if (rs2 == 0) {
3341                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3342                             gen_store_gpr(dc, rd, cpu_src1);
3343                         } else {
3344                             cpu_src2 = gen_load_gpr(dc, rs2);
3345                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3346                             gen_store_gpr(dc, rd, dst);
3347                         }
3348                     }
3349                 }
3350 #ifdef TARGET_SPARC64
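             /* For V9, bit 12 of the instruction selects the 64-bit shift
                (sllx/srlx/srax) with a 6-bit count; the 32-bit forms use a
                5-bit count, with srl/sra first zero- or sign-extending the
                source from 32 bits as the code below shows.  */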
3351             } else if (xop == 0x25) { /* sll, V9 sllx */
3352                 cpu_src1 = get_src1(dc, insn);
3353                 if (IS_IMM) {   /* immediate */
3354                     simm = GET_FIELDs(insn, 20, 31);
3355                     if (insn & (1 << 12)) {
3356                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3357                     } else {
3358                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3359                     }
3360                 } else {                /* register */
3361                     rs2 = GET_FIELD(insn, 27, 31);
3362                     cpu_src2 = gen_load_gpr(dc, rs2);
3363                     if (insn & (1 << 12)) {
3364                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3365                     } else {
3366                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3367                     }
3368                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3369                 }
3370                 gen_store_gpr(dc, rd, cpu_dst);
3371             } else if (xop == 0x26) { /* srl, V9 srlx */
3372                 cpu_src1 = get_src1(dc, insn);
3373                 if (IS_IMM) {   /* immediate */
3374                     simm = GET_FIELDs(insn, 20, 31);
3375                     if (insn & (1 << 12)) {
3376                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3377                     } else {
3378                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3379                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3380                     }
3381                 } else {                /* register */
3382                     rs2 = GET_FIELD(insn, 27, 31);
3383                     cpu_src2 = gen_load_gpr(dc, rs2);
3384                     if (insn & (1 << 12)) {
3385                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3386                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3387                     } else {
3388                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3389                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3390                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3391                     }
3392                 }
3393                 gen_store_gpr(dc, rd, cpu_dst);
3394             } else if (xop == 0x27) { /* sra, V9 srax */
3395                 cpu_src1 = get_src1(dc, insn);
3396                 if (IS_IMM) {   /* immediate */
3397                     simm = GET_FIELDs(insn, 20, 31);
3398                     if (insn & (1 << 12)) {
3399                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3400                     } else {
3401                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3402                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3403                     }
3404                 } else {                /* register */
3405                     rs2 = GET_FIELD(insn, 27, 31);
3406                     cpu_src2 = gen_load_gpr(dc, rs2);
3407                     if (insn & (1 << 12)) {
3408                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3409                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3410                     } else {
3411                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3412                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3413                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3414                     }
3415                 }
3416                 gen_store_gpr(dc, rd, cpu_dst);
3417 #endif
3418             } else if (xop < 0x36) {
3419                 if (xop < 0x20) {
3420                     cpu_src1 = get_src1(dc, insn);
3421                     cpu_src2 = get_src2(dc, insn);
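                         /* Bit 4 of xop (0x10) selects the "cc" form of each
                            ALU operation, which also updates the integer
                            condition codes; it is masked out of the switch
                            value and tested per case.  */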
3422                     switch (xop & ~0x10) {
3423                     case 0x0: /* add */
3424                         if (xop & 0x10) {
3425                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3426                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3427                             dc->cc_op = CC_OP_ADD;
3428                         } else {
3429                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3430                         }
3431                         break;
3432                     case 0x1: /* and */
3433                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3434                         if (xop & 0x10) {
3435                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3436                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3437                             dc->cc_op = CC_OP_LOGIC;
3438                         }
3439                         break;
3440                     case 0x2: /* or */
3441                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3442                         if (xop & 0x10) {
3443                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3444                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3445                             dc->cc_op = CC_OP_LOGIC;
3446                         }
3447                         break;
3448                     case 0x3: /* xor */
3449                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3450                         if (xop & 0x10) {
3451                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3452                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3453                             dc->cc_op = CC_OP_LOGIC;
3454                         }
3455                         break;
3456                     case 0x4: /* sub */
3457                         if (xop & 0x10) {
3458                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3459                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3460                             dc->cc_op = CC_OP_SUB;
3461                         } else {
3462                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3463                         }
3464                         break;
3465                     case 0x5: /* andn */
3466                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3467                         if (xop & 0x10) {
3468                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3469                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3470                             dc->cc_op = CC_OP_LOGIC;
3471                         }
3472                         break;
3473                     case 0x6: /* orn */
3474                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3475                         if (xop & 0x10) {
3476                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3477                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3478                             dc->cc_op = CC_OP_LOGIC;
3479                         }
3480                         break;
3481                     case 0x7: /* xorn */
3482                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3483                         if (xop & 0x10) {
3484                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3485                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3486                             dc->cc_op = CC_OP_LOGIC;
3487                         }
3488                         break;
3489                     case 0x8: /* addx, V9 addc */
3490                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3491                                         (xop & 0x10));
3492                         break;
3493 #ifdef TARGET_SPARC64
3494                     case 0x9: /* V9 mulx */
3495                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3496                         break;
3497 #endif
3498                     case 0xa: /* umul */
3499                         CHECK_IU_FEATURE(dc, MUL);
3500                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3501                         if (xop & 0x10) {
3502                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3503                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3504                             dc->cc_op = CC_OP_LOGIC;
3505                         }
3506                         break;
3507                     case 0xb: /* smul */
3508                         CHECK_IU_FEATURE(dc, MUL);
3509                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3510                         if (xop & 0x10) {
3511                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3512                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3513                             dc->cc_op = CC_OP_LOGIC;
3514                         }
3515                         break;
3516                     case 0xc: /* subx, V9 subc */
3517                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3518                                         (xop & 0x10));
3519                         break;
3520 #ifdef TARGET_SPARC64
3521                     case 0xd: /* V9 udivx */
3522                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3523                         break;
3524 #endif
3525                     case 0xe: /* udiv */
3526                         CHECK_IU_FEATURE(dc, DIV);
3527                         if (xop & 0x10) {
3528                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3529                                                cpu_src2);
3530                             dc->cc_op = CC_OP_DIV;
3531                         } else {
3532                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3533                                             cpu_src2);
3534                         }
3535                         break;
3536                     case 0xf: /* sdiv */
3537                         CHECK_IU_FEATURE(dc, DIV);
3538                         if (xop & 0x10) {
3539                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3540                                                cpu_src2);
3541                             dc->cc_op = CC_OP_DIV;
3542                         } else {
3543                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3544                                             cpu_src2);
3545                         }
3546                         break;
3547                     default:
3548                         goto illegal_insn;
3549                     }
3550                     gen_store_gpr(dc, rd, cpu_dst);
3551                 } else {
3552                     cpu_src1 = get_src1(dc, insn);
3553                     cpu_src2 = get_src2(dc, insn);
3554                     switch (xop) {
3555                     case 0x20: /* taddcc */
3556                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3557                         gen_store_gpr(dc, rd, cpu_dst);
3558                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3559                         dc->cc_op = CC_OP_TADD;
3560                         break;
3561                     case 0x21: /* tsubcc */
3562                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3563                         gen_store_gpr(dc, rd, cpu_dst);
3564                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3565                         dc->cc_op = CC_OP_TSUB;
3566                         break;
3567                     case 0x22: /* taddcctv */
3568                         gen_helper_taddcctv(cpu_dst, cpu_env,
3569                                             cpu_src1, cpu_src2);
3570                         gen_store_gpr(dc, rd, cpu_dst);
3571                         dc->cc_op = CC_OP_TADDTV;
3572                         break;
3573                     case 0x23: /* tsubcctv */
3574                         gen_helper_tsubcctv(cpu_dst, cpu_env,
3575                                             cpu_src1, cpu_src2);
3576                         gen_store_gpr(dc, rd, cpu_dst);
3577                         dc->cc_op = CC_OP_TSUBTV;
3578                         break;
3579                     case 0x24: /* mulscc */
3580                         update_psr(dc);
3581                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3582                         gen_store_gpr(dc, rd, cpu_dst);
3583                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3584                         dc->cc_op = CC_OP_ADD;
3585                         break;
3586 #ifndef TARGET_SPARC64
3587                     case 0x25:  /* sll */
3588                         if (IS_IMM) { /* immediate */
3589                             simm = GET_FIELDs(insn, 20, 31);
3590                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3591                         } else { /* register */
3592                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3593                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3594                         }
3595                         gen_store_gpr(dc, rd, cpu_dst);
3596                         break;
3597                     case 0x26:  /* srl */
3598                         if (IS_IMM) { /* immediate */
3599                             simm = GET_FIELDs(insn, 20, 31);
3600                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3601                         } else { /* register */
3602                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3603                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3604                         }
3605                         gen_store_gpr(dc, rd, cpu_dst);
3606                         break;
3607                     case 0x27:  /* sra */
3608                         if (IS_IMM) { /* immediate */
3609                             simm = GET_FIELDs(insn, 20, 31);
3610                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3611                         } else { /* register */
3612                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3613                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3614                         }
3615                         gen_store_gpr(dc, rd, cpu_dst);
3616                         break;
3617 #endif
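                         /* xop 0x30..0x33 write the ancillary/privileged state
                            registers (wry/wrpsr/wrwim/wrtbr and the V9
                            wrasr/saved/restored/wrpr/wrhpr encodings).  For the
                            register-writing forms the architecture defines the
                            value written as r[rs1] XOR (r[rs2] or the
                            immediate), hence the xor before each write.  */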
3618                     case 0x30:
3619                         {
3620                             switch(rd) {
3621                             case 0: /* wry */
3622                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3623                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3624                                 break;
3625 #ifndef TARGET_SPARC64
3626                             case 0x01 ... 0x0f: /* undefined in the
3627                                                    SPARCv8 manual, nop
3628                                                    on the microSPARC
3629                                                    II */
3630                             case 0x10 ... 0x1f: /* implementation-dependent
3631                                                    in the SPARCv8
3632                                                    manual, nop on the
3633                                                    microSPARC II */
3634                                 break;
3635 #else
3636                             case 0x2: /* V9 wrccr */
3637                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3638                                 gen_helper_wrccr(cpu_env, cpu_dst);
3639                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3640                                 dc->cc_op = CC_OP_FLAGS;
3641                                 break;
3642                             case 0x3: /* V9 wrasi */
3643                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3644                                 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3645                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3646                                 break;
3647                             case 0x6: /* V9 wrfprs */
3648                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3649                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3650                                 save_state(dc);
3651                                 gen_op_next_insn();
3652                                 tcg_gen_exit_tb(0);
3653                                 dc->is_br = 1;
3654                                 break;
3655                             case 0xf: /* V9 sir, nop if user */
3656 #if !defined(CONFIG_USER_ONLY)
3657                                 if (supervisor(dc)) {
3658                                     ; /* XXX: SIR (software-initiated reset) not implemented */
3659                                 }
3660 #endif
3661                                 break;
3662                             case 0x13: /* Graphics Status */
3663                                 if (gen_trap_ifnofpu(dc)) {
3664                                     goto jmp_insn;
3665                                 }
3666                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3667                                 break;
3668                             case 0x14: /* Softint set */
3669                                 if (!supervisor(dc))
3670                                     goto illegal_insn;
3671                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3672                                 gen_helper_set_softint(cpu_env, cpu_tmp64);
3673                                 break;
3674                             case 0x15: /* Softint clear */
3675                                 if (!supervisor(dc))
3676                                     goto illegal_insn;
3677                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3678                                 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3679                                 break;
3680                             case 0x16: /* Softint write */
3681                                 if (!supervisor(dc))
3682                                     goto illegal_insn;
3683                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3684                                 gen_helper_write_softint(cpu_env, cpu_tmp64);
3685                                 break;
3686                             case 0x17: /* Tick compare */
3687 #if !defined(CONFIG_USER_ONLY)
3688                                 if (!supervisor(dc))
3689                                     goto illegal_insn;
3690 #endif
3691                                 {
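                                         /* Writing the tick compare value must
                                            also reprogram the timer, so the
                                            helper is handed a pointer to the
                                            tick timer state loaded from env.  */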
3692                                     TCGv_ptr r_tickptr;
3693
3694                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3695                                                    cpu_src2);
3696                                     r_tickptr = tcg_temp_new_ptr();
3697                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3698                                                    offsetof(CPUSPARCState, tick));
3699                                     gen_helper_tick_set_limit(r_tickptr,
3700                                                               cpu_tick_cmpr);
3701                                     tcg_temp_free_ptr(r_tickptr);
3702                                 }
3703                                 break;
3704                             case 0x18: /* System tick */
3705 #if !defined(CONFIG_USER_ONLY)
3706                                 if (!supervisor(dc))
3707                                     goto illegal_insn;
3708 #endif
3709                                 {
3710                                     TCGv_ptr r_tickptr;
3711
3712                                     tcg_gen_xor_tl(cpu_dst, cpu_src1,
3713                                                    cpu_src2);
3714                                     r_tickptr = tcg_temp_new_ptr();
3715                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3716                                                    offsetof(CPUSPARCState, stick));
3717                                     gen_helper_tick_set_count(r_tickptr,
3718                                                               cpu_dst);
3719                                     tcg_temp_free_ptr(r_tickptr);
3720                                 }
3721                                 break;
3722                             case 0x19: /* System tick compare */
3723 #if !defined(CONFIG_USER_ONLY)
3724                                 if (!supervisor(dc))
3725                                     goto illegal_insn;
3726 #endif
3727                                 {
3728                                     TCGv_ptr r_tickptr;
3729
3730                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3731                                                    cpu_src2);
3732                                     r_tickptr = tcg_temp_new_ptr();
3733                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3734                                                    offsetof(CPUSPARCState, stick));
3735                                     gen_helper_tick_set_limit(r_tickptr,
3736                                                               cpu_stick_cmpr);
3737                                     tcg_temp_free_ptr(r_tickptr);
3738                                 }
3739                                 break;
3740
3741                             case 0x10: /* Performance Control */
3742                             case 0x11: /* Performance Instrumentation
3743                                           Counter */
3744                             case 0x12: /* Dispatch Control */
3745 #endif
3746                             default:
3747                                 goto illegal_insn;
3748                             }
3749                         }
3750                         break;
3751 #if !defined(CONFIG_USER_ONLY)
3752                     case 0x31: /* wrpsr, V9 saved, restored */
3753                         {
3754                             if (!supervisor(dc))
3755                                 goto priv_insn;
3756 #ifdef TARGET_SPARC64
3757                             switch (rd) {
3758                             case 0:
3759                                 gen_helper_saved(cpu_env);
3760                                 break;
3761                             case 1:
3762                                 gen_helper_restored(cpu_env);
3763                                 break;
3764                             case 2: /* UA2005 allclean */
3765                             case 3: /* UA2005 otherw */
3766                             case 4: /* UA2005 normalw */
3767                             case 5: /* UA2005 invalw */
3768                                 // XXX: not implemented; fall through to illegal_insn
3769                             default:
3770                                 goto illegal_insn;
3771                             }
3772 #else
3773                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3774                             gen_helper_wrpsr(cpu_env, cpu_dst);
3775                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3776                             dc->cc_op = CC_OP_FLAGS;
3777                             save_state(dc);
3778                             gen_op_next_insn();
3779                             tcg_gen_exit_tb(0);
3780                             dc->is_br = 1;
3781 #endif
3782                         }
3783                         break;
3784                     case 0x32: /* wrwim, V9 wrpr */
3785                         {
3786                             if (!supervisor(dc))
3787                                 goto priv_insn;
3788                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3789 #ifdef TARGET_SPARC64
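                                 /* V9 wrpr: rd selects the privileged register.
                                    Writes to pstate and tl save state and force
                                    npc to DYNAMIC_PC below, so translation does
                                    not continue on stale assumptions.  */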
3790                             switch (rd) {
3791                             case 0: // tpc
3792                                 {
3793                                     TCGv_ptr r_tsptr;
3794
3795                                     r_tsptr = tcg_temp_new_ptr();
3796                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3797                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3798                                                   offsetof(trap_state, tpc));
3799                                     tcg_temp_free_ptr(r_tsptr);
3800                                 }
3801                                 break;
3802                             case 1: // tnpc
3803                                 {
3804                                     TCGv_ptr r_tsptr;
3805
3806                                     r_tsptr = tcg_temp_new_ptr();
3807                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3808                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3809                                                   offsetof(trap_state, tnpc));
3810                                     tcg_temp_free_ptr(r_tsptr);
3811                                 }
3812                                 break;
3813                             case 2: // tstate
3814                                 {
3815                                     TCGv_ptr r_tsptr;
3816
3817                                     r_tsptr = tcg_temp_new_ptr();
3818                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3819                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3820                                                   offsetof(trap_state,
3821                                                            tstate));
3822                                     tcg_temp_free_ptr(r_tsptr);
3823                                 }
3824                                 break;
3825                             case 3: // tt
3826                                 {
3827                                     TCGv_ptr r_tsptr;
3828
3829                                     r_tsptr = tcg_temp_new_ptr();
3830                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3831                                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3832                                     tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3833                                                    offsetof(trap_state, tt));
3834                                     tcg_temp_free_ptr(r_tsptr);
3835                                 }
3836                                 break;
3837                             case 4: // tick
3838                                 {
3839                                     TCGv_ptr r_tickptr;
3840
3841                                     r_tickptr = tcg_temp_new_ptr();
3842                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3843                                                    offsetof(CPUSPARCState, tick));
3844                                     gen_helper_tick_set_count(r_tickptr,
3845                                                               cpu_tmp0);
3846                                     tcg_temp_free_ptr(r_tickptr);
3847                                 }
3848                                 break;
3849                             case 5: // tba
3850                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3851                                 break;
3852                             case 6: // pstate
3853                                 save_state(dc);
3854                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3855                                 dc->npc = DYNAMIC_PC;
3856                                 break;
3857                             case 7: // tl
3858                                 save_state(dc);
3859                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3860                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3861                                                offsetof(CPUSPARCState, tl));
3862                                 dc->npc = DYNAMIC_PC;
3863                                 break;
3864                             case 8: // pil
3865                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
3866                                 break;
3867                             case 9: // cwp
3868                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3869                                 break;
3870                             case 10: // cansave
3871                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3872                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3873                                                offsetof(CPUSPARCState,
3874                                                         cansave));
3875                                 break;
3876                             case 11: // canrestore
3877                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3878                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3879                                                offsetof(CPUSPARCState,
3880                                                         canrestore));
3881                                 break;
3882                             case 12: // cleanwin
3883                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3884                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3885                                                offsetof(CPUSPARCState,
3886                                                         cleanwin));
3887                                 break;
3888                             case 13: // otherwin
3889                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3890                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3891                                                offsetof(CPUSPARCState,
3892                                                         otherwin));
3893                                 break;
3894                             case 14: // wstate
3895                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3896                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3897                                                offsetof(CPUSPARCState,
3898                                                         wstate));
3899                                 break;
3900                             case 16: // UA2005 gl
3901                                 CHECK_IU_FEATURE(dc, GL);
3902                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3903                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3904                                                offsetof(CPUSPARCState, gl));
3905                                 break;
3906                             case 26: // UA2005 strand status
3907                                 CHECK_IU_FEATURE(dc, HYPV);
3908                                 if (!hypervisor(dc))
3909                                     goto priv_insn;
3910                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3911                                 break;
3912                             default:
3913                                 goto illegal_insn;
3914                             }
3915 #else
3916                             tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3917                             if (dc->def->nwindows != 32)
3918                                 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3919                                                 (1 << dc->def->nwindows) - 1);
3920                             tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3921 #endif
3922                         }
3923                         break;
3924                     case 0x33: /* wrtbr, UA2005 wrhpr */
3925                         {
3926 #ifndef TARGET_SPARC64
3927                             if (!supervisor(dc))
3928                                 goto priv_insn;
3929                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3930 #else
3931                             CHECK_IU_FEATURE(dc, HYPV);
3932                             if (!hypervisor(dc))
3933                                 goto priv_insn;
3934                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3935                             switch (rd) {
3936                             case 0: // hpstate
3937                                 // XXX gen_op_wrhpstate();
3938                                 save_state(dc);
3939                                 gen_op_next_insn();
3940                                 tcg_gen_exit_tb(0);
3941                                 dc->is_br = 1;
3942                                 break;
3943                             case 1: // htstate
3944                                 // XXX gen_op_wrhtstate();
3945                                 break;
3946                             case 3: // hintp
3947                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3948                                 break;
3949                             case 5: // htba
3950                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3951                                 break;
3952                             case 31: // hstick_cmpr
3953                                 {
3954                                     TCGv_ptr r_tickptr;
3955
3956                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3957                                     r_tickptr = tcg_temp_new_ptr();
3958                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3959                                                    offsetof(CPUSPARCState, hstick));
3960                                     gen_helper_tick_set_limit(r_tickptr,
3961                                                               cpu_hstick_cmpr);
3962                                     tcg_temp_free_ptr(r_tickptr);
3963                                 }
3964                                 break;
3965                             case 6: // hver readonly
3966                             default:
3967                                 goto illegal_insn;
3968                             }
3969 #endif
3970                         }
3971                         break;
3972 #endif
3973 #ifdef TARGET_SPARC64
3974                     case 0x2c: /* V9 movcc */
3975                         {
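                                 /* movcc: conditionally copy rs2/simm11 into rd.
                                    rd is preloaded so that tcg_gen_movcond_tl
                                    leaves it unchanged when the condition is
                                    false.  */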
3976                             int cc = GET_FIELD_SP(insn, 11, 12);
3977                             int cond = GET_FIELD_SP(insn, 14, 17);
3978                             DisasCompare cmp;
3979                             TCGv dst;
3980
3981                             if (insn & (1 << 18)) {
3982                                 if (cc == 0) {
3983                                     gen_compare(&cmp, 0, cond, dc);
3984                                 } else if (cc == 2) {
3985                                     gen_compare(&cmp, 1, cond, dc);
3986                                 } else {
3987                                     goto illegal_insn;
3988                                 }
3989                             } else {
3990                                 gen_fcompare(&cmp, cc, cond);
3991                             }
3992
3993                             /* The get_src2 above loaded the normal 13-bit
3994                                immediate field, not the 11-bit field we have
3995                                in movcc.  But it did handle the reg case.  */
3996                             if (IS_IMM) {
3997                                 simm = GET_FIELD_SPs(insn, 0, 10);
3998                                 tcg_gen_movi_tl(cpu_src2, simm);
3999                             }
4000
4001                             dst = gen_load_gpr(dc, rd);
4002                             tcg_gen_movcond_tl(cmp.cond, dst,
4003                                                cmp.c1, cmp.c2,
4004                                                cpu_src2, dst);
4005                             free_compare(&cmp);
4006                             gen_store_gpr(dc, rd, dst);
4007                             break;
4008                         }
4009                     case 0x2d: /* V9 sdivx */
4010                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4011                         gen_store_gpr(dc, rd, cpu_dst);
4012                         break;
4013                     case 0x2e: /* V9 popc */
4014                         gen_helper_popc(cpu_dst, cpu_src2);
4015                         gen_store_gpr(dc, rd, cpu_dst);
4016                         break;
4017                     case 0x2f: /* V9 movr */
4018                         {
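                                 /* movr: like movcc above, but the condition is
                                    a register test of rs1 against zero
                                    (gen_compare_reg) rather than a
                                    condition-code test.  */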
4019                             int cond = GET_FIELD_SP(insn, 10, 12);
4020                             DisasCompare cmp;
4021                             TCGv dst;
4022
4023                             gen_compare_reg(&cmp, cond, cpu_src1);
4024
4025                             /* The get_src2 above loaded the normal 13-bit
4026                                immediate field, not the 10-bit field we have
4027                                in movr.  But it did handle the reg case.  */
4028                             if (IS_IMM) {
4029                                 simm = GET_FIELD_SPs(insn, 0, 9);
4030                                 tcg_gen_movi_tl(cpu_src2, simm);
4031                             }
4032
4033                             dst = gen_load_gpr(dc, rd);
4034                             tcg_gen_movcond_tl(cmp.cond, dst,
4035                                                cmp.c1, cmp.c2,
4036                                                cpu_src2, dst);
4037                             free_compare(&cmp);
4038                             gen_store_gpr(dc, rd, dst);
4039                             break;
4040                         }
4041 #endif
4042                     default:
4043                         goto illegal_insn;
4044                     }
4045                 }
4046             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4047 #ifdef TARGET_SPARC64
4048                 int opf = GET_FIELD_SP(insn, 5, 13);
4049                 rs1 = GET_FIELD(insn, 13, 17);
4050                 rs2 = GET_FIELD(insn, 27, 31);
4051                 if (gen_trap_ifnofpu(dc)) {
4052                     goto jmp_insn;
4053                 }
4054
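                     /* opf selects the VIS operation: the edge* cases compute
                        edge masks for partial stores, array8/16/32 compute
                        blocked-array addresses, alignaddr records the alignment
                        in %gsr for a later faligndata, and most of the rest are
                        partitioned SIMD operations on the FP registers.  */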
4055                 switch (opf) {
4056                 case 0x000: /* VIS I edge8cc */
4057                     CHECK_FPU_FEATURE(dc, VIS1);
4058                     cpu_src1 = gen_load_gpr(dc, rs1);
4059                     cpu_src2 = gen_load_gpr(dc, rs2);
4060                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4061                     gen_store_gpr(dc, rd, cpu_dst);
4062                     break;
4063                 case 0x001: /* VIS II edge8n */
4064                     CHECK_FPU_FEATURE(dc, VIS2);
4065                     cpu_src1 = gen_load_gpr(dc, rs1);
4066                     cpu_src2 = gen_load_gpr(dc, rs2);
4067                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4068                     gen_store_gpr(dc, rd, cpu_dst);
4069                     break;
4070                 case 0x002: /* VIS I edge8lcc */
4071                     CHECK_FPU_FEATURE(dc, VIS1);
4072                     cpu_src1 = gen_load_gpr(dc, rs1);
4073                     cpu_src2 = gen_load_gpr(dc, rs2);
4074                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4075                     gen_store_gpr(dc, rd, cpu_dst);
4076                     break;
4077                 case 0x003: /* VIS II edge8ln */
4078                     CHECK_FPU_FEATURE(dc, VIS2);
4079                     cpu_src1 = gen_load_gpr(dc, rs1);
4080                     cpu_src2 = gen_load_gpr(dc, rs2);
4081                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4082                     gen_store_gpr(dc, rd, cpu_dst);
4083                     break;
4084                 case 0x004: /* VIS I edge16cc */
4085                     CHECK_FPU_FEATURE(dc, VIS1);
4086                     cpu_src1 = gen_load_gpr(dc, rs1);
4087                     cpu_src2 = gen_load_gpr(dc, rs2);
4088                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4089                     gen_store_gpr(dc, rd, cpu_dst);
4090                     break;
4091                 case 0x005: /* VIS II edge16n */
4092                     CHECK_FPU_FEATURE(dc, VIS2);
4093                     cpu_src1 = gen_load_gpr(dc, rs1);
4094                     cpu_src2 = gen_load_gpr(dc, rs2);
4095                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4096                     gen_store_gpr(dc, rd, cpu_dst);
4097                     break;
4098                 case 0x006: /* VIS I edge16lcc */
4099                     CHECK_FPU_FEATURE(dc, VIS1);
4100                     cpu_src1 = gen_load_gpr(dc, rs1);
4101                     cpu_src2 = gen_load_gpr(dc, rs2);
4102                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4103                     gen_store_gpr(dc, rd, cpu_dst);
4104                     break;
4105                 case 0x007: /* VIS II edge16ln */
4106                     CHECK_FPU_FEATURE(dc, VIS2);
4107                     cpu_src1 = gen_load_gpr(dc, rs1);
4108                     cpu_src2 = gen_load_gpr(dc, rs2);
4109                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4110                     gen_store_gpr(dc, rd, cpu_dst);
4111                     break;
4112                 case 0x008: /* VIS I edge32cc */
4113                     CHECK_FPU_FEATURE(dc, VIS1);
4114                     cpu_src1 = gen_load_gpr(dc, rs1);
4115                     cpu_src2 = gen_load_gpr(dc, rs2);
4116                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4117                     gen_store_gpr(dc, rd, cpu_dst);
4118                     break;
4119                 case 0x009: /* VIS II edge32n */
4120                     CHECK_FPU_FEATURE(dc, VIS2);
4121                     cpu_src1 = gen_load_gpr(dc, rs1);
4122                     cpu_src2 = gen_load_gpr(dc, rs2);
4123                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4124                     gen_store_gpr(dc, rd, cpu_dst);
4125                     break;
4126                 case 0x00a: /* VIS I edge32lcc */
4127                     CHECK_FPU_FEATURE(dc, VIS1);
4128                     cpu_src1 = gen_load_gpr(dc, rs1);
4129                     cpu_src2 = gen_load_gpr(dc, rs2);
4130                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4131                     gen_store_gpr(dc, rd, cpu_dst);
4132                     break;
4133                 case 0x00b: /* VIS II edge32ln */
4134                     CHECK_FPU_FEATURE(dc, VIS2);
4135                     cpu_src1 = gen_load_gpr(dc, rs1);
4136                     cpu_src2 = gen_load_gpr(dc, rs2);
4137                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4138                     gen_store_gpr(dc, rd, cpu_dst);
4139                     break;
4140                 case 0x010: /* VIS I array8 */
4141                     CHECK_FPU_FEATURE(dc, VIS1);
4142                     cpu_src1 = gen_load_gpr(dc, rs1);
4143                     cpu_src2 = gen_load_gpr(dc, rs2);
4144                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4145                     gen_store_gpr(dc, rd, cpu_dst);
4146                     break;
4147                 case 0x012: /* VIS I array16 */
4148                     CHECK_FPU_FEATURE(dc, VIS1);
4149                     cpu_src1 = gen_load_gpr(dc, rs1);
4150                     cpu_src2 = gen_load_gpr(dc, rs2);
4151                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4152                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4153                     gen_store_gpr(dc, rd, cpu_dst);
4154                     break;
4155                 case 0x014: /* VIS I array32 */
4156                     CHECK_FPU_FEATURE(dc, VIS1);
4157                     cpu_src1 = gen_load_gpr(dc, rs1);
4158                     cpu_src2 = gen_load_gpr(dc, rs2);
4159                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4160                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4161                     gen_store_gpr(dc, rd, cpu_dst);
4162                     break;
4163                 case 0x018: /* VIS I alignaddr */
4164                     CHECK_FPU_FEATURE(dc, VIS1);
4165                     cpu_src1 = gen_load_gpr(dc, rs1);
4166                     cpu_src2 = gen_load_gpr(dc, rs2);
4167                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4168                     gen_store_gpr(dc, rd, cpu_dst);
4169                     break;
4170                 case 0x01a: /* VIS I alignaddrl */
4171                     CHECK_FPU_FEATURE(dc, VIS1);
4172                     cpu_src1 = gen_load_gpr(dc, rs1);
4173                     cpu_src2 = gen_load_gpr(dc, rs2);
4174                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4175                     gen_store_gpr(dc, rd, cpu_dst);
4176                     break;
4177                 case 0x019: /* VIS II bmask */
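                         /* bmask: the sum is written to rd and also deposited
                            into the upper 32 bits of %gsr, where bshuffle
                            later uses it as its permutation mask.  */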
4178                     CHECK_FPU_FEATURE(dc, VIS2);
4179                     cpu_src1 = gen_load_gpr(dc, rs1);
4180                     cpu_src2 = gen_load_gpr(dc, rs2);
4181                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4182                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4183                     gen_store_gpr(dc, rd, cpu_dst);
4184                     break;
4185                 case 0x020: /* VIS I fcmple16 */
4186                     CHECK_FPU_FEATURE(dc, VIS1);
4187                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4188                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4189                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4190                     gen_store_gpr(dc, rd, cpu_dst);
4191                     break;
4192                 case 0x022: /* VIS I fcmpne16 */
4193                     CHECK_FPU_FEATURE(dc, VIS1);
4194                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4195                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4196                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4197                     gen_store_gpr(dc, rd, cpu_dst);
4198                     break;
4199                 case 0x024: /* VIS I fcmple32 */
4200                     CHECK_FPU_FEATURE(dc, VIS1);
4201                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4202                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4203                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4204                     gen_store_gpr(dc, rd, cpu_dst);
4205                     break;
4206                 case 0x026: /* VIS I fcmpne32 */
4207                     CHECK_FPU_FEATURE(dc, VIS1);
4208                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4209                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4210                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4211                     gen_store_gpr(dc, rd, cpu_dst);
4212                     break;
4213                 case 0x028: /* VIS I fcmpgt16 */
4214                     CHECK_FPU_FEATURE(dc, VIS1);
4215                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4216                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4217                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4218                     gen_store_gpr(dc, rd, cpu_dst);
4219                     break;
4220                 case 0x02a: /* VIS I fcmpeq16 */
4221                     CHECK_FPU_FEATURE(dc, VIS1);
4222                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4223                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4224                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4225                     gen_store_gpr(dc, rd, cpu_dst);
4226                     break;
4227                 case 0x02c: /* VIS I fcmpgt32 */
4228                     CHECK_FPU_FEATURE(dc, VIS1);
4229                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4230                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4231                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4232                     gen_store_gpr(dc, rd, cpu_dst);
4233                     break;
4234                 case 0x02e: /* VIS I fcmpeq32 */
4235                     CHECK_FPU_FEATURE(dc, VIS1);
4236                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4237                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4238                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4239                     gen_store_gpr(dc, rd, cpu_dst);
4240                     break;
4241                 case 0x031: /* VIS I fmul8x16 */
4242                     CHECK_FPU_FEATURE(dc, VIS1);
4243                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4244                     break;
4245                 case 0x033: /* VIS I fmul8x16au */
4246                     CHECK_FPU_FEATURE(dc, VIS1);
4247                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4248                     break;
4249                 case 0x035: /* VIS I fmul8x16al */
4250                     CHECK_FPU_FEATURE(dc, VIS1);
4251                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4252                     break;
4253                 case 0x036: /* VIS I fmul8sux16 */
4254                     CHECK_FPU_FEATURE(dc, VIS1);
4255                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4256                     break;
4257                 case 0x037: /* VIS I fmul8ulx16 */
4258                     CHECK_FPU_FEATURE(dc, VIS1);
4259                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4260                     break;
4261                 case 0x038: /* VIS I fmuld8sux16 */
4262                     CHECK_FPU_FEATURE(dc, VIS1);
4263                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4264                     break;
4265                 case 0x039: /* VIS I fmuld8ulx16 */
4266                     CHECK_FPU_FEATURE(dc, VIS1);
4267                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4268                     break;
4269                 case 0x03a: /* VIS I fpack32 */
4270                     CHECK_FPU_FEATURE(dc, VIS1);
4271                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4272                     break;
4273                 case 0x03b: /* VIS I fpack16 */
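                         /* fpack16/fpackfix clip and scale the input lanes
                            using the scale factor held in %gsr, which is why
                            cpu_gsr is passed to the helpers.  */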
4274                     CHECK_FPU_FEATURE(dc, VIS1);
4275                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4276                     cpu_dst_32 = gen_dest_fpr_F();
4277                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4278                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4279                     break;
4280                 case 0x03d: /* VIS I fpackfix */
4281                     CHECK_FPU_FEATURE(dc, VIS1);
4282                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4283                     cpu_dst_32 = gen_dest_fpr_F();
4284                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4285                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4286                     break;
4287                 case 0x03e: /* VIS I pdist */
4288                     CHECK_FPU_FEATURE(dc, VIS1);
4289                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4290                     break;
4291                 case 0x048: /* VIS I faligndata */
4292                     CHECK_FPU_FEATURE(dc, VIS1);
4293                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4294                     break;
4295                 case 0x04b: /* VIS I fpmerge */
4296                     CHECK_FPU_FEATURE(dc, VIS1);
4297                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4298                     break;
4299                 case 0x04c: /* VIS II bshuffle */
4300                     CHECK_FPU_FEATURE(dc, VIS2);
4301                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4302                     break;
4303                 case 0x04d: /* VIS I fexpand */
4304                     CHECK_FPU_FEATURE(dc, VIS1);
4305                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4306                     break;
4307                 case 0x050: /* VIS I fpadd16 */
4308                     CHECK_FPU_FEATURE(dc, VIS1);
4309                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4310                     break;
4311                 case 0x051: /* VIS I fpadd16s */
4312                     CHECK_FPU_FEATURE(dc, VIS1);
4313                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4314                     break;
4315                 case 0x052: /* VIS I fpadd32 */
4316                     CHECK_FPU_FEATURE(dc, VIS1);
4317                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4318                     break;
4319                 case 0x053: /* VIS I fpadd32s */
4320                     CHECK_FPU_FEATURE(dc, VIS1);
4321                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4322                     break;
4323                 case 0x054: /* VIS I fpsub16 */
4324                     CHECK_FPU_FEATURE(dc, VIS1);
4325                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4326                     break;
4327                 case 0x055: /* VIS I fpsub16s */
4328                     CHECK_FPU_FEATURE(dc, VIS1);
4329                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4330                     break;
4331                 case 0x056: /* VIS I fpsub32 */
4332                     CHECK_FPU_FEATURE(dc, VIS1);
4333                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4334                     break;
4335                 case 0x057: /* VIS I fpsub32s */
4336                     CHECK_FPU_FEATURE(dc, VIS1);
4337                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4338                     break;
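                     /* The bitwise VIS ops below (fzero, fnor, fandnot, fnot,
                        fxor, fnand, fand, ...) need no helpers: they map
                        directly onto TCG i64/i32 operations on the FP
                        registers.  */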
4339                 case 0x060: /* VIS I fzero */
4340                     CHECK_FPU_FEATURE(dc, VIS1);
4341                     cpu_dst_64 = gen_dest_fpr_D();
4342                     tcg_gen_movi_i64(cpu_dst_64, 0);
4343                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4344                     break;
4345                 case 0x061: /* VIS I fzeros */
4346                     CHECK_FPU_FEATURE(dc, VIS1);
4347                     cpu_dst_32 = gen_dest_fpr_F();
4348                     tcg_gen_movi_i32(cpu_dst_32, 0);
4349                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4350                     break;
4351                 case 0x062: /* VIS I fnor */
4352                     CHECK_FPU_FEATURE(dc, VIS1);
4353                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4354                     break;
4355                 case 0x063: /* VIS I fnors */
4356                     CHECK_FPU_FEATURE(dc, VIS1);
4357                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4358                     break;
4359                 case 0x064: /* VIS I fandnot2 */
4360                     CHECK_FPU_FEATURE(dc, VIS1);
4361                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4362                     break;
4363                 case 0x065: /* VIS I fandnot2s */
4364                     CHECK_FPU_FEATURE(dc, VIS1);
4365                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4366                     break;
4367                 case 0x066: /* VIS I fnot2 */
4368                     CHECK_FPU_FEATURE(dc, VIS1);
4369                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4370                     break;
4371                 case 0x067: /* VIS I fnot2s */
4372                     CHECK_FPU_FEATURE(dc, VIS1);
4373                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4374                     break;
4375                 case 0x068: /* VIS I fandnot1 */
4376                     CHECK_FPU_FEATURE(dc, VIS1);
4377                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4378                     break;
4379                 case 0x069: /* VIS I fandnot1s */
4380                     CHECK_FPU_FEATURE(dc, VIS1);
4381                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4382                     break;
4383                 case 0x06a: /* VIS I fnot1 */
4384                     CHECK_FPU_FEATURE(dc, VIS1);
4385                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4386                     break;
4387                 case 0x06b: /* VIS I fnot1s */
4388                     CHECK_FPU_FEATURE(dc, VIS1);
4389                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4390                     break;
4391                 case 0x06c: /* VIS I fxor */
4392                     CHECK_FPU_FEATURE(dc, VIS1);
4393                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4394                     break;
4395                 case 0x06d: /* VIS I fxors */
4396                     CHECK_FPU_FEATURE(dc, VIS1);
4397                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4398                     break;
4399                 case 0x06e: /* VIS I fnand */
4400                     CHECK_FPU_FEATURE(dc, VIS1);
4401                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4402                     break;
4403                 case 0x06f: /* VIS I fnands */
4404                     CHECK_FPU_FEATURE(dc, VIS1);
4405                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4406                     break;
4407                 case 0x070: /* VIS I fand */
4408                     CHECK_FPU_FEATURE(dc, VIS1);
4409                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4410                     break;
4411                 case 0x071: /* VIS I fands */
4412                     CHECK_FPU_FEATURE(dc, VIS1);
4413                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4414                     break;
4415                 case 0x072: /* VIS I fxnor */
4416                     CHECK_FPU_FEATURE(dc, VIS1);
4417                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4418                     break;
4419                 case 0x073: /* VIS I fxnors */
4420                     CHECK_FPU_FEATURE(dc, VIS1);
4421                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4422                     break;
4423                 case 0x074: /* VIS I fsrc1 */
4424                     CHECK_FPU_FEATURE(dc, VIS1);
4425                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4426                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4427                     break;
4428                 case 0x075: /* VIS I fsrc1s */
4429                     CHECK_FPU_FEATURE(dc, VIS1);
4430                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4431                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4432                     break;
4433                 case 0x076: /* VIS I fornot2 */
4434                     CHECK_FPU_FEATURE(dc, VIS1);
4435                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4436                     break;
4437                 case 0x077: /* VIS I fornot2s */
4438                     CHECK_FPU_FEATURE(dc, VIS1);
4439                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4440                     break;
4441                 case 0x078: /* VIS I fsrc2 */
4442                     CHECK_FPU_FEATURE(dc, VIS1);
4443                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4444                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4445                     break;
4446                 case 0x079: /* VIS I fsrc2s */
4447                     CHECK_FPU_FEATURE(dc, VIS1);
4448                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4449                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4450                     break;
4451                 case 0x07a: /* VIS I fornot1 */
4452                     CHECK_FPU_FEATURE(dc, VIS1);
4453                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4454                     break;
4455                 case 0x07b: /* VIS I fornot1s */
4456                     CHECK_FPU_FEATURE(dc, VIS1);
4457                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4458                     break;
4459                 case 0x07c: /* VIS I for */
4460                     CHECK_FPU_FEATURE(dc, VIS1);
4461                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4462                     break;
4463                 case 0x07d: /* VIS I fors */
4464                     CHECK_FPU_FEATURE(dc, VIS1);
4465                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4466                     break;
4467                 case 0x07e: /* VIS I fone */
4468                     CHECK_FPU_FEATURE(dc, VIS1);
4469                     cpu_dst_64 = gen_dest_fpr_D();
4470                     tcg_gen_movi_i64(cpu_dst_64, -1);
4471                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4472                     break;
4473                 case 0x07f: /* VIS I fones */
4474                     CHECK_FPU_FEATURE(dc, VIS1);
4475                     cpu_dst_32 = gen_dest_fpr_F();
4476                     tcg_gen_movi_i32(cpu_dst_32, -1);
4477                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4478                     break;
4479                 case 0x080: /* VIS I shutdown */
4480                 case 0x081: /* VIS II siam */
4481                     // XXX
4482                     goto illegal_insn;
4483                 default:
4484                     goto illegal_insn;
4485                 }
4486 #else
4487                 goto ncp_insn;
4488 #endif
4489             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4490 #ifdef TARGET_SPARC64
4491                 goto illegal_insn;
4492 #else
4493                 goto ncp_insn;
4494 #endif
4495 #ifdef TARGET_SPARC64
4496             } else if (xop == 0x39) { /* V9 return */
4497                 TCGv_i32 r_const;
4498
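                     /* V9 return: compute the target address (rs1 + simm13 or
                        rs1 + rs2), pop the register window with helper_restore,
                        verify the target is word-aligned and branch to it via
                        the dynamic npc.  */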
4499                 save_state(dc);
4500                 cpu_src1 = get_src1(dc, insn);
4501                 if (IS_IMM) {   /* immediate */
4502                     simm = GET_FIELDs(insn, 19, 31);
4503                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4504                 } else {                /* register */
4505                     rs2 = GET_FIELD(insn, 27, 31);
4506                     if (rs2) {
4507                         cpu_src2 = gen_load_gpr(dc, rs2);
4508                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4509                     } else {
4510                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4511                     }
4512                 }
4513                 gen_helper_restore(cpu_env);
4514                 gen_mov_pc_npc(dc);
4515                 r_const = tcg_const_i32(3);
4516                 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4517                 tcg_temp_free_i32(r_const);
4518                 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4519                 dc->npc = DYNAMIC_PC;
4520                 goto jmp_insn;
4521 #endif
4522             } else {
4523                 cpu_src1 = get_src1(dc, insn);
4524                 if (IS_IMM) {   /* immediate */
4525                     simm = GET_FIELDs(insn, 19, 31);
4526                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4527                 } else {                /* register */
4528                     rs2 = GET_FIELD(insn, 27, 31);
4529                     if (rs2) {
4530                         cpu_src2 = gen_load_gpr(dc, rs2);
4531                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4532                     } else {
4533                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4534                     }
4535                 }
4536                 switch (xop) {
4537                 case 0x38:      /* jmpl */
4538                     {
4539                         TCGv t;
4540                         TCGv_i32 r_const;
4541
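                             /* jmpl writes the address of the jmpl instruction
                                itself into rd, then branches to the computed
                                target in cpu_dst after a word-alignment check. */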
4542                         t = gen_dest_gpr(dc, rd);
4543                         tcg_gen_movi_tl(t, dc->pc);
4544                         gen_store_gpr(dc, rd, t);
4545                         gen_mov_pc_npc(dc);
4546                         r_const = tcg_const_i32(3);
4547                         gen_helper_check_align(cpu_env, cpu_dst, r_const);
4548                         tcg_temp_free_i32(r_const);
4549                         gen_address_mask(dc, cpu_dst);
4550                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4551                         dc->npc = DYNAMIC_PC;
4552                     }
4553                     goto jmp_insn;
4554 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4555                 case 0x39:      /* rett, V9 return */
4556                     {
4557                         TCGv_i32 r_const;
4558
4559                         if (!supervisor(dc))
4560                             goto priv_insn;
4561                         gen_mov_pc_npc(dc);
4562                         r_const = tcg_const_i32(3);
4563                         gen_helper_check_align(cpu_env, cpu_dst, r_const);
4564                         tcg_temp_free_i32(r_const);
4565                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4566                         dc->npc = DYNAMIC_PC;
4567                         gen_helper_rett(cpu_env);
4568                     }
4569                     goto jmp_insn;
4570 #endif
4571                 case 0x3b: /* flush */
4572                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4573                         goto unimp_flush;
4574                     /* nop */
4575                     break;
4576                 case 0x3c:      /* save */
4577                     save_state(dc);
4578                     gen_helper_save(cpu_env);
4579                     gen_store_gpr(dc, rd, cpu_dst);
4580                     break;
4581                 case 0x3d:      /* restore */
4582                     save_state(dc);
4583                     gen_helper_restore(cpu_env);
4584                     gen_store_gpr(dc, rd, cpu_dst);
4585                     break;
4586 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4587                 case 0x3e:      /* V9 done/retry */
4588                     {
4589                         switch (rd) {
4590                         case 0:
4591                             if (!supervisor(dc))
4592                                 goto priv_insn;
4593                             dc->npc = DYNAMIC_PC;
4594                             dc->pc = DYNAMIC_PC;
4595                             gen_helper_done(cpu_env);
4596                             goto jmp_insn;
4597                         case 1:
4598                             if (!supervisor(dc))
4599                                 goto priv_insn;
4600                             dc->npc = DYNAMIC_PC;
4601                             dc->pc = DYNAMIC_PC;
4602                             gen_helper_retry(cpu_env);
4603                             goto jmp_insn;
4604                         default:
4605                             goto illegal_insn;
4606                         }
4607                     }
4608                     break;
4609 #endif
4610                 default:
4611                     goto illegal_insn;
4612                 }
4613             }
4614             break;
4615         }
4616         break;
4617     case 3:                     /* load/store instructions */
4618         {
4619             unsigned int xop = GET_FIELD(insn, 7, 12);
4620
4621             cpu_src1 = get_src1(dc, insn);
4622             if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
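                     /* For casa/casxa the effective address is rs1 alone; rs2
                        holds the comparison value that gen_cas_asi/gen_casx_asi
                        consume further down.  */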
4623                 rs2 = GET_FIELD(insn, 27, 31);
4624                 cpu_src2 = gen_load_gpr(dc, rs2);
4625                 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4626             } else if (IS_IMM) {     /* immediate */
4627                 simm = GET_FIELDs(insn, 19, 31);
4628                 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4629             } else {            /* register */
4630                 rs2 = GET_FIELD(insn, 27, 31);
4631                 if (rs2 != 0) {
4632                     cpu_src2 = gen_load_gpr(dc, rs2);
4633                     tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4634                 } else {
4635                     tcg_gen_mov_tl(cpu_addr, cpu_src1);
4636                 }
4637             }
4638             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4639                 (xop > 0x17 && xop <= 0x1d ) ||
4640                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
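                     /* Integer loads: ld/ldub/lduh/ldd, their signed and
                        alternate-space forms, ldstub and swap.  Unless a case
                        jumps to skip_move, the loaded value in cpu_val is
                        written to rd after this switch.  */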
4641                 switch (xop) {
4642                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4643                     gen_address_mask(dc, cpu_addr);
4644                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4645                     break;
4646                 case 0x1:       /* ldub, load unsigned byte */
4647                     gen_address_mask(dc, cpu_addr);
4648                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4649                     break;
4650                 case 0x2:       /* lduh, load unsigned halfword */
4651                     gen_address_mask(dc, cpu_addr);
4652                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4653                     break;
4654                 case 0x3:       /* ldd, load double word */
4655                     if (rd & 1)
4656                         goto illegal_insn;
4657                     else {
4658                         TCGv_i32 r_const;
4659
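                             /* ldd performs one 64-bit load and splits it
                                across the even/odd register pair: the low word
                                goes to rd + 1 here, and the high word is left
                                in cpu_val so the common store below writes it
                                to rd.  */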
4660                         save_state(dc);
4661                         r_const = tcg_const_i32(7);
4662                         /* XXX remove alignment check */
4663                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4664                         tcg_temp_free_i32(r_const);
4665                         gen_address_mask(dc, cpu_addr);
4666                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4667                         tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4668                         tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4669                         gen_store_gpr(dc, rd + 1, cpu_tmp0);
4670                         tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4671                         tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4672                         tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4673                     }
4674                     break;
4675                 case 0x9:       /* ldsb, load signed byte */
4676                     gen_address_mask(dc, cpu_addr);
4677                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4678                     break;
4679                 case 0xa:       /* ldsh, load signed halfword */
4680                     gen_address_mask(dc, cpu_addr);
4681                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4682                     break;
4683                 case 0xd:       /* ldstub -- XXX: should be atomic */
4684                     {
4685                         TCGv r_const;
4686
4687                         gen_address_mask(dc, cpu_addr);
4688                         tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4689                         r_const = tcg_const_tl(0xff);
4690                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4691                         tcg_temp_free(r_const);
4692                     }
4693                     break;
4694                 case 0x0f:      /* swap, swap register with memory. Should
4695                                    also be atomic */
4696                     CHECK_IU_FEATURE(dc, SWAP);
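                         /* Emulated as a load followed by a store: the old
                            memory word is left in cpu_val and written back to
                            rd after the switch, while the rd value is stored
                            to memory.  */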
4697                     cpu_src1 = gen_load_gpr(dc, rd);
4698                     gen_address_mask(dc, cpu_addr);
4699                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4700                     tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4701                     tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4702                     break;
4703 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4704                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4705 #ifndef TARGET_SPARC64
4706                     if (IS_IMM)
4707                         goto illegal_insn;
4708                     if (!supervisor(dc))
4709                         goto priv_insn;
4710 #endif
4711                     save_state(dc);
4712                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4713                     break;
4714                 case 0x11:      /* lduba, load unsigned byte alternate */
4715 #ifndef TARGET_SPARC64
4716                     if (IS_IMM)
4717                         goto illegal_insn;
4718                     if (!supervisor(dc))
4719                         goto priv_insn;
4720 #endif
4721                     save_state(dc);
4722                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4723                     break;
4724                 case 0x12:      /* lduha, load unsigned halfword alternate */
4725 #ifndef TARGET_SPARC64
4726                     if (IS_IMM)
4727                         goto illegal_insn;
4728                     if (!supervisor(dc))
4729                         goto priv_insn;
4730 #endif
4731                     save_state(dc);
4732                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4733                     break;
4734                 case 0x13:      /* ldda, load double word alternate */
4735 #ifndef TARGET_SPARC64
4736                     if (IS_IMM)
4737                         goto illegal_insn;
4738                     if (!supervisor(dc))
4739                         goto priv_insn;
4740 #endif
4741                     if (rd & 1)
4742                         goto illegal_insn;
4743                     save_state(dc);
4744                     gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4745                     goto skip_move;
4746                 case 0x19:      /* ldsba, load signed byte alternate */
4747 #ifndef TARGET_SPARC64
4748                     if (IS_IMM)
4749                         goto illegal_insn;
4750                     if (!supervisor(dc))
4751                         goto priv_insn;
4752 #endif
4753                     save_state(dc);
4754                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4755                     break;
4756                 case 0x1a:      /* ldsha, load signed halfword alternate */
4757 #ifndef TARGET_SPARC64
4758                     if (IS_IMM)
4759                         goto illegal_insn;
4760                     if (!supervisor(dc))
4761                         goto priv_insn;
4762 #endif
4763                     save_state(dc);
4764                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4765                     break;
4766                 case 0x1d:      /* ldstuba -- XXX: should be atomic */
4767 #ifndef TARGET_SPARC64
4768                     if (IS_IMM)
4769                         goto illegal_insn;
4770                     if (!supervisor(dc))
4771                         goto priv_insn;
4772 #endif
4773                     save_state(dc);
4774                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4775                     break;
4776                 case 0x1f:      /* swapa, swap reg with alternate memory.
4777                                    Should also be atomic */
4778                     CHECK_IU_FEATURE(dc, SWAP);
4779 #ifndef TARGET_SPARC64
4780                     if (IS_IMM)
4781                         goto illegal_insn;
4782                     if (!supervisor(dc))
4783                         goto priv_insn;
4784 #endif
4785                     save_state(dc);
4786                     cpu_src1 = gen_load_gpr(dc, rd);
4787                     gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4788                     break;
4789
4790 #ifndef TARGET_SPARC64
4791                 case 0x30: /* ldc */
4792                 case 0x31: /* ldcsr */
4793                 case 0x33: /* lddc */
4794                     goto ncp_insn;
4795 #endif
4796 #endif
4797 #ifdef TARGET_SPARC64
4798                 case 0x08: /* V9 ldsw */
4799                     gen_address_mask(dc, cpu_addr);
4800                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4801                     break;
4802                 case 0x0b: /* V9 ldx */
4803                     gen_address_mask(dc, cpu_addr);
4804                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4805                     break;
4806                 case 0x18: /* V9 ldswa */
4807                     save_state(dc);
4808                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4809                     break;
4810                 case 0x1b: /* V9 ldxa */
4811                     save_state(dc);
4812                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4813                     break;
4814                 case 0x2d: /* V9 prefetch, no effect */
4815                     goto skip_move;
4816                 case 0x30: /* V9 ldfa */
4817                     if (gen_trap_ifnofpu(dc)) {
4818                         goto jmp_insn;
4819                     }
4820                     save_state(dc);
4821                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4822                     gen_update_fprs_dirty(rd);
4823                     goto skip_move;
4824                 case 0x33: /* V9 lddfa */
4825                     if (gen_trap_ifnofpu(dc)) {
4826                         goto jmp_insn;
4827                     }
4828                     save_state(dc);
4829                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4830                     gen_update_fprs_dirty(DFPREG(rd));
4831                     goto skip_move;
4832                 case 0x3d: /* V9 prefetcha, no effect */
4833                     goto skip_move;
4834                 case 0x32: /* V9 ldqfa */
4835                     CHECK_FPU_FEATURE(dc, FLOAT128);
4836                     if (gen_trap_ifnofpu(dc)) {
4837                         goto jmp_insn;
4838                     }
4839                     save_state(dc);
4840                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4841                     gen_update_fprs_dirty(QFPREG(rd));
4842                     goto skip_move;
4843 #endif
4844                 default:
4845                     goto illegal_insn;
4846                 }
4847                 gen_store_gpr(dc, rd, cpu_val);
4848 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4849             skip_move: ;
4850 #endif
4851             } else if (xop >= 0x20 && xop < 0x24) {
4852                 if (gen_trap_ifnofpu(dc)) {
4853                     goto jmp_insn;
4854                 }
4855                 save_state(dc);
4856                 switch (xop) {
4857                 case 0x20:      /* ldf, load fpreg */
4858                     gen_address_mask(dc, cpu_addr);
4859                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4860                     cpu_dst_32 = gen_dest_fpr_F();
4861                     tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4862                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4863                     break;
4864                 case 0x21:      /* ldfsr, V9 ldxfsr */
4865 #ifdef TARGET_SPARC64
4866                     gen_address_mask(dc, cpu_addr);
4867                     if (rd == 1) {
4868                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4869                         gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4870                     } else {
4871                         tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4872                         tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4873                         gen_helper_ldfsr(cpu_env, cpu_tmp32);
4874                     }
4875 #else
4876                     {
4877                         tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4878                         gen_helper_ldfsr(cpu_env, cpu_tmp32);
4879                     }
4880 #endif
4881                     break;
4882                 case 0x22:      /* ldqf, load quad fpreg */
4883                     {
4884                         TCGv_i32 r_const;
4885
4886                         CHECK_FPU_FEATURE(dc, FLOAT128);
4887                         r_const = tcg_const_i32(dc->mem_idx);
4888                         gen_address_mask(dc, cpu_addr);
4889                         gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4890                         tcg_temp_free_i32(r_const);
4891                         gen_op_store_QT0_fpr(QFPREG(rd));
4892                         gen_update_fprs_dirty(QFPREG(rd));
4893                     }
4894                     break;
4895                 case 0x23:      /* lddf, load double fpreg */
4896                     gen_address_mask(dc, cpu_addr);
4897                     cpu_dst_64 = gen_dest_fpr_D();
4898                     tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4899                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4900                     break;
4901                 default:
4902                     goto illegal_insn;
4903                 }
4904             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4905                        xop == 0xe || xop == 0x1e) {
4906                 gen_movl_reg_TN(rd, cpu_val);
4907                 switch (xop) {
4908                 case 0x4: /* st, store word */
4909                     gen_address_mask(dc, cpu_addr);
4910                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4911                     break;
4912                 case 0x5: /* stb, store byte */
4913                     gen_address_mask(dc, cpu_addr);
4914                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4915                     break;
4916                 case 0x6: /* sth, store halfword */
4917                     gen_address_mask(dc, cpu_addr);
4918                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4919                     break;
4920                 case 0x7: /* std, store double word */
4921                     if (rd & 1)
4922                         goto illegal_insn;
4923                     else {
4924                         TCGv_i32 r_const;
4925
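                             /* std concatenates the odd register (low word,
                                from rd + 1) with the even register (high word,
                                already in cpu_val) and emits a single 64-bit
                                store.  */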
4926                         save_state(dc);
4927                         gen_address_mask(dc, cpu_addr);
4928                         r_const = tcg_const_i32(7);
4929                         /* XXX remove alignment check */
4930                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4931                         tcg_temp_free_i32(r_const);
4932                         gen_movl_reg_TN(rd + 1, cpu_tmp0);
4933                         tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4934                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4935                     }
4936                     break;
4937 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4938                 case 0x14: /* sta, V9 stwa, store word alternate */
4939 #ifndef TARGET_SPARC64
4940                     if (IS_IMM)
4941                         goto illegal_insn;
4942                     if (!supervisor(dc))
4943                         goto priv_insn;
4944 #endif
4945                     save_state(dc);
4946                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4947                     dc->npc = DYNAMIC_PC;
4948                     break;
4949                 case 0x15: /* stba, store byte alternate */
4950 #ifndef TARGET_SPARC64
4951                     if (IS_IMM)
4952                         goto illegal_insn;
4953                     if (!supervisor(dc))
4954                         goto priv_insn;
4955 #endif
4956                     save_state(dc);
4957                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4958                     dc->npc = DYNAMIC_PC;
4959                     break;
4960                 case 0x16: /* stha, store halfword alternate */
4961 #ifndef TARGET_SPARC64
4962                     if (IS_IMM)
4963                         goto illegal_insn;
4964                     if (!supervisor(dc))
4965                         goto priv_insn;
4966 #endif
4967                     save_state(dc);
4968                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4969                     dc->npc = DYNAMIC_PC;
4970                     break;
4971                 case 0x17: /* stda, store double word alternate */
4972 #ifndef TARGET_SPARC64
4973                     if (IS_IMM)
4974                         goto illegal_insn;
4975                     if (!supervisor(dc))
4976                         goto priv_insn;
4977 #endif
4978                     if (rd & 1)
4979                         goto illegal_insn;
4980                     else {
4981                         save_state(dc);
4982                         gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
4983                     }
4984                     break;
4985 #endif
4986 #ifdef TARGET_SPARC64
4987                 case 0x0e: /* V9 stx */
4988                     gen_address_mask(dc, cpu_addr);
4989                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4990                     break;
4991                 case 0x1e: /* V9 stxa */
4992                     save_state(dc);
4993                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
4994                     dc->npc = DYNAMIC_PC;
4995                     break;
4996 #endif
4997                 default:
4998                     goto illegal_insn;
4999                 }
5000             } else if (xop > 0x23 && xop < 0x28) {
5001                 if (gen_trap_ifnofpu(dc)) {
5002                     goto jmp_insn;
5003                 }
5004                 save_state(dc);
5005                 switch (xop) {
5006                 case 0x24: /* stf, store fpreg */
5007                     gen_address_mask(dc, cpu_addr);
5008                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5009                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
5010                     tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
5011                     break;
5012                 case 0x25: /* stfsr, V9 stxfsr */
5013 #ifdef TARGET_SPARC64
5014                     gen_address_mask(dc, cpu_addr);
5015                     tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
5016                     if (rd == 1)
5017                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5018                     else
5019                         tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
5020 #else
5021                     tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
5022                     tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
5023 #endif
5024                     break;
5025                 case 0x26:
5026 #ifdef TARGET_SPARC64
5027                     /* V9 stqf, store quad fpreg */
5028                     {
5029                         TCGv_i32 r_const;
5030
5031                         CHECK_FPU_FEATURE(dc, FLOAT128);
5032                         gen_op_load_fpr_QT0(QFPREG(rd));
5033                         r_const = tcg_const_i32(dc->mem_idx);
5034                         gen_address_mask(dc, cpu_addr);
5035                         gen_helper_stqf(cpu_env, cpu_addr, r_const);
5036                         tcg_temp_free_i32(r_const);
5037                     }
5038                     break;
5039 #else /* !TARGET_SPARC64 */
5040                     /* stdfq, store floating point queue */
5041 #if defined(CONFIG_USER_ONLY)
5042                     goto illegal_insn;
5043 #else
5044                     if (!supervisor(dc))
5045                         goto priv_insn;
5046                     if (gen_trap_ifnofpu(dc)) {
5047                         goto jmp_insn;
5048                     }
5049                     goto nfq_insn;
5050 #endif
5051 #endif
5052                 case 0x27: /* stdf, store double fpreg */
5053                     gen_address_mask(dc, cpu_addr);
5054                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5055                     tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5056                     break;
5057                 default:
5058                     goto illegal_insn;
5059                 }
5060             } else if (xop > 0x33 && xop < 0x3f) {
5061                 save_state(dc);
5062                 switch (xop) {
5063 #ifdef TARGET_SPARC64
5064                 case 0x34: /* V9 stfa */
5065                     if (gen_trap_ifnofpu(dc)) {
5066                         goto jmp_insn;
5067                     }
5068                     gen_stf_asi(cpu_addr, insn, 4, rd);
5069                     break;
5070                 case 0x36: /* V9 stqfa */
5071                     {
5072                         TCGv_i32 r_const;
5073
5074                         CHECK_FPU_FEATURE(dc, FLOAT128);
5075                         if (gen_trap_ifnofpu(dc)) {
5076                             goto jmp_insn;
5077                         }
5078                         r_const = tcg_const_i32(7);
5079                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
5080                         tcg_temp_free_i32(r_const);
5081                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5082                     }
5083                     break;
5084                 case 0x37: /* V9 stdfa */
5085                     if (gen_trap_ifnofpu(dc)) {
5086                         goto jmp_insn;
5087                     }
5088                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5089                     break;
5090                 case 0x3c: /* V9 casa */
5091                     gen_cas_asi(dc, cpu_val, cpu_addr, cpu_src2, insn, rd);
5092                     gen_store_gpr(dc, rd, cpu_val);
5093                     break;
5094                 case 0x3e: /* V9 casxa */
5095                     gen_casx_asi(dc, cpu_val, cpu_addr, cpu_src2, insn, rd);
5096                     gen_store_gpr(dc, rd, cpu_val);
5097                     break;
5098 #else
5099                 case 0x34: /* stc */
5100                 case 0x35: /* stcsr */
5101                 case 0x36: /* stdcq */
5102                 case 0x37: /* stdc */
5103                     goto ncp_insn;
5104 #endif
5105                 default:
5106                     goto illegal_insn;
5107                 }
5108             } else
5109                 goto illegal_insn;
5110         }
5111         break;
5112     }
5113     /* default case for non-jump instructions */
5114     if (dc->npc == DYNAMIC_PC) {
5115         dc->pc = DYNAMIC_PC;
5116         gen_op_next_insn();
5117     } else if (dc->npc == JUMP_PC) {
5118         /* we can do a static jump */
5119         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5120         dc->is_br = 1;
5121     } else {
5122         dc->pc = dc->npc;
5123         dc->npc = dc->npc + 4;
5124     }
5125  jmp_insn:
5126     goto egress;
5127  illegal_insn:
5128     {
5129         TCGv_i32 r_const;
5130
5131         save_state(dc);
5132         r_const = tcg_const_i32(TT_ILL_INSN);
5133         gen_helper_raise_exception(cpu_env, r_const);
5134         tcg_temp_free_i32(r_const);
5135         dc->is_br = 1;
5136     }
5137     goto egress;
5138  unimp_flush:
5139     {
5140         TCGv_i32 r_const;
5141
5142         save_state(dc);
5143         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5144         gen_helper_raise_exception(cpu_env, r_const);
5145         tcg_temp_free_i32(r_const);
5146         dc->is_br = 1;
5147     }
5148     goto egress;
5149 #if !defined(CONFIG_USER_ONLY)
5150  priv_insn:
5151     {
5152         TCGv_i32 r_const;
5153
5154         save_state(dc);
5155         r_const = tcg_const_i32(TT_PRIV_INSN);
5156         gen_helper_raise_exception(cpu_env, r_const);
5157         tcg_temp_free_i32(r_const);
5158         dc->is_br = 1;
5159     }
5160     goto egress;
5161 #endif
5162  nfpu_insn:
5163     save_state(dc);
5164     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5165     dc->is_br = 1;
5166     goto egress;
5167 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5168  nfq_insn:
5169     save_state(dc);
5170     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5171     dc->is_br = 1;
5172     goto egress;
5173 #endif
5174 #ifndef TARGET_SPARC64
5175  ncp_insn:
5176     {
5177         TCGv_i32 r_const;
5178
5179         save_state(dc);
5180         r_const = tcg_const_i32(TT_NCP_INSN);
5181         gen_helper_raise_exception(cpu_env, r_const);
5182         tcg_temp_free_i32(r_const);
5183         dc->is_br = 1;
5184     }
5185     goto egress;
5186 #endif
5187  egress:
5188     tcg_temp_free(cpu_tmp1);
5189     tcg_temp_free(cpu_tmp2);
5190     if (dc->n_t32 != 0) {
5191         int i;
5192         for (i = dc->n_t32 - 1; i >= 0; --i) {
5193             tcg_temp_free_i32(dc->t32[i]);
5194         }
5195         dc->n_t32 = 0;
5196     }
5197     if (dc->n_ttl != 0) {
5198         int i;
5199         for (i = dc->n_ttl - 1; i >= 0; --i) {
5200             tcg_temp_free(dc->ttl[i]);
5201         }
5202         dc->n_ttl = 0;
5203     }
5204 }
5205
5206 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
5207                                                   int spc, CPUSPARCState *env)
5208 {
5209     target_ulong pc_start, last_pc;
5210     uint16_t *gen_opc_end;
5211     DisasContext dc1, *dc = &dc1;
5212     CPUBreakpoint *bp;
5213     int j, lj = -1;
5214     int num_insns;
5215     int max_insns;
5216     unsigned int insn;
5217
5218     memset(dc, 0, sizeof(DisasContext));
5219     dc->tb = tb;
5220     pc_start = tb->pc;
5221     dc->pc = pc_start;
5222     last_pc = dc->pc;
5223     dc->npc = (target_ulong) tb->cs_base;
5224     dc->cc_op = CC_OP_DYNAMIC;
5225     dc->mem_idx = cpu_mmu_index(env);
5226     dc->def = env->def;
5227     dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5228     dc->address_mask_32bit = tb_am_enabled(tb->flags);
5229     dc->singlestep = (env->singlestep_enabled || singlestep);
5230     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5231
5232     num_insns = 0;
5233     max_insns = tb->cflags & CF_COUNT_MASK;
5234     if (max_insns == 0)
5235         max_insns = CF_COUNT_MASK;
5236     gen_icount_start();
5237     do {
5238         if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5239             QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5240                 if (bp->pc == dc->pc) {
5241                     if (dc->pc != pc_start)
5242                         save_state(dc);
5243                     gen_helper_debug(cpu_env);
5244                     tcg_gen_exit_tb(0);
5245                     dc->is_br = 1;
5246                     goto exit_gen_loop;
5247                 }
5248             }
5249         }
5250         if (spc) {
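                 /* When searching for a guest PC (spc != 0), record pc, npc
                    and the instruction count for each op so
                    restore_state_to_opc() can rebuild the CPU state later.  */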
5251             qemu_log("Search PC...\n");
5252             j = gen_opc_ptr - gen_opc_buf;
5253             if (lj < j) {
5254                 lj++;
5255                 while (lj < j)
5256                     gen_opc_instr_start[lj++] = 0;
5257                 gen_opc_pc[lj] = dc->pc;
5258                 gen_opc_npc[lj] = dc->npc;
5259                 gen_opc_instr_start[lj] = 1;
5260                 gen_opc_icount[lj] = num_insns;
5261             }
5262         }
5263         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5264             gen_io_start();
5265         last_pc = dc->pc;
5266         insn = cpu_ldl_code(env, dc->pc);
5267
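             /* Per-instruction temporaries: allocated before and freed right
                after disas_sparc_insn() so every translated instruction starts
                with a clean set.  */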
5268         cpu_tmp0 = tcg_temp_new();
5269         cpu_tmp32 = tcg_temp_new_i32();
5270         cpu_tmp64 = tcg_temp_new_i64();
5271         cpu_dst = tcg_temp_new();
5272         cpu_val = tcg_temp_new();
5273         cpu_addr = tcg_temp_new();
5274
5275         disas_sparc_insn(dc, insn);
5276         num_insns++;
5277
5278         tcg_temp_free(cpu_addr);
5279         tcg_temp_free(cpu_val);
5280         tcg_temp_free(cpu_dst);
5281         tcg_temp_free_i64(cpu_tmp64);
5282         tcg_temp_free_i32(cpu_tmp32);
5283         tcg_temp_free(cpu_tmp0);
5284
5285         if (dc->is_br)
5286             break;
5287         /* stop translation if the PC did not advance sequentially */
5288         if (dc->pc != (last_pc + 4))
5289             break;
5290         /* if we reach a page boundary, we stop generation so that the
5291            PC of a TT_TFAULT exception is always in the right page */
5292         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5293             break;
5294         /* in single-step mode, we translate only one instruction
5295            before generating an exception */
5296         if (dc->singlestep) {
5297             break;
5298         }
5299     } while ((gen_opc_ptr < gen_opc_end) &&
5300              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5301              num_insns < max_insns);
5302
5303  exit_gen_loop:
5304     if (tb->cflags & CF_LAST_IO) {
5305         gen_io_end();
5306     }
5307     if (!dc->is_br) {
5308         if (dc->pc != DYNAMIC_PC &&
5309             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5310             /* static PC and NPC: we can use direct chaining */
5311             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5312         } else {
5313             if (dc->pc != DYNAMIC_PC) {
5314                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5315             }
5316             save_npc(dc);
5317             tcg_gen_exit_tb(0);
5318         }
5319     }
5320     gen_icount_end(tb, num_insns);
5321     *gen_opc_ptr = INDEX_op_end;
5322     if (spc) {
5323         j = gen_opc_ptr - gen_opc_buf;
5324         lj++;
5325         while (lj <= j)
5326             gen_opc_instr_start[lj++] = 0;
5327 #if 0
5328         log_page_dump();
5329 #endif
5330         gen_opc_jump_pc[0] = dc->jump_pc[0];
5331         gen_opc_jump_pc[1] = dc->jump_pc[1];
5332     } else {
5333         tb->size = last_pc + 4 - pc_start;
5334         tb->icount = num_insns;
5335     }
5336 #ifdef DEBUG_DISAS
5337     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5338         qemu_log("--------------\n");
5339         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5340         log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5341         qemu_log("\n");
5342     }
5343 #endif
5344 }
5345
5346 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5347 {
5348     gen_intermediate_code_internal(tb, 0, env);
5349 }
5350
5351 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5352 {
5353     gen_intermediate_code_internal(tb, 1, env);
5354 }
5355
5356 void gen_intermediate_code_init(CPUSPARCState *env)
5357 {
5358     unsigned int i;
5359     static int inited;
5360     static const char * const gregnames[8] = {
5361         NULL, // g0 not used
5362         "g1",
5363         "g2",
5364         "g3",
5365         "g4",
5366         "g5",
5367         "g6",
5368         "g7",
5369     };
5370     static const char * const fregnames[32] = {
5371         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5372         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5373         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5374         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5375     };
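         /* Each cpu_fpr[] element is a 64-bit global spanning an even/odd
            pair of 32-bit floating point registers, hence only even register
            names are listed above.  */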
5376
5377     /* init various static tables */
5378     if (!inited) {
5379         inited = 1;
5380
5381         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5382         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5383                                              offsetof(CPUSPARCState, regwptr),
5384                                              "regwptr");
5385 #ifdef TARGET_SPARC64
5386         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5387                                          "xcc");
5388         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5389                                          "asi");
5390         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5391                                           "fprs");
5392         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5393                                      "gsr");
5394         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5395                                            offsetof(CPUSPARCState, tick_cmpr),
5396                                            "tick_cmpr");
5397         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5398                                             offsetof(CPUSPARCState, stick_cmpr),
5399                                             "stick_cmpr");
5400         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5401                                              offsetof(CPUSPARCState, hstick_cmpr),
5402                                              "hstick_cmpr");
5403         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5404                                        "hintp");
5405         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5406                                       "htba");
5407         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5408                                       "hver");
5409         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5410                                      offsetof(CPUSPARCState, ssr), "ssr");
5411         cpu_ver = tcg_global_mem_new(TCG_AREG0,
5412                                      offsetof(CPUSPARCState, version), "ver");
5413         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5414                                              offsetof(CPUSPARCState, softint),
5415                                              "softint");
5416 #else
5417         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5418                                      "wim");
5419 #endif
5420         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5421                                       "cond");
5422         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5423                                         "cc_src");
5424         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5425                                          offsetof(CPUSPARCState, cc_src2),
5426                                          "cc_src2");
5427         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5428                                         "cc_dst");
5429         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5430                                            "cc_op");
5431         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5432                                          "psr");
5433         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5434                                      "fsr");
5435         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5436                                     "pc");
5437         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5438                                      "npc");
5439         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5440 #ifndef CONFIG_USER_ONLY
5441         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5442                                      "tbr");
5443 #endif
5444         for (i = 1; i < 8; i++) {
5445             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5446                                               offsetof(CPUSPARCState, gregs[i]),
5447                                               gregnames[i]);
5448         }
5449         for (i = 0; i < TARGET_DPREGS; i++) {
5450             cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5451                                                 offsetof(CPUSPARCState, fpr[i]),
5452                                                 fregnames[i]);
5453         }
5454
5455         /* register helpers */
5456
5457 #define GEN_HELPER 2
5458 #include "helper.h"
5459     }
5460 }
5461
5462 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5463 {
5464     target_ulong npc;
5465     env->pc = gen_opc_pc[pc_pos];
5466     npc = gen_opc_npc[pc_pos];
5467     if (npc == DYNAMIC_PC) {
5468         /* dynamic NPC: already stored */
5469     } else if (npc == JUMP_PC) {
5470         /* jump PC: use 'cond' and the jump targets of the translation */
5471         if (env->cond) {
5472             env->npc = gen_opc_jump_pc[0];
5473         } else {
5474             env->npc = gen_opc_jump_pc[1];
5475         }
5476     } else {
5477         env->npc = npc;
5478     }
5479 }