]> Git Repo - qemu.git/blob - target-sparc/translate.c
48c245eeb26fe6f8f0e59dbccb0d91c1fcb5a8e9
[qemu.git] / target-sparc / translate.c
1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <[email protected]>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31
32 #define GEN_HELPER 1
33 #include "helper.h"
34
35 #define DEBUG_DISAS
36
37 #define DYNAMIC_PC  1 /* dynamic pc value */
38 #define JUMP_PC     2 /* dynamic pc value which takes only two values
39                          according to jump_pc[T2] */
40
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* local register indexes (only used inside old micro ops) */
62 static TCGv cpu_tmp0;
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
70
71 #include "gen-icount.h"
72
/* Per-translation-block decoder state. */
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;               /* nonzero once a branch ends the TB */
    int mem_idx;             /* MMU index for memory accesses */
    int fpu_enabled;         /* FPU access currently permitted */
    int address_mask_32bit;  /* sparc64: PSTATE.AM truncates addresses */
    int singlestep;          /* single-step requested; inhibits TB chaining */
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];         /* i32 temps to free at end of instruction */
    int n_t32;               /* number of live entries in t32[] */
} DisasContext;
88
/* A comparison to be tested as "cond(c1, c2)".  is_bool means c1 is
   already a 0/1 value compared against c2 == 0.  g1/g2 flag c1/c2 as
   globals that must not be freed by free_compare(). */
typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
95
96 // This function uses non-native bit order
97 #define GET_FIELD(X, FROM, TO)                                  \
98     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
99
100 // This function uses the order in the manuals, i.e. bit 0 is 2^0
101 #define GET_FIELD_SP(X, FROM, TO)               \
102     GET_FIELD(X, 31 - (TO), 31 - (FROM))
103
104 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
105 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
106
107 #ifdef TARGET_SPARC64
108 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
109 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
110 #else
111 #define DFPREG(r) (r & 0x1e)
112 #define QFPREG(r) (r & 0x1c)
113 #endif
114
115 #define UA2005_HTRAP_MASK 0xff
116 #define V8_TRAP_MASK 0x7f
117
/*
 * Sign-extend the low 'len' bits of 'x' to a full 32-bit signed value.
 * Implemented with mask arithmetic rather than the classic
 * "(x << n) >> n" trick: left-shifting a negative value is undefined
 * behavior in C, and the arithmetic right shift it relies on is only
 * implementation-defined.
 */
static int sign_extend(int x, int len)
{
    if (len <= 0 || len >= 32) {
        /* Degenerate or full-width field: nothing to extend. */
        return x;
    }
    unsigned field = (unsigned)x & ((1u << len) - 1);
    unsigned sign_bit = 1u << (len - 1);
    /* (field ^ sign) - sign propagates the field's top bit upward. */
    return (int)((field ^ sign_bit) - sign_bit);
}
123
124 #define IS_IMM (insn & (1<<13))
125
/* Mark the half of the FP register file containing 'rd' dirty in FPRS:
   bit 0 for the lower set (f0-f31), bit 1 for the upper set.  No-op on
   sparc32, which has no FPRS register. */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
132
/* floating point registers moves */

/* Return a 32-bit view of single-precision FP register 'src'.  The
   backing store is cpu_fpr[], one i64 per even/odd register pair with
   the even register in the high 32 bits. */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    /* On a 32-bit host an i64 is a register pair, so either half can
       be aliased directly without copying. */
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        /* Odd register: reinterpret the i64 temp as an i32 (low half). */
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        /* Even register: extract the high half into a fresh temp; the
           temp is recorded in dc->t32[] so it can be freed once the
           instruction has been translated. */
        TCGv_i32 ret = tcg_temp_local_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}
160
/* Store 32-bit value 'v' into single-precision FP register 'dst',
   leaving the other half of the containing i64 intact, and mark the
   register set dirty. */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    /* 32-bit host: write the aliased half of the register pair. */
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* 64-bit host: deposit into the low (odd reg) or high (even reg)
       32 bits of the i64 pair. */
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}
176
/* Return a scratch i32 destination for an FP op; the caller stores it
   to a register with gen_store_fpr_F() afterwards. */
static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}
181
/* Return the i64 backing double-precision FP register 'src' (register
   number first canonicalized through DFPREG). */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
187
/* Store 64-bit value 'v' into double-precision FP register 'dst' and
   mark the register set dirty. */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}
194
/* Return a scratch i64 destination for a double-precision FP op; the
   caller stores it with gen_store_fpr_D() afterwards. */
static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}
199
/* Copy quad FP register pair starting at 'src' into the env qt0
   staging area used by quad-precision helpers. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
207
/* Copy quad FP register pair starting at 'src' into the env qt1
   staging area (second operand of quad-precision helpers). */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
215
/* Copy the env qt0 staging area (a quad-precision helper result) back
   into the FP register pair starting at 'dst'. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
223
#ifdef TARGET_SPARC64
/* Move a quad-precision FP register (two i64 halves) from 'rs' to
   'rd' and mark the destination register set dirty. */
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif
235
236 /* moves */
237 #ifdef CONFIG_USER_ONLY
238 #define supervisor(dc) 0
239 #ifdef TARGET_SPARC64
240 #define hypervisor(dc) 0
241 #endif
242 #else
243 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
244 #ifdef TARGET_SPARC64
245 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
246 #else
247 #endif
248 #endif
249
250 #ifdef TARGET_SPARC64
251 #ifndef TARGET_ABI32
252 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
253 #else
254 #define AM_CHECK(dc) (1)
255 #endif
256 #endif
257
258 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
259 {
260 #ifdef TARGET_SPARC64
261     if (AM_CHECK(dc))
262         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
263 #endif
264 }
265
266 static inline void gen_movl_reg_TN(int reg, TCGv tn)
267 {
268     if (reg == 0)
269         tcg_gen_movi_tl(tn, 0);
270     else if (reg < 8)
271         tcg_gen_mov_tl(tn, cpu_gregs[reg]);
272     else {
273         tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
274     }
275 }
276
277 static inline void gen_movl_TN_reg(int reg, TCGv tn)
278 {
279     if (reg == 0)
280         return;
281     else if (reg < 8)
282         tcg_gen_mov_tl(cpu_gregs[reg], tn);
283     else {
284         tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
285     }
286 }
287
/* End the TB with a jump to (pc, npc).  When both targets lie on the
   same guest page as this TB and we are not single-stepping, chain to
   the next TB directly; otherwise exit to the main loop with the PC
   registers updated. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep)  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
309
// XXX suboptimal
/* Extract PSR.N (negative flag) from 'src' into 'reg' as 0 or 1. */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
317
/* Extract PSR.Z (zero flag) from 'src' into 'reg' as 0 or 1. */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
324
/* Extract PSR.V (overflow flag) from 'src' into 'reg' as 0 or 1. */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
331
/* Extract PSR.C (carry flag) from 'src' into 'reg' as 0 or 1. */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
338
/* dst = src1 + imm src2, recording both operands and the result in
   cc_src/cc_src2/cc_dst for lazy condition-code evaluation (the
   caller is responsible for setting cc_op). */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
346
/* dst = src1 + src2, recording operands and result for lazy
   condition-code evaluation (caller sets cc_op). */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
354
/* Return a fresh i32 temp holding the 32-bit carry out of the add
   recorded in cc_dst/cc_src (caller frees it). */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* Truncate to 32 bits so the unsigned compare sees icc, not xcc. */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
380
/* Return a fresh i32 temp holding the 32-bit borrow out of the
   subtract recorded in cc_src/cc_src2 (caller frees it). */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    /* Truncate to 32 bits so the unsigned compare sees icc, not xcc. */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
406
/* ADDX/ADDXcc: dst = src1 + src2 + icc.C.  Uses dc->cc_op, the record
   of how the live condition codes were produced, to obtain the carry
   as cheaply as possible instead of always calling the helper.  When
   update_cc is set, the operands/result are recorded and cc_op becomes
   CC_OP_ADDX. */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    /* Widen the 32-bit carry to target-long width before adding. */
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
482
/* dst = src1 - imm src2 with condition-code recording.  A subtract of
   zero degenerates to a move, for which the cheaper CC_OP_LOGIC
   evaluation suffices. */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
498
/* dst = src1 - src2, recording operands and result for lazy
   condition-code evaluation (caller sets cc_op). */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
506
/* SUBX/SUBXcc: dst = src1 - src2 - icc.C.  Mirrors gen_op_addx_int:
   dc->cc_op tells us how the live condition codes were produced so the
   borrow can be recovered cheaply; the helper is the fallback.  When
   update_cc is set, cc_op becomes CC_OP_SUBX. */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    /* Widen the 32-bit borrow to target-long width before subtracting. */
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
582
/* MULScc: one step of the SPARC multiply-step instruction.  Shifts the
   Y register right by one (inserting bit 0 of src1 at the top), forms
   a partial sum of src1 shifted right with N^V inserted at bit 31, and
   conditionally adds src2 depending on the old Y bit 0.  Operands and
   result are left in cc_src/cc_src2/cc_dst for cc evaluation. */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero;

    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
626
/* 32x32 -> 64 bit multiply for UMUL/SMUL.  The low 32 bits of the
   product go to 'dst'; the high 32 bits are written to the Y register.
   'sign_ext' selects signed (SMUL) versus unsigned (UMUL) widening of
   the truncated 32-bit operands. */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* Y = high 32 bits of the product. */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    /* dst = low 32 bits of the product. */
    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
663
/* UMUL: unsigned 32x32 -> 64 multiply; low half to dst, high half to Y. */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
669
/* SMUL: signed 32x32 -> 64 multiply; low half to dst, high half to Y. */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
675
/* Branch always: condition is constant 1. */
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
681
/* Branch on equal: Z */
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
687
/* Branch on less or equal (signed): Z | (N ^ V) */
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
697
/* Branch on less (signed): N ^ V */
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}
705
/* Branch on less or equal (unsigned): C | Z */
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
713
/* Branch on carry set (unsigned less): C */
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
719
/* Branch on overflow set: V */
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
725
/* Branch never: condition is constant 0. */
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
731
/* Branch on negative: N */
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
737
/* Branch on not equal: !Z */
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
744
/* Branch on greater (signed): !(Z | (N ^ V)) */
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
755
/* Branch on greater or equal (signed): !(N ^ V) */
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
764
/* Branch on greater (unsigned): !(C | Z) */
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
773
/* Branch on carry clear (unsigned greater or equal): !C */
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
780
/* Branch on positive: !N */
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
787
/* Branch on overflow clear: !V */
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
794
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract bit FCC0 of the FCC field at 'fcc_offset' into 'reg' as 0/1. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
808
/* Extract bit FCC1 of the FCC field at 'fcc_offset' into 'reg' as 0/1. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
815
/* FP branch on not equal (FCC != 0): FCC0 | FCC1 */
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
824
/* FP branch on less or greater (FCC == 1 or 2): FCC0 ^ FCC1 */
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}
833
/* FP branch on unordered or less (FCC == 1 or 3): FCC0 */
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
840
/* FP branch on less (FCC == 1): FCC0 & !FCC1 */
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
850
/* FP branch on unordered or greater (FCC == 2 or 3): FCC1 */
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
857
/* FP branch on greater (FCC == 2): !FCC0 & FCC1 */
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
867
/* FP branch on unordered (FCC == 3): FCC0 & FCC1 */
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
876
/* FP branch on equal (FCC == 0): !(FCC0 | FCC1) */
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
886
/* FP branch on unordered or equal (FCC == 0 or 3): !(FCC0 ^ FCC1) */
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
896
/* FP branch on greater or equal (FCC == 0 or 2): !FCC0 */
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
904
/* FP branch on unordered, greater or equal (FCC != 1): !(FCC0 & !FCC1) */
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
915
/* FP branch on less or equal (FCC == 0 or 1): !FCC1 */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
923
/* FP branch on unordered, less or equal (FCC != 2): !(!FCC0 & FCC1) */
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
934
/* FP branch on ordered (FCC != 3): !(FCC0 & FCC1) */
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
944
/* End the TB on a conditional branch: go to pc1 when r_cond is
   nonzero, else to pc2 (each followed by its sequential npc). */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
959
/* End the TB on an annulling conditional branch: when r_cond is
   nonzero execute the delay slot at pc2 then jump to pc1; when zero,
   annul the delay slot and continue at pc2 + 4. */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
974
/* Resolve a pending JUMP_PC: select npc = jump_pc[0] if cpu_cond is
   nonzero, else jump_pc[1], without ending the TB. */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
987
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        /* Materialize the pending two-way npc so cpu_cond is free. */
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
997
/* Make cpu_npc match dc->npc: resolve a pending JUMP_PC or store a
   known constant npc.  DYNAMIC_PC means cpu_npc is already correct. */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1007
/* Synchronize all translator-tracked CPU state (pc, npc, condition
   codes) into the CPU registers, e.g. before a helper call that may
   raise an exception or inspect the state. */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
    save_npc(dc);
}
1018
/* Advance pc to the current npc (used by delayed control transfers).
   A pending JUMP_PC or dynamic npc forces pc to become dynamic too. */
static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        /* Both values are compile-time constants; no code needed. */
        dc->pc = dc->npc;
    }
}
1032
/* Emit code for sequential execution: pc = npc, npc += 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1038
1039 static void free_compare(DisasCompare *cmp)
1040 {
1041     if (!cmp->g1) {
1042         tcg_temp_free(cmp->c1);
1043     }
1044     if (!cmp->g2) {
1045         tcg_temp_free(cmp->c2);
1046     }
1047 }
1048
/* Fill in *cmp to test integer condition COND (4-bit cond field of
   Bicc/Tcc/MOVcc) on condition-code set CC (sparc64: 0 = icc, 1 = xcc).
   The result is materialized as a 0/1 boolean in a fresh temporary, so
   the comparison is "c1 != 0".  Both c1 and c2 are temporaries owned by
   *cmp; release them with free_compare(). */
static void gen_compare(DisasCompare *cmp, unsigned int cc, unsigned int cond,
                        DisasContext *dc)
{
    TCGv_i32 r_src;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    /* Force lazily-tracked condition codes into CC_OP_FLAGS form
       before reading them. */
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    /* Dispatch on the architectural condition encoding (bn .. bvc). */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1129
/* Fill in *cmp to test floating-point condition COND on fcc field CC
   (0-3).  OFFSET is the bit displacement of the chosen fcc field inside
   the FSR, relative to fcc0 (the gen_op_eval_fb* helpers apply it).
   Like gen_compare, the result is a 0/1 boolean temporary and the
   comparison is "c1 != 0"; free with free_compare(). */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* fcc0 sits at a different FSR position than fcc1-3, hence the
       irregular offsets. */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* Dispatch on the architectural FP condition encoding (fbn .. fbo). */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1209
1210 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1211                      DisasContext *dc)
1212 {
1213     DisasCompare cmp;
1214     gen_compare(&cmp, cc, cond, dc);
1215
1216     /* The interface is to return a boolean in r_dst.  */
1217     if (cmp.is_bool) {
1218         tcg_gen_mov_tl(r_dst, cmp.c1);
1219     } else {
1220         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1221     }
1222
1223     free_compare(&cmp);
1224 }
1225
1226 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1227 {
1228     DisasCompare cmp;
1229     gen_fcompare(&cmp, cc, cond);
1230
1231     /* The interface is to return a boolean in r_dst.  */
1232     if (cmp.is_bool) {
1233         tcg_gen_mov_tl(r_dst, cmp.c1);
1234     } else {
1235         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1236     }
1237
1238     free_compare(&cmp);
1239 }
1240
1241 #ifdef TARGET_SPARC64
/* Map the 3-bit register-branch condition (rcond of BPr/MOVr/FMOVr) to
   the *inverse* TCG condition; gen_compare_reg() inverts it back.
   Indices 0 and 4 are reserved encodings and hold -1; they must not be
   reached by a decoded instruction. */
static const int gen_tcg_cond_reg[8] = {
    -1,             /* reserved */
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,             /* reserved */
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1253
/* Fill in *cmp for a register-contents condition: test R_SRC against
   zero with the direct (un-inverted) condition for COND.  R_SRC is
   caller-owned, so g1 is set and free_compare() leaves it alone. */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
1263
/* Evaluate register condition COND on R_SRC into R_DST as a 0/1
   boolean. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
1274 #endif
1275
/* Translate Bicc/BPcc (conditional branch on integer condition codes).
   OFFSET is the sign-extended displacement already scaled to bytes,
   INSN supplies the cond (bits 25-28) and annul (bit 29) fields, and
   CC selects icc (0) vs xcc (1). */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With PSTATE.AM set, branch targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul bit: the delay-slot instruction is skipped too */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* ba,a: jump immediately, skipping the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* genuinely conditional: evaluate the condition into cpu_cond */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            /* record both possible npc values; the choice is deferred
               via the JUMP_PC mechanism until cpu_cond is consumed */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
1324
/* Translate FBfcc/FBPfcc (conditional branch on FP condition codes).
   Mirrors do_branch() but evaluates the condition from the fcc field
   CC of the FSR. */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With PSTATE.AM set, branch targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul bit: the delay-slot instruction is skipped too */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* fba,a: jump immediately, skipping the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* genuinely conditional: evaluate the condition into cpu_cond */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            /* record both possible npc values; deferred via JUMP_PC */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
1373
1374 #ifdef TARGET_SPARC64
/* Translate BPr (branch on register contents, sparc64 only).  Unlike
   Bicc there are no unconditional encodings, so the condition is
   always evaluated into cpu_cond. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* With PSTATE.AM set, branch targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        /* record both possible npc values; deferred via JUMP_PC */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1401
1402 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1403 {
1404     switch (fccno) {
1405     case 0:
1406         gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1407         break;
1408     case 1:
1409         gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1410         break;
1411     case 2:
1412         gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1413         break;
1414     case 3:
1415         gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1416         break;
1417     }
1418 }
1419
1420 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1421 {
1422     switch (fccno) {
1423     case 0:
1424         gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1425         break;
1426     case 1:
1427         gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1428         break;
1429     case 2:
1430         gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1431         break;
1432     case 3:
1433         gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1434         break;
1435     }
1436 }
1437
1438 static inline void gen_op_fcmpq(int fccno)
1439 {
1440     switch (fccno) {
1441     case 0:
1442         gen_helper_fcmpq(cpu_env);
1443         break;
1444     case 1:
1445         gen_helper_fcmpq_fcc1(cpu_env);
1446         break;
1447     case 2:
1448         gen_helper_fcmpq_fcc2(cpu_env);
1449         break;
1450     case 3:
1451         gen_helper_fcmpq_fcc3(cpu_env);
1452         break;
1453     }
1454 }
1455
1456 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1457 {
1458     switch (fccno) {
1459     case 0:
1460         gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1461         break;
1462     case 1:
1463         gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1464         break;
1465     case 2:
1466         gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1467         break;
1468     case 3:
1469         gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1470         break;
1471     }
1472 }
1473
1474 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1475 {
1476     switch (fccno) {
1477     case 0:
1478         gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1479         break;
1480     case 1:
1481         gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1482         break;
1483     case 2:
1484         gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1485         break;
1486     case 3:
1487         gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1488         break;
1489     }
1490 }
1491
1492 static inline void gen_op_fcmpeq(int fccno)
1493 {
1494     switch (fccno) {
1495     case 0:
1496         gen_helper_fcmpeq(cpu_env);
1497         break;
1498     case 1:
1499         gen_helper_fcmpeq_fcc1(cpu_env);
1500         break;
1501     case 2:
1502         gen_helper_fcmpeq_fcc2(cpu_env);
1503         break;
1504     case 3:
1505         gen_helper_fcmpeq_fcc3(cpu_env);
1506         break;
1507     }
1508 }
1509
1510 #else
1511
/* sparc32 has a single fcc field, so fccno is ignored. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}
1516
/* sparc32 has a single fcc field, so fccno is ignored. */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}
1521
/* Quad compare via the QT0/QT1 staging slots; fccno ignored on sparc32. */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}
1526
/* Signaling single compare; fccno ignored on sparc32. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}
1531
/* Signaling double compare; fccno ignored on sparc32. */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}
1536
/* Signaling quad compare via QT0/QT1; fccno ignored on sparc32. */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
1541 #endif
1542
1543 static inline void gen_op_fpexception_im(int fsr_flags)
1544 {
1545     TCGv_i32 r_const;
1546
1547     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1548     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1549     r_const = tcg_const_i32(TT_FP_EXCP);
1550     gen_helper_raise_exception(cpu_env, r_const);
1551     tcg_temp_free_i32(r_const);
1552 }
1553
1554 static int gen_trap_ifnofpu(DisasContext *dc)
1555 {
1556 #if !defined(CONFIG_USER_ONLY)
1557     if (!dc->fpu_enabled) {
1558         TCGv_i32 r_const;
1559
1560         save_state(dc);
1561         r_const = tcg_const_i32(TT_NFPU_INSN);
1562         gen_helper_raise_exception(cpu_env, r_const);
1563         tcg_temp_free_i32(r_const);
1564         dc->is_br = 1;
1565         return 1;
1566     }
1567 #endif
1568     return 0;
1569 }
1570
/* Clear the current IEEE exception bits and the FTT field of the FSR. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1575
1576 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1577                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1578 {
1579     TCGv_i32 dst, src;
1580
1581     src = gen_load_fpr_F(dc, rs);
1582     dst = gen_dest_fpr_F();
1583
1584     gen(dst, cpu_env, src);
1585
1586     gen_store_fpr_F(dc, rd, dst);
1587 }
1588
1589 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1590                                  void (*gen)(TCGv_i32, TCGv_i32))
1591 {
1592     TCGv_i32 dst, src;
1593
1594     src = gen_load_fpr_F(dc, rs);
1595     dst = gen_dest_fpr_F();
1596
1597     gen(dst, src);
1598
1599     gen_store_fpr_F(dc, rd, dst);
1600 }
1601
1602 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1603                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1604 {
1605     TCGv_i32 dst, src1, src2;
1606
1607     src1 = gen_load_fpr_F(dc, rs1);
1608     src2 = gen_load_fpr_F(dc, rs2);
1609     dst = gen_dest_fpr_F();
1610
1611     gen(dst, cpu_env, src1, src2);
1612
1613     gen_store_fpr_F(dc, rd, dst);
1614 }
1615
1616 #ifdef TARGET_SPARC64
1617 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1618                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1619 {
1620     TCGv_i32 dst, src1, src2;
1621
1622     src1 = gen_load_fpr_F(dc, rs1);
1623     src2 = gen_load_fpr_F(dc, rs2);
1624     dst = gen_dest_fpr_F();
1625
1626     gen(dst, src1, src2);
1627
1628     gen_store_fpr_F(dc, rd, dst);
1629 }
1630 #endif
1631
1632 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1633                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1634 {
1635     TCGv_i64 dst, src;
1636
1637     src = gen_load_fpr_D(dc, rs);
1638     dst = gen_dest_fpr_D();
1639
1640     gen(dst, cpu_env, src);
1641
1642     gen_store_fpr_D(dc, rd, dst);
1643 }
1644
1645 #ifdef TARGET_SPARC64
1646 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1647                                  void (*gen)(TCGv_i64, TCGv_i64))
1648 {
1649     TCGv_i64 dst, src;
1650
1651     src = gen_load_fpr_D(dc, rs);
1652     dst = gen_dest_fpr_D();
1653
1654     gen(dst, src);
1655
1656     gen_store_fpr_D(dc, rd, dst);
1657 }
1658 #endif
1659
1660 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1661                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1662 {
1663     TCGv_i64 dst, src1, src2;
1664
1665     src1 = gen_load_fpr_D(dc, rs1);
1666     src2 = gen_load_fpr_D(dc, rs2);
1667     dst = gen_dest_fpr_D();
1668
1669     gen(dst, cpu_env, src1, src2);
1670
1671     gen_store_fpr_D(dc, rd, dst);
1672 }
1673
1674 #ifdef TARGET_SPARC64
1675 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1676                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1677 {
1678     TCGv_i64 dst, src1, src2;
1679
1680     src1 = gen_load_fpr_D(dc, rs1);
1681     src2 = gen_load_fpr_D(dc, rs2);
1682     dst = gen_dest_fpr_D();
1683
1684     gen(dst, src1, src2);
1685
1686     gen_store_fpr_D(dc, rd, dst);
1687 }
1688
1689 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1690                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1691 {
1692     TCGv_i64 dst, src1, src2;
1693
1694     src1 = gen_load_fpr_D(dc, rs1);
1695     src2 = gen_load_fpr_D(dc, rs2);
1696     dst = gen_dest_fpr_D();
1697
1698     gen(dst, cpu_gsr, src1, src2);
1699
1700     gen_store_fpr_D(dc, rd, dst);
1701 }
1702
1703 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1704                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1705 {
1706     TCGv_i64 dst, src0, src1, src2;
1707
1708     src1 = gen_load_fpr_D(dc, rs1);
1709     src2 = gen_load_fpr_D(dc, rs2);
1710     src0 = gen_load_fpr_D(dc, rd);
1711     dst = gen_dest_fpr_D();
1712
1713     gen(dst, src0, src1, src2);
1714
1715     gen_store_fpr_D(dc, rd, dst);
1716 }
1717 #endif
1718
/* rd(Q) = gen(rs(Q)): unary quad op.  Quad operands travel through the
   QT0/QT1 staging slots in the CPU state rather than TCG values.
   (dc is unused.) */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1729
1730 #ifdef TARGET_SPARC64
/* Same as gen_fop_QQ (quad unary op through QT0/QT1); the "ne" variant
   exists for helpers with a different exception contract.  (dc unused.) */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1741 #endif
1742
/* rd(Q) = gen(rs1(Q), rs2(Q)): binary quad op; operands staged in
   QT0 (rs1) and QT1 (rs2), result read back from QT0.  (dc unused.) */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1754
1755 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1756                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1757 {
1758     TCGv_i64 dst;
1759     TCGv_i32 src1, src2;
1760
1761     src1 = gen_load_fpr_F(dc, rs1);
1762     src2 = gen_load_fpr_F(dc, rs2);
1763     dst = gen_dest_fpr_D();
1764
1765     gen(dst, cpu_env, src1, src2);
1766
1767     gen_store_fpr_D(dc, rd, dst);
1768 }
1769
1770 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1771                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1772 {
1773     TCGv_i64 src1, src2;
1774
1775     src1 = gen_load_fpr_D(dc, rs1);
1776     src2 = gen_load_fpr_D(dc, rs2);
1777
1778     gen(cpu_env, src1, src2);
1779
1780     gen_op_store_QT0_fpr(QFPREG(rd));
1781     gen_update_fprs_dirty(QFPREG(rd));
1782 }
1783
1784 #ifdef TARGET_SPARC64
1785 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1786                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1787 {
1788     TCGv_i64 dst;
1789     TCGv_i32 src;
1790
1791     src = gen_load_fpr_F(dc, rs);
1792     dst = gen_dest_fpr_D();
1793
1794     gen(dst, cpu_env, src);
1795
1796     gen_store_fpr_D(dc, rd, dst);
1797 }
1798 #endif
1799
1800 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1801                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1802 {
1803     TCGv_i64 dst;
1804     TCGv_i32 src;
1805
1806     src = gen_load_fpr_F(dc, rs);
1807     dst = gen_dest_fpr_D();
1808
1809     gen(dst, cpu_env, src);
1810
1811     gen_store_fpr_D(dc, rd, dst);
1812 }
1813
1814 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1815                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1816 {
1817     TCGv_i32 dst;
1818     TCGv_i64 src;
1819
1820     src = gen_load_fpr_D(dc, rs);
1821     dst = gen_dest_fpr_F();
1822
1823     gen(dst, cpu_env, src);
1824
1825     gen_store_fpr_F(dc, rd, dst);
1826 }
1827
1828 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1829                               void (*gen)(TCGv_i32, TCGv_ptr))
1830 {
1831     TCGv_i32 dst;
1832
1833     gen_op_load_fpr_QT1(QFPREG(rs));
1834     dst = gen_dest_fpr_F();
1835
1836     gen(dst, cpu_env);
1837
1838     gen_store_fpr_F(dc, rd, dst);
1839 }
1840
1841 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1842                               void (*gen)(TCGv_i64, TCGv_ptr))
1843 {
1844     TCGv_i64 dst;
1845
1846     gen_op_load_fpr_QT1(QFPREG(rs));
1847     dst = gen_dest_fpr_D();
1848
1849     gen(dst, cpu_env);
1850
1851     gen_store_fpr_D(dc, rd, dst);
1852 }
1853
1854 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1855                                  void (*gen)(TCGv_ptr, TCGv_i32))
1856 {
1857     TCGv_i32 src;
1858
1859     src = gen_load_fpr_F(dc, rs);
1860
1861     gen(cpu_env, src);
1862
1863     gen_op_store_QT0_fpr(QFPREG(rd));
1864     gen_update_fprs_dirty(QFPREG(rd));
1865 }
1866
1867 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1868                                  void (*gen)(TCGv_ptr, TCGv_i64))
1869 {
1870     TCGv_i64 src;
1871
1872     src = gen_load_fpr_D(dc, rs);
1873
1874     gen(cpu_env, src);
1875
1876     gen_op_store_QT0_fpr(QFPREG(rd));
1877     gen_update_fprs_dirty(QFPREG(rd));
1878 }
1879
1880 /* asi moves */
1881 #ifdef TARGET_SPARC64
1882 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1883 {
1884     int asi;
1885     TCGv_i32 r_asi;
1886
1887     if (IS_IMM) {
1888         r_asi = tcg_temp_new_i32();
1889         tcg_gen_mov_i32(r_asi, cpu_asi);
1890     } else {
1891         asi = GET_FIELD(insn, 19, 26);
1892         r_asi = tcg_const_i32(asi);
1893     }
1894     return r_asi;
1895 }
1896
1897 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1898                               int sign)
1899 {
1900     TCGv_i32 r_asi, r_size, r_sign;
1901
1902     r_asi = gen_get_asi(insn, addr);
1903     r_size = tcg_const_i32(size);
1904     r_sign = tcg_const_i32(sign);
1905     gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
1906     tcg_temp_free_i32(r_sign);
1907     tcg_temp_free_i32(r_size);
1908     tcg_temp_free_i32(r_asi);
1909 }
1910
1911 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1912 {
1913     TCGv_i32 r_asi, r_size;
1914
1915     r_asi = gen_get_asi(insn, addr);
1916     r_size = tcg_const_i32(size);
1917     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
1918     tcg_temp_free_i32(r_size);
1919     tcg_temp_free_i32(r_asi);
1920 }
1921
1922 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1923 {
1924     TCGv_i32 r_asi, r_size, r_rd;
1925
1926     r_asi = gen_get_asi(insn, addr);
1927     r_size = tcg_const_i32(size);
1928     r_rd = tcg_const_i32(rd);
1929     gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
1930     tcg_temp_free_i32(r_rd);
1931     tcg_temp_free_i32(r_size);
1932     tcg_temp_free_i32(r_asi);
1933 }
1934
1935 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1936 {
1937     TCGv_i32 r_asi, r_size, r_rd;
1938
1939     r_asi = gen_get_asi(insn, addr);
1940     r_size = tcg_const_i32(size);
1941     r_rd = tcg_const_i32(rd);
1942     gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
1943     tcg_temp_free_i32(r_rd);
1944     tcg_temp_free_i32(r_size);
1945     tcg_temp_free_i32(r_asi);
1946 }
1947
/* swapa: exchange DST with the 32-bit word at ADDR in the alternate
   space.  Emitted as a load into the shared cpu_tmp64 scratch followed
   by a store of the old DST; the final truncation moves the loaded
   word into DST.  NOTE(review): load and store are separate helper
   calls, so this is not atomic with respect to other vCPUs. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1962
1963 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1964 {
1965     TCGv_i32 r_asi, r_rd;
1966
1967     r_asi = gen_get_asi(insn, addr);
1968     r_rd = tcg_const_i32(rd);
1969     gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
1970     tcg_temp_free_i32(r_rd);
1971     tcg_temp_free_i32(r_asi);
1972 }
1973
/* stda to an alternate space: store the rd/rd+1 pair as one 64-bit
   value (HI = r[rd] is the high word, r[rd+1] the low word), built in
   the shared cpu_tmp64 scratch. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1986
/* casa: 32-bit compare-and-swap in an alternate space.  r[rd] supplies
   the comparison value and VAL2 the swap value; the helper leaves its
   result in DST.  (The memory semantics live in gen_helper_cas_asi.) */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
2000
/* casxa: 64-bit compare-and-swap; like gen_cas_asi but the comparison
   value is passed through the shared cpu_tmp64 scratch instead of a
   fresh temporary. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, cpu_env, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
2011
2012 #elif !defined(CONFIG_USER_ONLY)
2013
2014 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2015                               int sign)
2016 {
2017     TCGv_i32 r_asi, r_size, r_sign;
2018
2019     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2020     r_size = tcg_const_i32(size);
2021     r_sign = tcg_const_i32(sign);
2022     gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2023     tcg_temp_free(r_sign);
2024     tcg_temp_free(r_size);
2025     tcg_temp_free(r_asi);
2026     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2027 }
2028
2029 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2030 {
2031     TCGv_i32 r_asi, r_size;
2032
2033     tcg_gen_extu_tl_i64(cpu_tmp64, src);
2034     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2035     r_size = tcg_const_i32(size);
2036     gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2037     tcg_temp_free(r_size);
2038     tcg_temp_free(r_asi);
2039 }
2040
2041 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2042 {
2043     TCGv_i32 r_asi, r_size, r_sign;
2044     TCGv_i64 r_val;
2045
2046     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2047     r_size = tcg_const_i32(4);
2048     r_sign = tcg_const_i32(0);
2049     gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2050     tcg_temp_free(r_sign);
2051     r_val = tcg_temp_new_i64();
2052     tcg_gen_extu_tl_i64(r_val, dst);
2053     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2054     tcg_temp_free_i64(r_val);
2055     tcg_temp_free(r_size);
2056     tcg_temp_free(r_asi);
2057     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2058 }
2059
2060 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2061 {
2062     TCGv_i32 r_asi, r_size, r_sign;
2063
2064     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2065     r_size = tcg_const_i32(8);
2066     r_sign = tcg_const_i32(0);
2067     gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2068     tcg_temp_free(r_sign);
2069     tcg_temp_free(r_size);
2070     tcg_temp_free(r_asi);
2071     tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
2072     gen_movl_TN_reg(rd + 1, cpu_tmp0);
2073     tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2074     tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2075     gen_movl_TN_reg(rd, hi);
2076 }
2077
2078 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2079 {
2080     TCGv_i32 r_asi, r_size;
2081
2082     gen_movl_reg_TN(rd + 1, cpu_tmp0);
2083     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2084     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2085     r_size = tcg_const_i32(8);
2086     gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2087     tcg_temp_free(r_size);
2088     tcg_temp_free(r_asi);
2089 }
2090 #endif
2091
2092 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2093 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2094 {
2095     TCGv_i64 r_val;
2096     TCGv_i32 r_asi, r_size;
2097
2098     gen_ld_asi(dst, addr, insn, 1, 0);
2099
2100     r_val = tcg_const_i64(0xffULL);
2101     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2102     r_size = tcg_const_i32(1);
2103     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2104     tcg_temp_free_i32(r_size);
2105     tcg_temp_free_i32(r_asi);
2106     tcg_temp_free_i64(r_val);
2107 }
2108 #endif
2109
2110 static inline TCGv get_src1(unsigned int insn, TCGv def)
2111 {
2112     TCGv r_rs1 = def;
2113     unsigned int rs1;
2114
2115     rs1 = GET_FIELD(insn, 13, 17);
2116     if (rs1 == 0) {
2117         tcg_gen_movi_tl(def, 0);
2118     } else if (rs1 < 8) {
2119         r_rs1 = cpu_gregs[rs1];
2120     } else {
2121         tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
2122     }
2123     return r_rs1;
2124 }
2125
2126 static inline TCGv get_src2(unsigned int insn, TCGv def)
2127 {
2128     TCGv r_rs2 = def;
2129
2130     if (IS_IMM) { /* immediate */
2131         target_long simm = GET_FIELDs(insn, 19, 31);
2132         tcg_gen_movi_tl(def, simm);
2133     } else { /* register */
2134         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2135         if (rs2 == 0) {
2136             tcg_gen_movi_tl(def, 0);
2137         } else if (rs2 < 8) {
2138             r_rs2 = cpu_gregs[rs2];
2139         } else {
2140             tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
2141         }
2142     }
2143     return r_rs2;
2144 }
2145
2146 #ifdef TARGET_SPARC64
2147 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2148 {
2149     TCGv_i32 c32, zero, dst, s1, s2;
2150
2151     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2152        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2153        the later.  */
2154     c32 = tcg_temp_new_i32();
2155     if (cmp->is_bool) {
2156         tcg_gen_trunc_i64_i32(c32, cmp->c1);
2157     } else {
2158         TCGv_i64 c64 = tcg_temp_new_i64();
2159         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2160         tcg_gen_trunc_i64_i32(c32, c64);
2161         tcg_temp_free_i64(c64);
2162     }
2163
2164     s1 = gen_load_fpr_F(dc, rs);
2165     s2 = gen_load_fpr_F(dc, rd);
2166     dst = gen_dest_fpr_F();
2167     zero = tcg_const_i32(0);
2168
2169     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2170
2171     tcg_temp_free_i32(c32);
2172     tcg_temp_free_i32(zero);
2173     gen_store_fpr_F(dc, rd, dst);
2174 }
2175
2176 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2177 {
2178     TCGv_i64 dst = gen_dest_fpr_D();
2179     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2180                         gen_load_fpr_D(dc, rs),
2181                         gen_load_fpr_D(dc, rd));
2182     gen_store_fpr_D(dc, rd, dst);
2183 }
2184
2185 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2186 {
2187     int qd = QFPREG(rd);
2188     int qs = QFPREG(rs);
2189
2190     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2191                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2192     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2193                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2194
2195     gen_update_fprs_dirty(qd);
2196 }
2197
2198 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2199 {
2200     TCGv_i32 r_tl = tcg_temp_new_i32();
2201
2202     /* load env->tl into r_tl */
2203     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2204
2205     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2206     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2207
2208     /* calculate offset to current trap state from env->ts, reuse r_tl */
2209     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2210     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2211
2212     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2213     {
2214         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2215         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2216         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2217         tcg_temp_free_ptr(r_tl_tmp);
2218     }
2219
2220     tcg_temp_free_i32(r_tl);
2221 }
2222
/* Generate code for the VIS EDGE instructions: dst receives the edge
   mask for a partial store spanning s1 (address) .. s2 (end address).
   WIDTH selects the element size in bits, CC requests condition-code
   update, LEFT selects the big-endian ("left") variant.  */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* The CC forms also set the integer condition codes exactly as
           a SUBcc of the two addresses would.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1/lo2 = per-input bit index into the tables, as described above.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    /* Extract both edge masks: dst = left-table value, lo2 = right-table
       value.  */
    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the 8-byte-aligned addresses, further truncated to 32 bits
       when AM_CHECK (address masking) is in effect.  NB: this clobbers
       the caller's s1/s2 temporaries in place.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
2325
2326 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2327 {
2328     TCGv tmp = tcg_temp_new();
2329
2330     tcg_gen_add_tl(tmp, s1, s2);
2331     tcg_gen_andi_tl(dst, tmp, -8);
2332     if (left) {
2333         tcg_gen_neg_tl(tmp, tmp);
2334     }
2335     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2336
2337     tcg_temp_free(tmp);
2338 }
2339
2340 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2341 {
2342     TCGv t1, t2, shift;
2343
2344     t1 = tcg_temp_new();
2345     t2 = tcg_temp_new();
2346     shift = tcg_temp_new();
2347
2348     tcg_gen_andi_tl(shift, gsr, 7);
2349     tcg_gen_shli_tl(shift, shift, 3);
2350     tcg_gen_shl_tl(t1, s1, shift);
2351
2352     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2353        shift of (up to 63) followed by a constant shift of 1.  */
2354     tcg_gen_xori_tl(shift, shift, 63);
2355     tcg_gen_shr_tl(t2, s2, shift);
2356     tcg_gen_shri_tl(t2, t2, 1);
2357
2358     tcg_gen_or_tl(dst, t1, t2);
2359
2360     tcg_temp_free(t1);
2361     tcg_temp_free(t2);
2362     tcg_temp_free(shift);
2363 }
2364 #endif
2365
/* Bail out when the current CPU model lacks the named feature bit:
   integer-unit features raise an illegal-instruction trap, FPU features
   an unimplemented-FPop trap.  These expand to a 'goto', so they may
   only be used inside disas_sparc_insn where those labels exist.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2372
2373 /* before an instruction, dc->pc must be static */
2374 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2375 {
2376     unsigned int opc, rs1, rs2, rd;
2377     TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2378     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2379     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2380     target_long simm;
2381
2382     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2383         tcg_gen_debug_insn_start(dc->pc);
2384     }
2385
2386     opc = GET_FIELD(insn, 0, 1);
2387
2388     rd = GET_FIELD(insn, 2, 6);
2389
2390     cpu_tmp1 = cpu_src1 = tcg_temp_new();
2391     cpu_tmp2 = cpu_src2 = tcg_temp_new();
2392
2393     switch (opc) {
2394     case 0:                     /* branches/sethi */
2395         {
2396             unsigned int xop = GET_FIELD(insn, 7, 9);
2397             int32_t target;
2398             switch (xop) {
2399 #ifdef TARGET_SPARC64
2400             case 0x1:           /* V9 BPcc */
2401                 {
2402                     int cc;
2403
2404                     target = GET_FIELD_SP(insn, 0, 18);
2405                     target = sign_extend(target, 19);
2406                     target <<= 2;
2407                     cc = GET_FIELD_SP(insn, 20, 21);
2408                     if (cc == 0)
2409                         do_branch(dc, target, insn, 0);
2410                     else if (cc == 2)
2411                         do_branch(dc, target, insn, 1);
2412                     else
2413                         goto illegal_insn;
2414                     goto jmp_insn;
2415                 }
2416             case 0x3:           /* V9 BPr */
2417                 {
2418                     target = GET_FIELD_SP(insn, 0, 13) |
2419                         (GET_FIELD_SP(insn, 20, 21) << 14);
2420                     target = sign_extend(target, 16);
2421                     target <<= 2;
2422                     cpu_src1 = get_src1(insn, cpu_src1);
2423                     do_branch_reg(dc, target, insn, cpu_src1);
2424                     goto jmp_insn;
2425                 }
2426             case 0x5:           /* V9 FBPcc */
2427                 {
2428                     int cc = GET_FIELD_SP(insn, 20, 21);
2429                     if (gen_trap_ifnofpu(dc)) {
2430                         goto jmp_insn;
2431                     }
2432                     target = GET_FIELD_SP(insn, 0, 18);
2433                     target = sign_extend(target, 19);
2434                     target <<= 2;
2435                     do_fbranch(dc, target, insn, cc);
2436                     goto jmp_insn;
2437                 }
2438 #else
2439             case 0x7:           /* CBN+x */
2440                 {
2441                     goto ncp_insn;
2442                 }
2443 #endif
2444             case 0x2:           /* BN+x */
2445                 {
2446                     target = GET_FIELD(insn, 10, 31);
2447                     target = sign_extend(target, 22);
2448                     target <<= 2;
2449                     do_branch(dc, target, insn, 0);
2450                     goto jmp_insn;
2451                 }
2452             case 0x6:           /* FBN+x */
2453                 {
2454                     if (gen_trap_ifnofpu(dc)) {
2455                         goto jmp_insn;
2456                     }
2457                     target = GET_FIELD(insn, 10, 31);
2458                     target = sign_extend(target, 22);
2459                     target <<= 2;
2460                     do_fbranch(dc, target, insn, 0);
2461                     goto jmp_insn;
2462                 }
2463             case 0x4:           /* SETHI */
2464                 if (rd) { // nop
2465                     uint32_t value = GET_FIELD(insn, 10, 31);
2466                     TCGv r_const;
2467
2468                     r_const = tcg_const_tl(value << 10);
2469                     gen_movl_TN_reg(rd, r_const);
2470                     tcg_temp_free(r_const);
2471                 }
2472                 break;
2473             case 0x0:           /* UNIMPL */
2474             default:
2475                 goto illegal_insn;
2476             }
2477             break;
2478         }
2479         break;
2480     case 1:                     /*CALL*/
2481         {
2482             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2483             TCGv r_const;
2484
2485             r_const = tcg_const_tl(dc->pc);
2486             gen_movl_TN_reg(15, r_const);
2487             tcg_temp_free(r_const);
2488             target += dc->pc;
2489             gen_mov_pc_npc(dc);
2490 #ifdef TARGET_SPARC64
2491             if (unlikely(AM_CHECK(dc))) {
2492                 target &= 0xffffffffULL;
2493             }
2494 #endif
2495             dc->npc = target;
2496         }
2497         goto jmp_insn;
2498     case 2:                     /* FPU & Logical Operations */
2499         {
2500             unsigned int xop = GET_FIELD(insn, 7, 12);
2501             if (xop == 0x3a) {  /* generate trap */
2502                 int cond = GET_FIELD(insn, 3, 6);
2503                 TCGv_i32 trap;
2504                 int l1 = -1, mask;
2505
2506                 if (cond == 0) {
2507                     /* Trap never.  */
2508                     break;
2509                 }
2510
2511                 save_state(dc);
2512
2513                 if (cond != 8) {
2514                     /* Conditional trap.  */
2515                     DisasCompare cmp;
2516 #ifdef TARGET_SPARC64
2517                     /* V9 icc/xcc */
2518                     int cc = GET_FIELD_SP(insn, 11, 12);
2519                     if (cc == 0) {
2520                         gen_compare(&cmp, 0, cond, dc);
2521                     } else if (cc == 2) {
2522                         gen_compare(&cmp, 1, cond, dc);
2523                     } else {
2524                         goto illegal_insn;
2525                     }
2526 #else
2527                     gen_compare(&cmp, 0, cond, dc);
2528 #endif
2529                     l1 = gen_new_label();
2530                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2531                                       cmp.c1, cmp.c2, l1);
2532                     free_compare(&cmp);
2533                 }
2534
2535                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2536                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2537
2538                 /* Don't use the normal temporaries, as they may well have
2539                    gone out of scope with the branch above.  While we're
2540                    doing that we might as well pre-truncate to 32-bit.  */
2541                 trap = tcg_temp_new_i32();
2542
2543                 rs1 = GET_FIELD_SP(insn, 14, 18);
2544                 if (IS_IMM) {
2545                     rs2 = GET_FIELD_SP(insn, 0, 6);
2546                     if (rs1 == 0) {
2547                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2548                         /* Signal that the trap value is fully constant.  */
2549                         mask = 0;
2550                     } else {
2551                         TCGv t1 = tcg_temp_new();
2552                         gen_movl_reg_TN(rs1, t1);
2553                         tcg_gen_trunc_tl_i32(trap, t1);
2554                         tcg_temp_free(t1);
2555                         tcg_gen_addi_i32(trap, trap, rs2);
2556                     }
2557                 } else {
2558                     TCGv t1 = tcg_temp_new();
2559                     TCGv t2 = tcg_temp_new();
2560                     rs2 = GET_FIELD_SP(insn, 0, 4);
2561                     gen_movl_reg_TN(rs1, t1);
2562                     gen_movl_reg_TN(rs2, t2);
2563                     tcg_gen_add_tl(t1, t1, t2);
2564                     tcg_gen_trunc_tl_i32(trap, t1);
2565                     tcg_temp_free(t1);
2566                     tcg_temp_free(t2);
2567                 }
2568                 if (mask != 0) {
2569                     tcg_gen_andi_i32(trap, trap, mask);
2570                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
2571                 }
2572
2573                 gen_helper_raise_exception(cpu_env, trap);
2574                 tcg_temp_free_i32(trap);
2575
2576                 if (cond != 8) {
2577                     gen_set_label(l1);
2578                     gen_op_next_insn();
2579                     tcg_gen_exit_tb(0);
2580                 }
2581                 dc->is_br = 1;
2582                 goto jmp_insn;
2583             } else if (xop == 0x28) {
2584                 rs1 = GET_FIELD(insn, 13, 17);
2585                 switch(rs1) {
2586                 case 0: /* rdy */
2587 #ifndef TARGET_SPARC64
2588                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2589                                        manual, rdy on the microSPARC
2590                                        II */
2591                 case 0x0f:          /* stbar in the SPARCv8 manual,
2592                                        rdy on the microSPARC II */
2593                 case 0x10 ... 0x1f: /* implementation-dependent in the
2594                                        SPARCv8 manual, rdy on the
2595                                        microSPARC II */
2596                     /* Read Asr17 */
2597                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2598                         TCGv r_const;
2599
2600                         /* Read Asr17 for a Leon3 monoprocessor */
2601                         r_const = tcg_const_tl((1 << 8)
2602                                                | (dc->def->nwindows - 1));
2603                         gen_movl_TN_reg(rd, r_const);
2604                         tcg_temp_free(r_const);
2605                         break;
2606                     }
2607 #endif
2608                     gen_movl_TN_reg(rd, cpu_y);
2609                     break;
2610 #ifdef TARGET_SPARC64
2611                 case 0x2: /* V9 rdccr */
2612                     gen_helper_compute_psr(cpu_env);
2613                     gen_helper_rdccr(cpu_dst, cpu_env);
2614                     gen_movl_TN_reg(rd, cpu_dst);
2615                     break;
2616                 case 0x3: /* V9 rdasi */
2617                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2618                     gen_movl_TN_reg(rd, cpu_dst);
2619                     break;
2620                 case 0x4: /* V9 rdtick */
2621                     {
2622                         TCGv_ptr r_tickptr;
2623
2624                         r_tickptr = tcg_temp_new_ptr();
2625                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2626                                        offsetof(CPUSPARCState, tick));
2627                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2628                         tcg_temp_free_ptr(r_tickptr);
2629                         gen_movl_TN_reg(rd, cpu_dst);
2630                     }
2631                     break;
2632                 case 0x5: /* V9 rdpc */
2633                     {
2634                         TCGv r_const;
2635
2636                         if (unlikely(AM_CHECK(dc))) {
2637                             r_const = tcg_const_tl(dc->pc & 0xffffffffULL);
2638                         } else {
2639                            r_const = tcg_const_tl(dc->pc);
2640                         }
2641                         gen_movl_TN_reg(rd, r_const);
2642                         tcg_temp_free(r_const);
2643                     }
2644                     break;
2645                 case 0x6: /* V9 rdfprs */
2646                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2647                     gen_movl_TN_reg(rd, cpu_dst);
2648                     break;
2649                 case 0xf: /* V9 membar */
2650                     break; /* no effect */
2651                 case 0x13: /* Graphics Status */
2652                     if (gen_trap_ifnofpu(dc)) {
2653                         goto jmp_insn;
2654                     }
2655                     gen_movl_TN_reg(rd, cpu_gsr);
2656                     break;
2657                 case 0x16: /* Softint */
2658                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2659                     gen_movl_TN_reg(rd, cpu_dst);
2660                     break;
2661                 case 0x17: /* Tick compare */
2662                     gen_movl_TN_reg(rd, cpu_tick_cmpr);
2663                     break;
2664                 case 0x18: /* System tick */
2665                     {
2666                         TCGv_ptr r_tickptr;
2667
2668                         r_tickptr = tcg_temp_new_ptr();
2669                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2670                                        offsetof(CPUSPARCState, stick));
2671                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2672                         tcg_temp_free_ptr(r_tickptr);
2673                         gen_movl_TN_reg(rd, cpu_dst);
2674                     }
2675                     break;
2676                 case 0x19: /* System tick compare */
2677                     gen_movl_TN_reg(rd, cpu_stick_cmpr);
2678                     break;
2679                 case 0x10: /* Performance Control */
2680                 case 0x11: /* Performance Instrumentation Counter */
2681                 case 0x12: /* Dispatch Control */
2682                 case 0x14: /* Softint set, WO */
2683                 case 0x15: /* Softint clear, WO */
2684 #endif
2685                 default:
2686                     goto illegal_insn;
2687                 }
2688 #if !defined(CONFIG_USER_ONLY)
2689             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2690 #ifndef TARGET_SPARC64
2691                 if (!supervisor(dc))
2692                     goto priv_insn;
2693                 gen_helper_compute_psr(cpu_env);
2694                 dc->cc_op = CC_OP_FLAGS;
2695                 gen_helper_rdpsr(cpu_dst, cpu_env);
2696 #else
2697                 CHECK_IU_FEATURE(dc, HYPV);
2698                 if (!hypervisor(dc))
2699                     goto priv_insn;
2700                 rs1 = GET_FIELD(insn, 13, 17);
2701                 switch (rs1) {
2702                 case 0: // hpstate
2703                     // gen_op_rdhpstate();
2704                     break;
2705                 case 1: // htstate
2706                     // gen_op_rdhtstate();
2707                     break;
2708                 case 3: // hintp
2709                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2710                     break;
2711                 case 5: // htba
2712                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2713                     break;
2714                 case 6: // hver
2715                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2716                     break;
2717                 case 31: // hstick_cmpr
2718                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2719                     break;
2720                 default:
2721                     goto illegal_insn;
2722                 }
2723 #endif
2724                 gen_movl_TN_reg(rd, cpu_dst);
2725                 break;
2726             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2727                 if (!supervisor(dc))
2728                     goto priv_insn;
2729 #ifdef TARGET_SPARC64
2730                 rs1 = GET_FIELD(insn, 13, 17);
2731                 switch (rs1) {
2732                 case 0: // tpc
2733                     {
2734                         TCGv_ptr r_tsptr;
2735
2736                         r_tsptr = tcg_temp_new_ptr();
2737                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2738                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2739                                       offsetof(trap_state, tpc));
2740                         tcg_temp_free_ptr(r_tsptr);
2741                     }
2742                     break;
2743                 case 1: // tnpc
2744                     {
2745                         TCGv_ptr r_tsptr;
2746
2747                         r_tsptr = tcg_temp_new_ptr();
2748                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2749                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2750                                       offsetof(trap_state, tnpc));
2751                         tcg_temp_free_ptr(r_tsptr);
2752                     }
2753                     break;
2754                 case 2: // tstate
2755                     {
2756                         TCGv_ptr r_tsptr;
2757
2758                         r_tsptr = tcg_temp_new_ptr();
2759                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2760                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2761                                       offsetof(trap_state, tstate));
2762                         tcg_temp_free_ptr(r_tsptr);
2763                     }
2764                     break;
2765                 case 3: // tt
2766                     {
2767                         TCGv_ptr r_tsptr;
2768
2769                         r_tsptr = tcg_temp_new_ptr();
2770                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2771                         tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2772                                        offsetof(trap_state, tt));
2773                         tcg_temp_free_ptr(r_tsptr);
2774                         tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2775                     }
2776                     break;
2777                 case 4: // tick
2778                     {
2779                         TCGv_ptr r_tickptr;
2780
2781                         r_tickptr = tcg_temp_new_ptr();
2782                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2783                                        offsetof(CPUSPARCState, tick));
2784                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2785                         gen_movl_TN_reg(rd, cpu_tmp0);
2786                         tcg_temp_free_ptr(r_tickptr);
2787                     }
2788                     break;
2789                 case 5: // tba
2790                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2791                     break;
2792                 case 6: // pstate
2793                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2794                                    offsetof(CPUSPARCState, pstate));
2795                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2796                     break;
2797                 case 7: // tl
2798                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2799                                    offsetof(CPUSPARCState, tl));
2800                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2801                     break;
2802                 case 8: // pil
2803                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2804                                    offsetof(CPUSPARCState, psrpil));
2805                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2806                     break;
2807                 case 9: // cwp
2808                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
2809                     break;
2810                 case 10: // cansave
2811                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2812                                    offsetof(CPUSPARCState, cansave));
2813                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2814                     break;
2815                 case 11: // canrestore
2816                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2817                                    offsetof(CPUSPARCState, canrestore));
2818                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2819                     break;
2820                 case 12: // cleanwin
2821                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2822                                    offsetof(CPUSPARCState, cleanwin));
2823                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2824                     break;
2825                 case 13: // otherwin
2826                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2827                                    offsetof(CPUSPARCState, otherwin));
2828                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2829                     break;
2830                 case 14: // wstate
2831                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2832                                    offsetof(CPUSPARCState, wstate));
2833                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2834                     break;
2835                 case 16: // UA2005 gl
2836                     CHECK_IU_FEATURE(dc, GL);
2837                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2838                                    offsetof(CPUSPARCState, gl));
2839                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2840                     break;
2841                 case 26: // UA2005 strand status
2842                     CHECK_IU_FEATURE(dc, HYPV);
2843                     if (!hypervisor(dc))
2844                         goto priv_insn;
2845                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2846                     break;
2847                 case 31: // ver
2848                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2849                     break;
2850                 case 15: // fq
2851                 default:
2852                     goto illegal_insn;
2853                 }
2854 #else
2855                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2856 #endif
2857                 gen_movl_TN_reg(rd, cpu_tmp0);
2858                 break;
2859             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2860 #ifdef TARGET_SPARC64
2861                 save_state(dc);
2862                 gen_helper_flushw(cpu_env);
2863 #else
2864                 if (!supervisor(dc))
2865                     goto priv_insn;
2866                 gen_movl_TN_reg(rd, cpu_tbr);
2867 #endif
2868                 break;
2869 #endif
2870             } else if (xop == 0x34) {   /* FPU Operations */
2871                 if (gen_trap_ifnofpu(dc)) {
2872                     goto jmp_insn;
2873                 }
2874                 gen_op_clear_ieee_excp_and_FTT();
2875                 rs1 = GET_FIELD(insn, 13, 17);
2876                 rs2 = GET_FIELD(insn, 27, 31);
2877                 xop = GET_FIELD(insn, 18, 26);
2878                 save_state(dc);
2879                 switch (xop) {
2880                 case 0x1: /* fmovs */
2881                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2882                     gen_store_fpr_F(dc, rd, cpu_src1_32);
2883                     break;
2884                 case 0x5: /* fnegs */
2885                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2886                     break;
2887                 case 0x9: /* fabss */
2888                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2889                     break;
2890                 case 0x29: /* fsqrts */
2891                     CHECK_FPU_FEATURE(dc, FSQRT);
2892                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2893                     break;
2894                 case 0x2a: /* fsqrtd */
2895                     CHECK_FPU_FEATURE(dc, FSQRT);
2896                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2897                     break;
2898                 case 0x2b: /* fsqrtq */
2899                     CHECK_FPU_FEATURE(dc, FLOAT128);
2900                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2901                     break;
2902                 case 0x41: /* fadds */
2903                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2904                     break;
2905                 case 0x42: /* faddd */
2906                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2907                     break;
2908                 case 0x43: /* faddq */
2909                     CHECK_FPU_FEATURE(dc, FLOAT128);
2910                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2911                     break;
2912                 case 0x45: /* fsubs */
2913                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2914                     break;
2915                 case 0x46: /* fsubd */
2916                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2917                     break;
2918                 case 0x47: /* fsubq */
2919                     CHECK_FPU_FEATURE(dc, FLOAT128);
2920                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2921                     break;
2922                 case 0x49: /* fmuls */
2923                     CHECK_FPU_FEATURE(dc, FMUL);
2924                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2925                     break;
2926                 case 0x4a: /* fmuld */
2927                     CHECK_FPU_FEATURE(dc, FMUL);
2928                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2929                     break;
2930                 case 0x4b: /* fmulq */
2931                     CHECK_FPU_FEATURE(dc, FLOAT128);
2932                     CHECK_FPU_FEATURE(dc, FMUL);
2933                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2934                     break;
2935                 case 0x4d: /* fdivs */
2936                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2937                     break;
2938                 case 0x4e: /* fdivd */
2939                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2940                     break;
2941                 case 0x4f: /* fdivq */
2942                     CHECK_FPU_FEATURE(dc, FLOAT128);
2943                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2944                     break;
2945                 case 0x69: /* fsmuld */
2946                     CHECK_FPU_FEATURE(dc, FSMULD);
2947                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2948                     break;
2949                 case 0x6e: /* fdmulq */
2950                     CHECK_FPU_FEATURE(dc, FLOAT128);
2951                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2952                     break;
2953                 case 0xc4: /* fitos */
2954                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2955                     break;
2956                 case 0xc6: /* fdtos */
2957                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2958                     break;
2959                 case 0xc7: /* fqtos */
2960                     CHECK_FPU_FEATURE(dc, FLOAT128);
2961                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2962                     break;
2963                 case 0xc8: /* fitod */
2964                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2965                     break;
2966                 case 0xc9: /* fstod */
2967                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2968                     break;
2969                 case 0xcb: /* fqtod */
2970                     CHECK_FPU_FEATURE(dc, FLOAT128);
2971                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2972                     break;
2973                 case 0xcc: /* fitoq */
2974                     CHECK_FPU_FEATURE(dc, FLOAT128);
2975                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2976                     break;
2977                 case 0xcd: /* fstoq */
2978                     CHECK_FPU_FEATURE(dc, FLOAT128);
2979                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2980                     break;
2981                 case 0xce: /* fdtoq */
2982                     CHECK_FPU_FEATURE(dc, FLOAT128);
2983                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2984                     break;
2985                 case 0xd1: /* fstoi */
2986                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2987                     break;
2988                 case 0xd2: /* fdtoi */
2989                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2990                     break;
2991                 case 0xd3: /* fqtoi */
2992                     CHECK_FPU_FEATURE(dc, FLOAT128);
2993                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2994                     break;
2995 #ifdef TARGET_SPARC64
2996                 case 0x2: /* V9 fmovd */
2997                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2998                     gen_store_fpr_D(dc, rd, cpu_src1_64);
2999                     break;
3000                 case 0x3: /* V9 fmovq */
3001                     CHECK_FPU_FEATURE(dc, FLOAT128);
3002                     gen_move_Q(rd, rs2);
3003                     break;
3004                 case 0x6: /* V9 fnegd */
3005                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3006                     break;
3007                 case 0x7: /* V9 fnegq */
3008                     CHECK_FPU_FEATURE(dc, FLOAT128);
3009                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3010                     break;
3011                 case 0xa: /* V9 fabsd */
3012                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3013                     break;
3014                 case 0xb: /* V9 fabsq */
3015                     CHECK_FPU_FEATURE(dc, FLOAT128);
3016                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3017                     break;
3018                 case 0x81: /* V9 fstox */
3019                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3020                     break;
3021                 case 0x82: /* V9 fdtox */
3022                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3023                     break;
3024                 case 0x83: /* V9 fqtox */
3025                     CHECK_FPU_FEATURE(dc, FLOAT128);
3026                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3027                     break;
3028                 case 0x84: /* V9 fxtos */
3029                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3030                     break;
3031                 case 0x88: /* V9 fxtod */
3032                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3033                     break;
3034                 case 0x8c: /* V9 fxtoq */
3035                     CHECK_FPU_FEATURE(dc, FLOAT128);
3036                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3037                     break;
3038 #endif
3039                 default:
3040                     goto illegal_insn;
3041                 }
3042             } else if (xop == 0x35) {   /* FPU Operations */
3043 #ifdef TARGET_SPARC64
3044                 int cond;
3045 #endif
3046                 if (gen_trap_ifnofpu(dc)) {
3047                     goto jmp_insn;
3048                 }
3049                 gen_op_clear_ieee_excp_and_FTT();
3050                 rs1 = GET_FIELD(insn, 13, 17);
3051                 rs2 = GET_FIELD(insn, 27, 31);
3052                 xop = GET_FIELD(insn, 18, 26);
3053                 save_state(dc);
3054
3055 #ifdef TARGET_SPARC64
3056 #define FMOVR(sz)                                                  \
3057                 do {                                               \
3058                     DisasCompare cmp;                              \
3059                     cond = GET_FIELD_SP(insn, 14, 17);             \
3060                     cpu_src1 = get_src1(insn, cpu_src1);           \
3061                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3062                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3063                     free_compare(&cmp);                            \
3064                 } while (0)
3065
3066                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3067                     FMOVR(s);
3068                     break;
3069                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3070                     FMOVR(d);
3071                     break;
3072                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3073                     CHECK_FPU_FEATURE(dc, FLOAT128);
3074                     FMOVR(q);
3075                     break;
3076                 }
3077 #undef FMOVR
3078 #endif
3079                 switch (xop) {
3080 #ifdef TARGET_SPARC64
3081 #define FMOVCC(fcc, sz)                                                 \
3082                     do {                                                \
3083                         DisasCompare cmp;                               \
3084                         cond = GET_FIELD_SP(insn, 14, 17);              \
3085                         gen_fcompare(&cmp, fcc, cond);                  \
3086                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3087                         free_compare(&cmp);                             \
3088                     } while (0)
3089
3090                     case 0x001: /* V9 fmovscc %fcc0 */
3091                         FMOVCC(0, s);
3092                         break;
3093                     case 0x002: /* V9 fmovdcc %fcc0 */
3094                         FMOVCC(0, d);
3095                         break;
3096                     case 0x003: /* V9 fmovqcc %fcc0 */
3097                         CHECK_FPU_FEATURE(dc, FLOAT128);
3098                         FMOVCC(0, q);
3099                         break;
3100                     case 0x041: /* V9 fmovscc %fcc1 */
3101                         FMOVCC(1, s);
3102                         break;
3103                     case 0x042: /* V9 fmovdcc %fcc1 */
3104                         FMOVCC(1, d);
3105                         break;
3106                     case 0x043: /* V9 fmovqcc %fcc1 */
3107                         CHECK_FPU_FEATURE(dc, FLOAT128);
3108                         FMOVCC(1, q);
3109                         break;
3110                     case 0x081: /* V9 fmovscc %fcc2 */
3111                         FMOVCC(2, s);
3112                         break;
3113                     case 0x082: /* V9 fmovdcc %fcc2 */
3114                         FMOVCC(2, d);
3115                         break;
3116                     case 0x083: /* V9 fmovqcc %fcc2 */
3117                         CHECK_FPU_FEATURE(dc, FLOAT128);
3118                         FMOVCC(2, q);
3119                         break;
3120                     case 0x0c1: /* V9 fmovscc %fcc3 */
3121                         FMOVCC(3, s);
3122                         break;
3123                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3124                         FMOVCC(3, d);
3125                         break;
3126                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3127                         CHECK_FPU_FEATURE(dc, FLOAT128);
3128                         FMOVCC(3, q);
3129                         break;
3130 #undef FMOVCC
3131 #define FMOVCC(xcc, sz)                                                 \
3132                     do {                                                \
3133                         DisasCompare cmp;                               \
3134                         cond = GET_FIELD_SP(insn, 14, 17);              \
3135                         gen_compare(&cmp, xcc, cond, dc);               \
3136                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3137                         free_compare(&cmp);                             \
3138                     } while (0)
3139
3140                     case 0x101: /* V9 fmovscc %icc */
3141                         FMOVCC(0, s);
3142                         break;
3143                     case 0x102: /* V9 fmovdcc %icc */
3144                         FMOVCC(0, d);
3145                         break;
3146                     case 0x103: /* V9 fmovqcc %icc */
3147                         CHECK_FPU_FEATURE(dc, FLOAT128);
3148                         FMOVCC(0, q);
3149                         break;
3150                     case 0x181: /* V9 fmovscc %xcc */
3151                         FMOVCC(1, s);
3152                         break;
3153                     case 0x182: /* V9 fmovdcc %xcc */
3154                         FMOVCC(1, d);
3155                         break;
3156                     case 0x183: /* V9 fmovqcc %xcc */
3157                         CHECK_FPU_FEATURE(dc, FLOAT128);
3158                         FMOVCC(1, q);
3159                         break;
3160 #undef FMOVCC
3161 #endif
3162                     case 0x51: /* fcmps, V9 %fcc */
3163                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3164                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3165                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3166                         break;
3167                     case 0x52: /* fcmpd, V9 %fcc */
3168                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3169                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3170                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3171                         break;
3172                     case 0x53: /* fcmpq, V9 %fcc */
3173                         CHECK_FPU_FEATURE(dc, FLOAT128);
3174                         gen_op_load_fpr_QT0(QFPREG(rs1));
3175                         gen_op_load_fpr_QT1(QFPREG(rs2));
3176                         gen_op_fcmpq(rd & 3);
3177                         break;
3178                     case 0x55: /* fcmpes, V9 %fcc */
3179                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3180                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3181                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3182                         break;
3183                     case 0x56: /* fcmped, V9 %fcc */
3184                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3185                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3186                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3187                         break;
3188                     case 0x57: /* fcmpeq, V9 %fcc */
3189                         CHECK_FPU_FEATURE(dc, FLOAT128);
3190                         gen_op_load_fpr_QT0(QFPREG(rs1));
3191                         gen_op_load_fpr_QT1(QFPREG(rs2));
3192                         gen_op_fcmpeq(rd & 3);
3193                         break;
3194                     default:
3195                         goto illegal_insn;
3196                 }
3197             } else if (xop == 0x2) {
3198                 // clr/mov shortcut
3199
3200                 rs1 = GET_FIELD(insn, 13, 17);
3201                 if (rs1 == 0) {
3202                     // or %g0, x, y -> mov T0, x; mov y, T0
3203                     if (IS_IMM) {       /* immediate */
3204                         TCGv r_const;
3205
3206                         simm = GET_FIELDs(insn, 19, 31);
3207                         r_const = tcg_const_tl(simm);
3208                         gen_movl_TN_reg(rd, r_const);
3209                         tcg_temp_free(r_const);
3210                     } else {            /* register */
3211                         rs2 = GET_FIELD(insn, 27, 31);
3212                         gen_movl_reg_TN(rs2, cpu_dst);
3213                         gen_movl_TN_reg(rd, cpu_dst);
3214                     }
3215                 } else {
3216                     cpu_src1 = get_src1(insn, cpu_src1);
3217                     if (IS_IMM) {       /* immediate */
3218                         simm = GET_FIELDs(insn, 19, 31);
3219                         tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3220                         gen_movl_TN_reg(rd, cpu_dst);
3221                     } else {            /* register */
3222                         // or x, %g0, y -> mov T1, x; mov y, T1
3223                         rs2 = GET_FIELD(insn, 27, 31);
3224                         if (rs2 != 0) {
3225                             gen_movl_reg_TN(rs2, cpu_src2);
3226                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3227                             gen_movl_TN_reg(rd, cpu_dst);
3228                         } else
3229                             gen_movl_TN_reg(rd, cpu_src1);
3230                     }
3231                 }
3232 #ifdef TARGET_SPARC64
3233             } else if (xop == 0x25) { /* sll, V9 sllx */
3234                 cpu_src1 = get_src1(insn, cpu_src1);
3235                 if (IS_IMM) {   /* immediate */
3236                     simm = GET_FIELDs(insn, 20, 31);
3237                     if (insn & (1 << 12)) {
3238                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3239                     } else {
3240                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3241                     }
3242                 } else {                /* register */
3243                     rs2 = GET_FIELD(insn, 27, 31);
3244                     gen_movl_reg_TN(rs2, cpu_src2);
3245                     if (insn & (1 << 12)) {
3246                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3247                     } else {
3248                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3249                     }
3250                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3251                 }
3252                 gen_movl_TN_reg(rd, cpu_dst);
3253             } else if (xop == 0x26) { /* srl, V9 srlx */
3254                 cpu_src1 = get_src1(insn, cpu_src1);
3255                 if (IS_IMM) {   /* immediate */
3256                     simm = GET_FIELDs(insn, 20, 31);
3257                     if (insn & (1 << 12)) {
3258                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3259                     } else {
3260                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3261                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3262                     }
3263                 } else {                /* register */
3264                     rs2 = GET_FIELD(insn, 27, 31);
3265                     gen_movl_reg_TN(rs2, cpu_src2);
3266                     if (insn & (1 << 12)) {
3267                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3268                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3269                     } else {
3270                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3271                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3272                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3273                     }
3274                 }
3275                 gen_movl_TN_reg(rd, cpu_dst);
3276             } else if (xop == 0x27) { /* sra, V9 srax */
3277                 cpu_src1 = get_src1(insn, cpu_src1);
3278                 if (IS_IMM) {   /* immediate */
3279                     simm = GET_FIELDs(insn, 20, 31);
3280                     if (insn & (1 << 12)) {
3281                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3282                     } else {
3283                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3284                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3285                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3286                     }
3287                 } else {                /* register */
3288                     rs2 = GET_FIELD(insn, 27, 31);
3289                     gen_movl_reg_TN(rs2, cpu_src2);
3290                     if (insn & (1 << 12)) {
3291                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3292                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3293                     } else {
3294                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3295                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3296                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3297                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3298                     }
3299                 }
3300                 gen_movl_TN_reg(rd, cpu_dst);
3301 #endif
3302             } else if (xop < 0x36) {
3303                 if (xop < 0x20) {
3304                     cpu_src1 = get_src1(insn, cpu_src1);
3305                     cpu_src2 = get_src2(insn, cpu_src2);
3306                     switch (xop & ~0x10) {
3307                     case 0x0: /* add */
3308                         if (IS_IMM) {
3309                             simm = GET_FIELDs(insn, 19, 31);
3310                             if (xop & 0x10) {
3311                                 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3312                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3313                                 dc->cc_op = CC_OP_ADD;
3314                             } else {
3315                                 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3316                             }
3317                         } else {
3318                             if (xop & 0x10) {
3319                                 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3320                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3321                                 dc->cc_op = CC_OP_ADD;
3322                             } else {
3323                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3324                             }
3325                         }
3326                         break;
3327                     case 0x1: /* and */
3328                         if (IS_IMM) {
3329                             simm = GET_FIELDs(insn, 19, 31);
3330                             tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3331                         } else {
3332                             tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3333                         }
3334                         if (xop & 0x10) {
3335                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3336                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3337                             dc->cc_op = CC_OP_LOGIC;
3338                         }
3339                         break;
3340                     case 0x2: /* or */
3341                         if (IS_IMM) {
3342                             simm = GET_FIELDs(insn, 19, 31);
3343                             tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3344                         } else {
3345                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3346                         }
3347                         if (xop & 0x10) {
3348                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3349                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3350                             dc->cc_op = CC_OP_LOGIC;
3351                         }
3352                         break;
3353                     case 0x3: /* xor */
3354                         if (IS_IMM) {
3355                             simm = GET_FIELDs(insn, 19, 31);
3356                             tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3357                         } else {
3358                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3359                         }
3360                         if (xop & 0x10) {
3361                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3362                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3363                             dc->cc_op = CC_OP_LOGIC;
3364                         }
3365                         break;
3366                     case 0x4: /* sub */
3367                         if (IS_IMM) {
3368                             simm = GET_FIELDs(insn, 19, 31);
3369                             if (xop & 0x10) {
3370                                 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3371                             } else {
3372                                 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3373                             }
3374                         } else {
3375                             if (xop & 0x10) {
3376                                 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3377                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3378                                 dc->cc_op = CC_OP_SUB;
3379                             } else {
3380                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3381                             }
3382                         }
3383                         break;
3384                     case 0x5: /* andn */
3385                         if (IS_IMM) {
3386                             simm = GET_FIELDs(insn, 19, 31);
3387                             tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3388                         } else {
3389                             tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3390                         }
3391                         if (xop & 0x10) {
3392                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3393                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3394                             dc->cc_op = CC_OP_LOGIC;
3395                         }
3396                         break;
3397                     case 0x6: /* orn */
3398                         if (IS_IMM) {
3399                             simm = GET_FIELDs(insn, 19, 31);
3400                             tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3401                         } else {
3402                             tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3403                         }
3404                         if (xop & 0x10) {
3405                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3406                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3407                             dc->cc_op = CC_OP_LOGIC;
3408                         }
3409                         break;
3410                     case 0x7: /* xorn */
3411                         if (IS_IMM) {
3412                             simm = GET_FIELDs(insn, 19, 31);
3413                             tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3414                         } else {
3415                             tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3416                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3417                         }
3418                         if (xop & 0x10) {
3419                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3420                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3421                             dc->cc_op = CC_OP_LOGIC;
3422                         }
3423                         break;
3424                     case 0x8: /* addx, V9 addc */
3425                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3426                                         (xop & 0x10));
3427                         break;
3428 #ifdef TARGET_SPARC64
3429                     case 0x9: /* V9 mulx */
3430                         if (IS_IMM) {
3431                             simm = GET_FIELDs(insn, 19, 31);
3432                             tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3433                         } else {
3434                             tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3435                         }
3436                         break;
3437 #endif
3438                     case 0xa: /* umul */
3439                         CHECK_IU_FEATURE(dc, MUL);
3440                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3441                         if (xop & 0x10) {
3442                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3443                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3444                             dc->cc_op = CC_OP_LOGIC;
3445                         }
3446                         break;
3447                     case 0xb: /* smul */
3448                         CHECK_IU_FEATURE(dc, MUL);
3449                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3450                         if (xop & 0x10) {
3451                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3452                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3453                             dc->cc_op = CC_OP_LOGIC;
3454                         }
3455                         break;
3456                     case 0xc: /* subx, V9 subc */
3457                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3458                                         (xop & 0x10));
3459                         break;
3460 #ifdef TARGET_SPARC64
3461                     case 0xd: /* V9 udivx */
3462                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3463                         break;
3464 #endif
3465                     case 0xe: /* udiv */
3466                         CHECK_IU_FEATURE(dc, DIV);
3467                         if (xop & 0x10) {
3468                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3469                                                cpu_src2);
3470                             dc->cc_op = CC_OP_DIV;
3471                         } else {
3472                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3473                                             cpu_src2);
3474                         }
3475                         break;
3476                     case 0xf: /* sdiv */
3477                         CHECK_IU_FEATURE(dc, DIV);
3478                         if (xop & 0x10) {
3479                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3480                                                cpu_src2);
3481                             dc->cc_op = CC_OP_DIV;
3482                         } else {
3483                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3484                                             cpu_src2);
3485                         }
3486                         break;
3487                     default:
3488                         goto illegal_insn;
3489                     }
3490                     gen_movl_TN_reg(rd, cpu_dst);
3491                 } else {
3492                     cpu_src1 = get_src1(insn, cpu_src1);
3493                     cpu_src2 = get_src2(insn, cpu_src2);
3494                     switch (xop) {
3495                     case 0x20: /* taddcc */
3496                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3497                         gen_movl_TN_reg(rd, cpu_dst);
3498                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3499                         dc->cc_op = CC_OP_TADD;
3500                         break;
3501                     case 0x21: /* tsubcc */
3502                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3503                         gen_movl_TN_reg(rd, cpu_dst);
3504                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3505                         dc->cc_op = CC_OP_TSUB;
3506                         break;
3507                     case 0x22: /* taddcctv */
3508                         gen_helper_taddcctv(cpu_dst, cpu_env,
3509                                             cpu_src1, cpu_src2);
3510                         gen_movl_TN_reg(rd, cpu_dst);
3511                         dc->cc_op = CC_OP_TADDTV;
3512                         break;
3513                     case 0x23: /* tsubcctv */
3514                         gen_helper_tsubcctv(cpu_dst, cpu_env,
3515                                             cpu_src1, cpu_src2);
3516                         gen_movl_TN_reg(rd, cpu_dst);
3517                         dc->cc_op = CC_OP_TSUBTV;
3518                         break;
3519                     case 0x24: /* mulscc */
3520                         gen_helper_compute_psr(cpu_env);
3521                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3522                         gen_movl_TN_reg(rd, cpu_dst);
3523                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3524                         dc->cc_op = CC_OP_ADD;
3525                         break;
#ifndef TARGET_SPARC64
                    /* 32-bit shifts (SPARCv8 only; the V9 shift opcodes are
                       decoded elsewhere).  The shift count is masked to five
                       bits, whether it comes from the immediate field or from
                       a register.  */
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
                            /* wry / V9 WRasr: rd selects the ancillary state
                               register; the value written is rs1 ^ rs2, per
                               the architected WRASR semantics.  */
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                /* Y is architecturally 32 bits wide.  */
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_dst);
                                /* CCR was written directly, so the lazy flags
                                   are now authoritative.  */
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                /* ASI is an 8-bit register.  */
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                /* FPRS affects FPU-enable checks done at
                                   translate time, so end the TB here and
                                   re-enter the translator.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc)) {
                                    goto jmp_insn;
                                }
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    /* Update the shadow register, then program
                                       the tick timer with the new limit.  */
                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    /* Writing STICK sets the counter value
                                       itself, not a compare limit.  */
                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            /* On V9 this opcode encodes SAVED/RESTORED (and
                               UA2005 window-management ops), selected by rd.  */
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_dst);
                            /* PSR now holds definitive condition codes.  */
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            /* PSR changes can affect translation (e.g. traps,
                               privilege), so end the TB after this insn.  */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* Architected write value is rs1 ^ rs2.  */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 WRPR: rd selects the privileged register.
                               The trap-state registers (tpc/tnpc/tstate/tt)
                               live at the current trap level, reached via
                               gen_load_trap_state_at_tl().  */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    /* tt is stored as a 32-bit field.  */
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                {
                                    /* save_state() can clobber temporaries, so
                                       the value is parked in a local temp; the
                                       helper may raise a trap, hence the saved
                                       state and DYNAMIC_PC npc.  */
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc);
                                    gen_helper_wrpstate(cpu_env, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 7: // tl
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                                   offsetof(CPUSPARCState, tl));
                                    /* Changing TL invalidates the statically
                                       known npc.  */
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* SPARCv8 wrwim: mask the value to the implemented
                               window bits before storing (on sparc32 targets
                               target_long is 32 bits, so the _tl op on the
                               i32 temp is well-formed here).  */
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* SPARCv8: write the trap base register.  */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 WRHPR: hyperprivileged registers,
                               selected by rd.  */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                /* Not modelled yet; still end the TB, since a
                                   real hpstate write could change translation
                                   conditions.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;

                            /* Bit 18 selects integer condition codes (cc 0 =
                               icc, cc 2 = xcc); otherwise cc names an fcc
                               field.  */
                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd is both a source (value kept when the
                               condition is false) and the destination.  */
                            gen_movl_reg_TN(rd, cpu_dst);
                            tcg_gen_movcond_tl(cmp.cond, cpu_dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, cpu_dst);
                            free_compare(&cmp);
                            gen_movl_TN_reg(rd, cpu_dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* 64-bit signed divide; helper handles the divide-by-
                           zero trap.  */
                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
3964                     case 0x2e: /* V9 popc */
3965                         {
3966                             cpu_src2 = get_src2(insn, cpu_src2);
3967                             gen_helper_popc(cpu_dst, cpu_src2);
3968                             gen_movl_TN_reg(rd, cpu_dst);
3969                         }
                    case 0x2f: /* V9 movr */
                        {
                            /* Move rs2/imm into rd if rs1 satisfies the
                               register condition (bits 10-12).  */
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;

                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd is read first so the old value survives when
                               the condition is false.  */
                            gen_movl_reg_TN(rd, cpu_dst);
                            tcg_gen_movcond_tl(cmp.cond, cpu_dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, cpu_dst);
                            free_compare(&cmp);
                            gen_movl_TN_reg(rd, cpu_dst);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                /* VIS instructions: sub-opcode in opf; all of them require
                   an enabled FPU.  */
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                switch (opf) {
                /* VIS edge instructions: gen_edge(dc, dst, s1, s2, size,
                   cc, left) — size is the element width in bits, cc selects
                   the flag-setting ("cc") variants, left selects the
                   little-endian ("l") variants.  */
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    /* array16/array32 reuse the array8 address computation
                       and scale the result by the element size.  */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    /* Final flag selects the little-endian variant.  */
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
4129                 case 0x019: /* VIS II bmask */
4130                     CHECK_FPU_FEATURE(dc, VIS2);
4131                     cpu_src1 = get_src1(insn, cpu_src1);
4132                     cpu_src2 = get_src1(insn, cpu_src2);
4133                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4134                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4135                     gen_movl_TN_reg(rd, cpu_dst);
4136                     break;
4137                 case 0x020: /* VIS I fcmple16 */
4138                     CHECK_FPU_FEATURE(dc, VIS1);
4139                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4140                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4141                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4142                     gen_movl_TN_reg(rd, cpu_dst);
4143                     break;
4144                 case 0x022: /* VIS I fcmpne16 */
4145                     CHECK_FPU_FEATURE(dc, VIS1);
4146                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4147                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4148                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4149                     gen_movl_TN_reg(rd, cpu_dst);
4150                     break;
4151                 case 0x024: /* VIS I fcmple32 */
4152                     CHECK_FPU_FEATURE(dc, VIS1);
4153                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4154                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4155                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4156                     gen_movl_TN_reg(rd, cpu_dst);
4157                     break;
4158                 case 0x026: /* VIS I fcmpne32 */
4159                     CHECK_FPU_FEATURE(dc, VIS1);
4160                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4161                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4162                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4163                     gen_movl_TN_reg(rd, cpu_dst);
4164                     break;
4165                 case 0x028: /* VIS I fcmpgt16 */
4166                     CHECK_FPU_FEATURE(dc, VIS1);
4167                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4168                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4169                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4170                     gen_movl_TN_reg(rd, cpu_dst);
4171                     break;
4172                 case 0x02a: /* VIS I fcmpeq16 */
4173                     CHECK_FPU_FEATURE(dc, VIS1);
4174                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4175                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4176                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4177                     gen_movl_TN_reg(rd, cpu_dst);
4178                     break;
4179                 case 0x02c: /* VIS I fcmpgt32 */
4180                     CHECK_FPU_FEATURE(dc, VIS1);
4181                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4182                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4183                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4184                     gen_movl_TN_reg(rd, cpu_dst);
4185                     break;
4186                 case 0x02e: /* VIS I fcmpeq32 */
4187                     CHECK_FPU_FEATURE(dc, VIS1);
4188                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4189                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4190                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4191                     gen_movl_TN_reg(rd, cpu_dst);
4192                     break;
4193                 case 0x031: /* VIS I fmul8x16 */
4194                     CHECK_FPU_FEATURE(dc, VIS1);
4195                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4196                     break;
4197                 case 0x033: /* VIS I fmul8x16au */
4198                     CHECK_FPU_FEATURE(dc, VIS1);
4199                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4200                     break;
4201                 case 0x035: /* VIS I fmul8x16al */
4202                     CHECK_FPU_FEATURE(dc, VIS1);
4203                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4204                     break;
4205                 case 0x036: /* VIS I fmul8sux16 */
4206                     CHECK_FPU_FEATURE(dc, VIS1);
4207                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4208                     break;
4209                 case 0x037: /* VIS I fmul8ulx16 */
4210                     CHECK_FPU_FEATURE(dc, VIS1);
4211                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4212                     break;
4213                 case 0x038: /* VIS I fmuld8sux16 */
4214                     CHECK_FPU_FEATURE(dc, VIS1);
4215                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4216                     break;
4217                 case 0x039: /* VIS I fmuld8ulx16 */
4218                     CHECK_FPU_FEATURE(dc, VIS1);
4219                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4220                     break;
4221                 case 0x03a: /* VIS I fpack32 */
4222                     CHECK_FPU_FEATURE(dc, VIS1);
4223                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4224                     break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Single-operand form: only rs2 is a data source
                       (rs1 is ignored).  The helper also reads GSR --
                       presumably the GSR.scale field per the VIS spec;
                       helper semantics not visible here.  Result is a
                       32-bit FP register.  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Same single-operand (rs2 + GSR) pattern as
                       fpack16 above, with the fpackfix helper.  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
4239                 case 0x03e: /* VIS I pdist */
4240                     CHECK_FPU_FEATURE(dc, VIS1);
4241                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4242                     break;
4243                 case 0x048: /* VIS I faligndata */
4244                     CHECK_FPU_FEATURE(dc, VIS1);
4245                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4246                     break;
4247                 case 0x04b: /* VIS I fpmerge */
4248                     CHECK_FPU_FEATURE(dc, VIS1);
4249                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4250                     break;
4251                 case 0x04c: /* VIS II bshuffle */
4252                     CHECK_FPU_FEATURE(dc, VIS2);
4253                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4254                     break;
4255                 case 0x04d: /* VIS I fexpand */
4256                     CHECK_FPU_FEATURE(dc, VIS1);
4257                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4258                     break;
4259                 case 0x050: /* VIS I fpadd16 */
4260                     CHECK_FPU_FEATURE(dc, VIS1);
4261                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4262                     break;
4263                 case 0x051: /* VIS I fpadd16s */
4264                     CHECK_FPU_FEATURE(dc, VIS1);
4265                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4266                     break;
4267                 case 0x052: /* VIS I fpadd32 */
4268                     CHECK_FPU_FEATURE(dc, VIS1);
4269                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4270                     break;
4271                 case 0x053: /* VIS I fpadd32s */
4272                     CHECK_FPU_FEATURE(dc, VIS1);
4273                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4274                     break;
4275                 case 0x054: /* VIS I fpsub16 */
4276                     CHECK_FPU_FEATURE(dc, VIS1);
4277                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4278                     break;
4279                 case 0x055: /* VIS I fpsub16s */
4280                     CHECK_FPU_FEATURE(dc, VIS1);
4281                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4282                     break;
4283                 case 0x056: /* VIS I fpsub32 */
4284                     CHECK_FPU_FEATURE(dc, VIS1);
4285                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4286                     break;
4287                 case 0x057: /* VIS I fpsub32s */
4288                     CHECK_FPU_FEATURE(dc, VIS1);
4289                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4290                     break;
4291                 case 0x060: /* VIS I fzero */
4292                     CHECK_FPU_FEATURE(dc, VIS1);
4293                     cpu_dst_64 = gen_dest_fpr_D();
4294                     tcg_gen_movi_i64(cpu_dst_64, 0);
4295                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4296                     break;
4297                 case 0x061: /* VIS I fzeros */
4298                     CHECK_FPU_FEATURE(dc, VIS1);
4299                     cpu_dst_32 = gen_dest_fpr_F();
4300                     tcg_gen_movi_i32(cpu_dst_32, 0);
4301                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4302                     break;
4303                 case 0x062: /* VIS I fnor */
4304                     CHECK_FPU_FEATURE(dc, VIS1);
4305                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4306                     break;
4307                 case 0x063: /* VIS I fnors */
4308                     CHECK_FPU_FEATURE(dc, VIS1);
4309                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4310                     break;
4311                 case 0x064: /* VIS I fandnot2 */
4312                     CHECK_FPU_FEATURE(dc, VIS1);
4313                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4314                     break;
4315                 case 0x065: /* VIS I fandnot2s */
4316                     CHECK_FPU_FEATURE(dc, VIS1);
4317                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4318                     break;
4319                 case 0x066: /* VIS I fnot2 */
4320                     CHECK_FPU_FEATURE(dc, VIS1);
4321                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4322                     break;
4323                 case 0x067: /* VIS I fnot2s */
4324                     CHECK_FPU_FEATURE(dc, VIS1);
4325                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4326                     break;
4327                 case 0x068: /* VIS I fandnot1 */
4328                     CHECK_FPU_FEATURE(dc, VIS1);
4329                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4330                     break;
4331                 case 0x069: /* VIS I fandnot1s */
4332                     CHECK_FPU_FEATURE(dc, VIS1);
4333                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4334                     break;
4335                 case 0x06a: /* VIS I fnot1 */
4336                     CHECK_FPU_FEATURE(dc, VIS1);
4337                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4338                     break;
4339                 case 0x06b: /* VIS I fnot1s */
4340                     CHECK_FPU_FEATURE(dc, VIS1);
4341                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4342                     break;
4343                 case 0x06c: /* VIS I fxor */
4344                     CHECK_FPU_FEATURE(dc, VIS1);
4345                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4346                     break;
4347                 case 0x06d: /* VIS I fxors */
4348                     CHECK_FPU_FEATURE(dc, VIS1);
4349                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4350                     break;
4351                 case 0x06e: /* VIS I fnand */
4352                     CHECK_FPU_FEATURE(dc, VIS1);
4353                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4354                     break;
4355                 case 0x06f: /* VIS I fnands */
4356                     CHECK_FPU_FEATURE(dc, VIS1);
4357                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4358                     break;
4359                 case 0x070: /* VIS I fand */
4360                     CHECK_FPU_FEATURE(dc, VIS1);
4361                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4362                     break;
4363                 case 0x071: /* VIS I fands */
4364                     CHECK_FPU_FEATURE(dc, VIS1);
4365                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4366                     break;
4367                 case 0x072: /* VIS I fxnor */
4368                     CHECK_FPU_FEATURE(dc, VIS1);
4369                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4370                     break;
4371                 case 0x073: /* VIS I fxnors */
4372                     CHECK_FPU_FEATURE(dc, VIS1);
4373                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4374                     break;
4375                 case 0x074: /* VIS I fsrc1 */
4376                     CHECK_FPU_FEATURE(dc, VIS1);
4377                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4378                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4379                     break;
4380                 case 0x075: /* VIS I fsrc1s */
4381                     CHECK_FPU_FEATURE(dc, VIS1);
4382                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4383                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4384                     break;
4385                 case 0x076: /* VIS I fornot2 */
4386                     CHECK_FPU_FEATURE(dc, VIS1);
4387                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4388                     break;
4389                 case 0x077: /* VIS I fornot2s */
4390                     CHECK_FPU_FEATURE(dc, VIS1);
4391                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4392                     break;
4393                 case 0x078: /* VIS I fsrc2 */
4394                     CHECK_FPU_FEATURE(dc, VIS1);
4395                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4396                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4397                     break;
4398                 case 0x079: /* VIS I fsrc2s */
4399                     CHECK_FPU_FEATURE(dc, VIS1);
4400                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4401                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4402                     break;
4403                 case 0x07a: /* VIS I fornot1 */
4404                     CHECK_FPU_FEATURE(dc, VIS1);
4405                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4406                     break;
4407                 case 0x07b: /* VIS I fornot1s */
4408                     CHECK_FPU_FEATURE(dc, VIS1);
4409                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4410                     break;
4411                 case 0x07c: /* VIS I for */
4412                     CHECK_FPU_FEATURE(dc, VIS1);
4413                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4414                     break;
4415                 case 0x07d: /* VIS I fors */
4416                     CHECK_FPU_FEATURE(dc, VIS1);
4417                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4418                     break;
4419                 case 0x07e: /* VIS I fone */
4420                     CHECK_FPU_FEATURE(dc, VIS1);
4421                     cpu_dst_64 = gen_dest_fpr_D();
4422                     tcg_gen_movi_i64(cpu_dst_64, -1);
4423                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4424                     break;
4425                 case 0x07f: /* VIS I fones */
4426                     CHECK_FPU_FEATURE(dc, VIS1);
4427                     cpu_dst_32 = gen_dest_fpr_F();
4428                     tcg_gen_movi_i32(cpu_dst_32, -1);
4429                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4430                     break;
4431                 case 0x080: /* VIS I shutdown */
4432                 case 0x081: /* VIS II siam */
4433                     // XXX
4434                     goto illegal_insn;
4435                 default:
4436                     goto illegal_insn;
4437                 }
4438 #else
4439                 goto ncp_insn;
4440 #endif
4441             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4442 #ifdef TARGET_SPARC64
4443                 goto illegal_insn;
4444 #else
4445                 goto ncp_insn;
4446 #endif
4447 #ifdef TARGET_SPARC64
4448             } else if (xop == 0x39) { /* V9 return */
4449                 TCGv_i32 r_const;
4450
4451                 save_state(dc);
4452                 cpu_src1 = get_src1(insn, cpu_src1);
4453                 if (IS_IMM) {   /* immediate */
4454                     simm = GET_FIELDs(insn, 19, 31);
4455                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4456                 } else {                /* register */
4457                     rs2 = GET_FIELD(insn, 27, 31);
4458                     if (rs2) {
4459                         gen_movl_reg_TN(rs2, cpu_src2);
4460                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4461                     } else
4462                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4463                 }
4464                 gen_helper_restore(cpu_env);
4465                 gen_mov_pc_npc(dc);
4466                 r_const = tcg_const_i32(3);
4467                 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4468                 tcg_temp_free_i32(r_const);
4469                 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4470                 dc->npc = DYNAMIC_PC;
4471                 goto jmp_insn;
4472 #endif
4473             } else {
4474                 cpu_src1 = get_src1(insn, cpu_src1);
4475                 if (IS_IMM) {   /* immediate */
4476                     simm = GET_FIELDs(insn, 19, 31);
4477                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4478                 } else {                /* register */
4479                     rs2 = GET_FIELD(insn, 27, 31);
4480                     if (rs2) {
4481                         gen_movl_reg_TN(rs2, cpu_src2);
4482                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4483                     } else
4484                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4485                 }
4486                 switch (xop) {
4487                 case 0x38:      /* jmpl */
4488                     {
4489                         TCGv r_pc;
4490                         TCGv_i32 r_const;
4491
4492                         r_pc = tcg_const_tl(dc->pc);
4493                         gen_movl_TN_reg(rd, r_pc);
4494                         tcg_temp_free(r_pc);
4495                         gen_mov_pc_npc(dc);
4496                         r_const = tcg_const_i32(3);
4497                         gen_helper_check_align(cpu_env, cpu_dst, r_const);
4498                         tcg_temp_free_i32(r_const);
4499                         gen_address_mask(dc, cpu_dst);
4500                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4501                         dc->npc = DYNAMIC_PC;
4502                     }
4503                     goto jmp_insn;
4504 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4505                 case 0x39:      /* rett, V9 return */
4506                     {
4507                         TCGv_i32 r_const;
4508
4509                         if (!supervisor(dc))
4510                             goto priv_insn;
4511                         gen_mov_pc_npc(dc);
4512                         r_const = tcg_const_i32(3);
4513                         gen_helper_check_align(cpu_env, cpu_dst, r_const);
4514                         tcg_temp_free_i32(r_const);
4515                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4516                         dc->npc = DYNAMIC_PC;
4517                         gen_helper_rett(cpu_env);
4518                     }
4519                     goto jmp_insn;
4520 #endif
4521                 case 0x3b: /* flush */
4522                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4523                         goto unimp_flush;
4524                     /* nop */
4525                     break;
4526                 case 0x3c:      /* save */
4527                     save_state(dc);
4528                     gen_helper_save(cpu_env);
4529                     gen_movl_TN_reg(rd, cpu_dst);
4530                     break;
4531                 case 0x3d:      /* restore */
4532                     save_state(dc);
4533                     gen_helper_restore(cpu_env);
4534                     gen_movl_TN_reg(rd, cpu_dst);
4535                     break;
4536 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4537                 case 0x3e:      /* V9 done/retry */
4538                     {
4539                         switch (rd) {
4540                         case 0:
4541                             if (!supervisor(dc))
4542                                 goto priv_insn;
4543                             dc->npc = DYNAMIC_PC;
4544                             dc->pc = DYNAMIC_PC;
4545                             gen_helper_done(cpu_env);
4546                             goto jmp_insn;
4547                         case 1:
4548                             if (!supervisor(dc))
4549                                 goto priv_insn;
4550                             dc->npc = DYNAMIC_PC;
4551                             dc->pc = DYNAMIC_PC;
4552                             gen_helper_retry(cpu_env);
4553                             goto jmp_insn;
4554                         default:
4555                             goto illegal_insn;
4556                         }
4557                     }
4558                     break;
4559 #endif
4560                 default:
4561                     goto illegal_insn;
4562                 }
4563             }
4564             break;
4565         }
4566         break;
4567     case 3:                     /* load/store instructions */
4568         {
4569             unsigned int xop = GET_FIELD(insn, 7, 12);
4570
4571             /* flush pending conditional evaluations before exposing
4572                cpu state */
4573             if (dc->cc_op != CC_OP_FLAGS) {
4574                 dc->cc_op = CC_OP_FLAGS;
4575                 gen_helper_compute_psr(cpu_env);
4576             }
4577             cpu_src1 = get_src1(insn, cpu_src1);
4578             if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4579                 rs2 = GET_FIELD(insn, 27, 31);
4580                 gen_movl_reg_TN(rs2, cpu_src2);
4581                 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4582             } else if (IS_IMM) {     /* immediate */
4583                 simm = GET_FIELDs(insn, 19, 31);
4584                 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4585             } else {            /* register */
4586                 rs2 = GET_FIELD(insn, 27, 31);
4587                 if (rs2 != 0) {
4588                     gen_movl_reg_TN(rs2, cpu_src2);
4589                     tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4590                 } else
4591                     tcg_gen_mov_tl(cpu_addr, cpu_src1);
4592             }
4593             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4594                 (xop > 0x17 && xop <= 0x1d ) ||
4595                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4596                 switch (xop) {
4597                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4598                     gen_address_mask(dc, cpu_addr);
4599                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4600                     break;
4601                 case 0x1:       /* ldub, load unsigned byte */
4602                     gen_address_mask(dc, cpu_addr);
4603                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4604                     break;
4605                 case 0x2:       /* lduh, load unsigned halfword */
4606                     gen_address_mask(dc, cpu_addr);
4607                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4608                     break;
                case 0x3:       /* ldd, load double word */
                    /* LDD requires an even rd: the loaded doubleword is
                       split across the register pair rd (high word) and
                       rd+1 (low word).  An odd rd traps as illegal.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc);
                        r_const = tcg_const_i32(7);
                        /* XXX remove alignment check */
                        gen_helper_check_align(cpu_env, cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        /* One 64-bit load, then split: low 32 bits are
                           masked and written to rd+1 here; the high 32
                           bits are left in cpu_val, which is presumably
                           stored to rd by the common code after this
                           switch -- TODO confirm against the store path
                           below this view.  The 0xffffffff masks keep
                           each half zero-extended on 64-bit targets.  */
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
4630                 case 0x9:       /* ldsb, load signed byte */
4631                     gen_address_mask(dc, cpu_addr);
4632                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4633                     break;
4634                 case 0xa:       /* ldsh, load signed halfword */
4635                     gen_address_mask(dc, cpu_addr);
4636                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4637                     break;
4638                 case 0xd:       /* ldstub -- XXX: should be atomically */
4639                     {
4640                         TCGv r_const;
4641
4642                         gen_address_mask(dc, cpu_addr);
4643                         tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4644                         r_const = tcg_const_tl(0xff);
4645                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4646                         tcg_temp_free(r_const);
4647                     }
4648                     break;
4649                 case 0x0f:      /* swap, swap register with memory. Also
4650                                    atomically */
4651                     CHECK_IU_FEATURE(dc, SWAP);
4652                     gen_movl_reg_TN(rd, cpu_val);
4653                     gen_address_mask(dc, cpu_addr);
4654                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4655                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4656                     tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4657                     break;
4658 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4659                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4660 #ifndef TARGET_SPARC64
4661                     if (IS_IMM)
4662                         goto illegal_insn;
4663                     if (!supervisor(dc))
4664                         goto priv_insn;
4665 #endif
4666                     save_state(dc);
4667                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4668                     break;
4669                 case 0x11:      /* lduba, load unsigned byte alternate */
4670 #ifndef TARGET_SPARC64
4671                     if (IS_IMM)
4672                         goto illegal_insn;
4673                     if (!supervisor(dc))
4674                         goto priv_insn;
4675 #endif
4676                     save_state(dc);
4677                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4678                     break;
4679                 case 0x12:      /* lduha, load unsigned halfword alternate */
4680 #ifndef TARGET_SPARC64
4681                     if (IS_IMM)
4682                         goto illegal_insn;
4683                     if (!supervisor(dc))
4684                         goto priv_insn;
4685 #endif
4686                     save_state(dc);
4687                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4688                     break;
4689                 case 0x13:      /* ldda, load double word alternate */
4690 #ifndef TARGET_SPARC64
4691                     if (IS_IMM)
4692                         goto illegal_insn;
4693                     if (!supervisor(dc))
4694                         goto priv_insn;
4695 #endif
4696                     if (rd & 1)
4697                         goto illegal_insn;
4698                     save_state(dc);
4699                     gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4700                     goto skip_move;
4701                 case 0x19:      /* ldsba, load signed byte alternate */
4702 #ifndef TARGET_SPARC64
4703                     if (IS_IMM)
4704                         goto illegal_insn;
4705                     if (!supervisor(dc))
4706                         goto priv_insn;
4707 #endif
4708                     save_state(dc);
4709                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4710                     break;
4711                 case 0x1a:      /* ldsha, load signed halfword alternate */
4712 #ifndef TARGET_SPARC64
4713                     if (IS_IMM)
4714                         goto illegal_insn;
4715                     if (!supervisor(dc))
4716                         goto priv_insn;
4717 #endif
4718                     save_state(dc);
4719                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4720                     break;
4721                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
4722 #ifndef TARGET_SPARC64
4723                     if (IS_IMM)
4724                         goto illegal_insn;
4725                     if (!supervisor(dc))
4726                         goto priv_insn;
4727 #endif
4728                     save_state(dc);
4729                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4730                     break;
4731                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
4732                                    atomically */
4733                     CHECK_IU_FEATURE(dc, SWAP);
4734 #ifndef TARGET_SPARC64
4735                     if (IS_IMM)
4736                         goto illegal_insn;
4737                     if (!supervisor(dc))
4738                         goto priv_insn;
4739 #endif
4740                     save_state(dc);
4741                     gen_movl_reg_TN(rd, cpu_val);
4742                     gen_swap_asi(cpu_val, cpu_addr, insn);
4743                     break;
4744
4745 #ifndef TARGET_SPARC64
4746                 case 0x30: /* ldc */
4747                 case 0x31: /* ldcsr */
4748                 case 0x33: /* lddc */
4749                     goto ncp_insn;
4750 #endif
4751 #endif
4752 #ifdef TARGET_SPARC64
4753                 case 0x08: /* V9 ldsw */
4754                     gen_address_mask(dc, cpu_addr);
4755                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4756                     break;
4757                 case 0x0b: /* V9 ldx */
4758                     gen_address_mask(dc, cpu_addr);
4759                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4760                     break;
4761                 case 0x18: /* V9 ldswa */
4762                     save_state(dc);
4763                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4764                     break;
4765                 case 0x1b: /* V9 ldxa */
4766                     save_state(dc);
4767                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4768                     break;
4769                 case 0x2d: /* V9 prefetch, no effect */
4770                     goto skip_move;
4771                 case 0x30: /* V9 ldfa */
4772                     if (gen_trap_ifnofpu(dc)) {
4773                         goto jmp_insn;
4774                     }
4775                     save_state(dc);
4776                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4777                     gen_update_fprs_dirty(rd);
4778                     goto skip_move;
4779                 case 0x33: /* V9 lddfa */
4780                     if (gen_trap_ifnofpu(dc)) {
4781                         goto jmp_insn;
4782                     }
4783                     save_state(dc);
4784                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4785                     gen_update_fprs_dirty(DFPREG(rd));
4786                     goto skip_move;
4787                 case 0x3d: /* V9 prefetcha, no effect */
4788                     goto skip_move;
4789                 case 0x32: /* V9 ldqfa */
4790                     CHECK_FPU_FEATURE(dc, FLOAT128);
4791                     if (gen_trap_ifnofpu(dc)) {
4792                         goto jmp_insn;
4793                     }
4794                     save_state(dc);
4795                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4796                     gen_update_fprs_dirty(QFPREG(rd));
4797                     goto skip_move;
4798 #endif
4799                 default:
4800                     goto illegal_insn;
4801                 }
4802                 gen_movl_TN_reg(rd, cpu_val);
4803 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4804             skip_move: ;
4805 #endif
4806             } else if (xop >= 0x20 && xop < 0x24) {
4807                 if (gen_trap_ifnofpu(dc)) {
4808                     goto jmp_insn;
4809                 }
4810                 save_state(dc);
4811                 switch (xop) {
4812                 case 0x20:      /* ldf, load fpreg */
4813                     gen_address_mask(dc, cpu_addr);
4814                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4815                     cpu_dst_32 = gen_dest_fpr_F();
4816                     tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4817                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4818                     break;
4819                 case 0x21:      /* ldfsr, V9 ldxfsr */
4820 #ifdef TARGET_SPARC64
4821                     gen_address_mask(dc, cpu_addr);
4822                     if (rd == 1) {
4823                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4824                         gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4825                     } else {
4826                         tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4827                         tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4828                         gen_helper_ldfsr(cpu_env, cpu_tmp32);
4829                     }
4830 #else
4831                     {
4832                         tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4833                         gen_helper_ldfsr(cpu_env, cpu_tmp32);
4834                     }
4835 #endif
4836                     break;
4837                 case 0x22:      /* ldqf, load quad fpreg */
4838                     {
4839                         TCGv_i32 r_const;
4840
4841                         CHECK_FPU_FEATURE(dc, FLOAT128);
4842                         r_const = tcg_const_i32(dc->mem_idx);
4843                         gen_address_mask(dc, cpu_addr);
4844                         gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4845                         tcg_temp_free_i32(r_const);
4846                         gen_op_store_QT0_fpr(QFPREG(rd));
4847                         gen_update_fprs_dirty(QFPREG(rd));
4848                     }
4849                     break;
4850                 case 0x23:      /* lddf, load double fpreg */
4851                     gen_address_mask(dc, cpu_addr);
4852                     cpu_dst_64 = gen_dest_fpr_D();
4853                     tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4854                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4855                     break;
4856                 default:
4857                     goto illegal_insn;
4858                 }
4859             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4860                        xop == 0xe || xop == 0x1e) {
4861                 gen_movl_reg_TN(rd, cpu_val);
4862                 switch (xop) {
4863                 case 0x4: /* st, store word */
4864                     gen_address_mask(dc, cpu_addr);
4865                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4866                     break;
4867                 case 0x5: /* stb, store byte */
4868                     gen_address_mask(dc, cpu_addr);
4869                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4870                     break;
4871                 case 0x6: /* sth, store halfword */
4872                     gen_address_mask(dc, cpu_addr);
4873                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4874                     break;
4875                 case 0x7: /* std, store double word */
4876                     if (rd & 1)
4877                         goto illegal_insn;
4878                     else {
4879                         TCGv_i32 r_const;
4880
4881                         save_state(dc);
4882                         gen_address_mask(dc, cpu_addr);
4883                         r_const = tcg_const_i32(7);
4884                         /* XXX remove alignment check */
4885                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4886                         tcg_temp_free_i32(r_const);
4887                         gen_movl_reg_TN(rd + 1, cpu_tmp0);
4888                         tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4889                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4890                     }
4891                     break;
4892 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4893                 case 0x14: /* sta, V9 stwa, store word alternate */
4894 #ifndef TARGET_SPARC64
4895                     if (IS_IMM)
4896                         goto illegal_insn;
4897                     if (!supervisor(dc))
4898                         goto priv_insn;
4899 #endif
4900                     save_state(dc);
4901                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4902                     dc->npc = DYNAMIC_PC;
4903                     break;
4904                 case 0x15: /* stba, store byte alternate */
4905 #ifndef TARGET_SPARC64
4906                     if (IS_IMM)
4907                         goto illegal_insn;
4908                     if (!supervisor(dc))
4909                         goto priv_insn;
4910 #endif
4911                     save_state(dc);
4912                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4913                     dc->npc = DYNAMIC_PC;
4914                     break;
4915                 case 0x16: /* stha, store halfword alternate */
4916 #ifndef TARGET_SPARC64
4917                     if (IS_IMM)
4918                         goto illegal_insn;
4919                     if (!supervisor(dc))
4920                         goto priv_insn;
4921 #endif
4922                     save_state(dc);
4923                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4924                     dc->npc = DYNAMIC_PC;
4925                     break;
4926                 case 0x17: /* stda, store double word alternate */
4927 #ifndef TARGET_SPARC64
4928                     if (IS_IMM)
4929                         goto illegal_insn;
4930                     if (!supervisor(dc))
4931                         goto priv_insn;
4932 #endif
4933                     if (rd & 1)
4934                         goto illegal_insn;
4935                     else {
4936                         save_state(dc);
4937                         gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4938                     }
4939                     break;
4940 #endif
4941 #ifdef TARGET_SPARC64
4942                 case 0x0e: /* V9 stx */
4943                     gen_address_mask(dc, cpu_addr);
4944                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4945                     break;
4946                 case 0x1e: /* V9 stxa */
4947                     save_state(dc);
4948                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
4949                     dc->npc = DYNAMIC_PC;
4950                     break;
4951 #endif
4952                 default:
4953                     goto illegal_insn;
4954                 }
4955             } else if (xop > 0x23 && xop < 0x28) {
4956                 if (gen_trap_ifnofpu(dc)) {
4957                     goto jmp_insn;
4958                 }
4959                 save_state(dc);
4960                 switch (xop) {
4961                 case 0x24: /* stf, store fpreg */
4962                     gen_address_mask(dc, cpu_addr);
4963                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
4964                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
4965                     tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4966                     break;
4967                 case 0x25: /* stfsr, V9 stxfsr */
4968 #ifdef TARGET_SPARC64
4969                     gen_address_mask(dc, cpu_addr);
4970                     tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
4971                     if (rd == 1)
4972                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4973                     else
4974                         tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4975 #else
4976                     tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
4977                     tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4978 #endif
4979                     break;
4980                 case 0x26:
4981 #ifdef TARGET_SPARC64
4982                     /* V9 stqf, store quad fpreg */
4983                     {
4984                         TCGv_i32 r_const;
4985
4986                         CHECK_FPU_FEATURE(dc, FLOAT128);
4987                         gen_op_load_fpr_QT0(QFPREG(rd));
4988                         r_const = tcg_const_i32(dc->mem_idx);
4989                         gen_address_mask(dc, cpu_addr);
4990                         gen_helper_stqf(cpu_env, cpu_addr, r_const);
4991                         tcg_temp_free_i32(r_const);
4992                     }
4993                     break;
4994 #else /* !TARGET_SPARC64 */
4995                     /* stdfq, store floating point queue */
4996 #if defined(CONFIG_USER_ONLY)
4997                     goto illegal_insn;
4998 #else
4999                     if (!supervisor(dc))
5000                         goto priv_insn;
5001                     if (gen_trap_ifnofpu(dc)) {
5002                         goto jmp_insn;
5003                     }
5004                     goto nfq_insn;
5005 #endif
5006 #endif
5007                 case 0x27: /* stdf, store double fpreg */
5008                     gen_address_mask(dc, cpu_addr);
5009                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5010                     tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5011                     break;
5012                 default:
5013                     goto illegal_insn;
5014                 }
5015             } else if (xop > 0x33 && xop < 0x3f) {
5016                 save_state(dc);
5017                 switch (xop) {
5018 #ifdef TARGET_SPARC64
5019                 case 0x34: /* V9 stfa */
5020                     if (gen_trap_ifnofpu(dc)) {
5021                         goto jmp_insn;
5022                     }
5023                     gen_stf_asi(cpu_addr, insn, 4, rd);
5024                     break;
5025                 case 0x36: /* V9 stqfa */
5026                     {
5027                         TCGv_i32 r_const;
5028
5029                         CHECK_FPU_FEATURE(dc, FLOAT128);
5030                         if (gen_trap_ifnofpu(dc)) {
5031                             goto jmp_insn;
5032                         }
5033                         r_const = tcg_const_i32(7);
5034                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
5035                         tcg_temp_free_i32(r_const);
5036                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5037                     }
5038                     break;
5039                 case 0x37: /* V9 stdfa */
5040                     if (gen_trap_ifnofpu(dc)) {
5041                         goto jmp_insn;
5042                     }
5043                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5044                     break;
5045                 case 0x3c: /* V9 casa */
5046                     gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5047                     gen_movl_TN_reg(rd, cpu_val);
5048                     break;
5049                 case 0x3e: /* V9 casxa */
5050                     gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5051                     gen_movl_TN_reg(rd, cpu_val);
5052                     break;
5053 #else
5054                 case 0x34: /* stc */
5055                 case 0x35: /* stcsr */
5056                 case 0x36: /* stdcq */
5057                 case 0x37: /* stdc */
5058                     goto ncp_insn;
5059 #endif
5060                 default:
5061                     goto illegal_insn;
5062                 }
5063             } else
5064                 goto illegal_insn;
5065         }
5066         break;
5067     }
5068     /* default case for non jump instructions */
5069     if (dc->npc == DYNAMIC_PC) {
5070         dc->pc = DYNAMIC_PC;
5071         gen_op_next_insn();
5072     } else if (dc->npc == JUMP_PC) {
5073         /* we can do a static jump */
5074         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5075         dc->is_br = 1;
5076     } else {
5077         dc->pc = dc->npc;
5078         dc->npc = dc->npc + 4;
5079     }
5080  jmp_insn:
5081     goto egress;
5082  illegal_insn:
5083     {
5084         TCGv_i32 r_const;
5085
5086         save_state(dc);
5087         r_const = tcg_const_i32(TT_ILL_INSN);
5088         gen_helper_raise_exception(cpu_env, r_const);
5089         tcg_temp_free_i32(r_const);
5090         dc->is_br = 1;
5091     }
5092     goto egress;
5093  unimp_flush:
5094     {
5095         TCGv_i32 r_const;
5096
5097         save_state(dc);
5098         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5099         gen_helper_raise_exception(cpu_env, r_const);
5100         tcg_temp_free_i32(r_const);
5101         dc->is_br = 1;
5102     }
5103     goto egress;
5104 #if !defined(CONFIG_USER_ONLY)
5105  priv_insn:
5106     {
5107         TCGv_i32 r_const;
5108
5109         save_state(dc);
5110         r_const = tcg_const_i32(TT_PRIV_INSN);
5111         gen_helper_raise_exception(cpu_env, r_const);
5112         tcg_temp_free_i32(r_const);
5113         dc->is_br = 1;
5114     }
5115     goto egress;
5116 #endif
5117  nfpu_insn:
5118     save_state(dc);
5119     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5120     dc->is_br = 1;
5121     goto egress;
5122 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5123  nfq_insn:
5124     save_state(dc);
5125     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5126     dc->is_br = 1;
5127     goto egress;
5128 #endif
5129 #ifndef TARGET_SPARC64
5130  ncp_insn:
5131     {
5132         TCGv r_const;
5133
5134         save_state(dc);
5135         r_const = tcg_const_i32(TT_NCP_INSN);
5136         gen_helper_raise_exception(cpu_env, r_const);
5137         tcg_temp_free(r_const);
5138         dc->is_br = 1;
5139     }
5140     goto egress;
5141 #endif
5142  egress:
5143     tcg_temp_free(cpu_tmp1);
5144     tcg_temp_free(cpu_tmp2);
5145     if (dc->n_t32 != 0) {
5146         int i;
5147         for (i = dc->n_t32 - 1; i >= 0; --i) {
5148             tcg_temp_free_i32(dc->t32[i]);
5149         }
5150         dc->n_t32 = 0;
5151     }
5152 }
5153
/*
 * Translate a block of SPARC guest code into TCG ops.
 *
 * tb:  the TranslationBlock to fill (tb->pc / tb->cs_base / tb->flags
 *      describe the guest starting state; cs_base carries the NPC).
 * spc: non-zero when re-translating only to rebuild the gen_opc_*
 *      search tables that map op positions back to guest PC/NPC
 *      (used later by restore_state_to_opc).
 * env: CPU state, consulted for mmu index, breakpoints and features.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    unsigned int insn;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* On SPARC the delay-slot successor (NPC) is part of the CPU state;
       it is passed to the TB through cs_base.  */
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* Stop and emit a debug exception if a breakpoint sits on the
           current guest PC.  */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc);
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Record the guest PC/NPC corresponding to each op position
               so exceptions can restore precise state.  */
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        insn = cpu_ldl_code(env, dc->pc);

        /* Per-instruction scratch temporaries shared with
           disas_sparc_insn through file-scope globals.  */
        cpu_tmp0 = tcg_temp_new();
        cpu_tmp32 = tcg_temp_new_i32();
        cpu_tmp64 = tcg_temp_new_i64();
        cpu_dst = tcg_temp_new();
        cpu_val = tcg_temp_new();
        cpu_addr = tcg_temp_new();

        disas_sparc_insn(dc, insn);
        num_insns++;

        tcg_temp_free(cpu_addr);
        tcg_temp_free(cpu_val);
        tcg_temp_free(cpu_dst);
        tcg_temp_free_i64(cpu_tmp64);
        tcg_temp_free_i32(cpu_tmp32);
        tcg_temp_free(cpu_tmp0);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            /* PC or NPC only known at run time: store what we can and
               exit to the main loop without chaining.  */
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Pad the remainder of the search tables.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        /* Export the conditional-branch targets for restore_state_to_opc.  */
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
5293
5294 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5295 {
5296     gen_intermediate_code_internal(tb, 0, env);
5297 }
5298
5299 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5300 {
5301     gen_intermediate_code_internal(tb, 1, env);
5302 }
5303
/*
 * One-time registration of the TCG globals that mirror SPARC CPU state
 * (condition codes, PC/NPC, Y, general and floating point registers,
 * plus the SPARC64-only or SPARC32-only control registers).  Subsequent
 * calls are no-ops thanks to the 'inited' latch.
 */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    /* Names shown by the TCG dumper for g1..g7; g0 is hardwired to
       zero and gets no backing global.  */
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    /* FP registers are held as double-width (i64) globals, hence the
       even-numbered names f0, f2, ... f62.  */
    static const char * const fregnames[32] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUSPARCState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUSPARCState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUSPARCState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUSPARCState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUSPARCState, softint),
                                             "softint");
#else
        /* SPARC32 only: window invalid mask.  */
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUSPARCState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
                                     "tbr");
#endif
        /* Start at 1: g0 is always zero and has no backing storage.  */
        for (i = 1; i < 8; i++) {
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUSPARCState, gregs[i]),
                                              gregnames[i]);
        }
        for (i = 0; i < TARGET_DPREGS; i++) {
            cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                                offsetof(CPUSPARCState, fpr[i]),
                                                fregnames[i]);
        }

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
5409
5410 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5411 {
5412     target_ulong npc;
5413     env->pc = gen_opc_pc[pc_pos];
5414     npc = gen_opc_npc[pc_pos];
5415     if (npc == 1) {
5416         /* dynamic NPC: already stored */
5417     } else if (npc == 2) {
5418         /* jump PC: use 'cond' and the jump targets of the translation */
5419         if (env->cond) {
5420             env->npc = gen_opc_jump_pc[0];
5421         } else {
5422             env->npc = gen_opc_jump_pc[1];
5423         }
5424     } else {
5425         env->npc = npc;
5426     }
5427
5428     /* flush pending conditional evaluations before exposing cpu state */
5429     if (CC_OP != CC_OP_FLAGS) {
5430         helper_compute_psr(env);
5431     }
5432 }