1 /*
2  *  SH4 translation
3  *
4  *  Copyright (c) 2005 Samuel Tardieu
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18  */
19
20 #define DEBUG_DISAS
21 #define SH4_DEBUG_DISAS
22 //#define SH4_SINGLE_STEP
23
24 #include "cpu.h"
25 #include "disas.h"
26 #include "tcg-op.h"
27
28 #include "helper.h"
29 #define GEN_HELPER 1
30 #include "helper.h"
31
32 typedef struct DisasContext {
33     struct TranslationBlock *tb;
34     target_ulong pc;
35     uint32_t sr;
36     uint32_t fpscr;
37     uint16_t opcode;
38     uint32_t flags;
39     int bstate;
40     int memidx;
41     uint32_t delayed_pc;
42     int singlestep_enabled;
43     uint32_t features;
44     int has_movcal;
45 } DisasContext;
46
47 #if defined(CONFIG_USER_ONLY)
48 #define IS_USER(ctx) 1
49 #else
50 #define IS_USER(ctx) (!(ctx->sr & SR_MD))
51 #endif
52
53 enum {
54     BS_NONE     = 0, /* We leave the TB without reaching a branch or an
55                       * exception condition
56                       */
57     BS_STOP     = 1, /* We want to stop translation for any reason */
58     BS_BRANCH   = 2, /* We reached a branch condition     */
59     BS_EXCP     = 3, /* We reached an exception condition */
60 };
61
62 /* global register indexes */
63 static TCGv_ptr cpu_env;
64 static TCGv cpu_gregs[24];
65 static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
66 static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
67 static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_ldst;
68 static TCGv cpu_fregs[32];
69
70 /* internal register indexes */
71 static TCGv cpu_flags, cpu_delayed_pc;
72
73 static uint32_t gen_opc_hflags[OPC_BUF_SIZE];
74
75 #include "gen-icount.h"
76
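/* One-time construction of the TCG globals used by the translator: every
   architectural register that generated code touches (the general register
   banks, PC, SR, the FPU registers, ...) is bound to its CPUSH4State field
   so TCG can load and spill it automatically.  Guarded by done_init so it
   only runs once. */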
77 static void sh4_translate_init(void)
78 {
79     int i;
80     static int done_init = 0;
81     static const char * const gregnames[24] = {
82         "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
83         "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
84         "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
85         "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
86         "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
87     };
88     static const char * const fregnames[32] = {
89          "FPR0_BANK0",  "FPR1_BANK0",  "FPR2_BANK0",  "FPR3_BANK0",
90          "FPR4_BANK0",  "FPR5_BANK0",  "FPR6_BANK0",  "FPR7_BANK0",
91          "FPR8_BANK0",  "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
92         "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
93          "FPR0_BANK1",  "FPR1_BANK1",  "FPR2_BANK1",  "FPR3_BANK1",
94          "FPR4_BANK1",  "FPR5_BANK1",  "FPR6_BANK1",  "FPR7_BANK1",
95          "FPR8_BANK1",  "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
96         "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
97     };
98
99     if (done_init)
100         return;
101
102     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
103
104     for (i = 0; i < 24; i++)
105         cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
106                                               offsetof(CPUSH4State, gregs[i]),
107                                               gregnames[i]);
108
109     cpu_pc = tcg_global_mem_new_i32(TCG_AREG0,
110                                     offsetof(CPUSH4State, pc), "PC");
111     cpu_sr = tcg_global_mem_new_i32(TCG_AREG0,
112                                     offsetof(CPUSH4State, sr), "SR");
113     cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0,
114                                      offsetof(CPUSH4State, ssr), "SSR");
115     cpu_spc = tcg_global_mem_new_i32(TCG_AREG0,
116                                      offsetof(CPUSH4State, spc), "SPC");
117     cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0,
118                                      offsetof(CPUSH4State, gbr), "GBR");
119     cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0,
120                                      offsetof(CPUSH4State, vbr), "VBR");
121     cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0,
122                                      offsetof(CPUSH4State, sgr), "SGR");
123     cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0,
124                                      offsetof(CPUSH4State, dbr), "DBR");
125     cpu_mach = tcg_global_mem_new_i32(TCG_AREG0,
126                                       offsetof(CPUSH4State, mach), "MACH");
127     cpu_macl = tcg_global_mem_new_i32(TCG_AREG0,
128                                       offsetof(CPUSH4State, macl), "MACL");
129     cpu_pr = tcg_global_mem_new_i32(TCG_AREG0,
130                                     offsetof(CPUSH4State, pr), "PR");
131     cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
132                                        offsetof(CPUSH4State, fpscr), "FPSCR");
133     cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0,
134                                       offsetof(CPUSH4State, fpul), "FPUL");
135
136     cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
137                                        offsetof(CPUSH4State, flags), "_flags_");
138     cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
139                                             offsetof(CPUSH4State, delayed_pc),
140                                             "_delayed_pc_");
141     cpu_ldst = tcg_global_mem_new_i32(TCG_AREG0,
142                                       offsetof(CPUSH4State, ldst), "_ldst_");
143
144     for (i = 0; i < 32; i++)
145         cpu_fregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
146                                               offsetof(CPUSH4State, fregs[i]),
147                                               fregnames[i]);
148
149     /* register helpers */
150 #define GEN_HELPER 2
151 #include "helper.h"
152
153     done_init = 1;
154 }
155
156 void cpu_dump_state(CPUSH4State * env, FILE * f,
157                     int (*cpu_fprintf) (FILE * f, const char *fmt, ...),
158                     int flags)
159 {
160     int i;
161     cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
162                 env->pc, env->sr, env->pr, env->fpscr);
163     cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
164                 env->spc, env->ssr, env->gbr, env->vbr);
165     cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
166                 env->sgr, env->dbr, env->delayed_pc, env->fpul);
167     for (i = 0; i < 24; i += 4) {
168         cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
169                     i, env->gregs[i], i + 1, env->gregs[i + 1],
170                     i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
171     }
172     if (env->flags & DELAY_SLOT) {
173         cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
174                     env->delayed_pc);
175     } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
176         cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
177                     env->delayed_pc);
178     }
179 }
180
181 void cpu_state_reset(CPUSH4State *env)
182 {
183     if (qemu_loglevel_mask(CPU_LOG_RESET)) {
184         qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
185         log_cpu_state(env, 0);
186     }
187
188     memset(env, 0, offsetof(CPUSH4State, breakpoints));
189     tlb_flush(env, 1);
190
191     env->pc = 0xA0000000;
192 #if defined(CONFIG_USER_ONLY)
193     env->fpscr = FPSCR_PR; /* value for userspace according to the kernel */
194     set_float_rounding_mode(float_round_nearest_even, &env->fp_status); /* ?! */
195 #else
196     env->sr = SR_MD | SR_RB | SR_BL | SR_I3 | SR_I2 | SR_I1 | SR_I0;
197     env->fpscr = FPSCR_DN | FPSCR_RM_ZERO; /* CPU reset value according to SH4 manual */
198     set_float_rounding_mode(float_round_to_zero, &env->fp_status);
199     set_flush_to_zero(1, &env->fp_status);
200 #endif
201     set_default_nan_mode(1, &env->fp_status);
202 }
203
204 typedef struct {
205     const char *name;
206     int id;
207     uint32_t pvr;
208     uint32_t prr;
209     uint32_t cvr;
210     uint32_t features;
211 } sh4_def_t;
212
213 static sh4_def_t sh4_defs[] = {
214     {
215         .name = "SH7750R",
216         .id = SH_CPU_SH7750R,
217         .pvr = 0x00050000,
218         .prr = 0x00000100,
219         .cvr = 0x00110000,
220         .features = SH_FEATURE_BCR3_AND_BCR4,
221     }, {
222         .name = "SH7751R",
223         .id = SH_CPU_SH7751R,
224         .pvr = 0x04050005,
225         .prr = 0x00000113,
226         .cvr = 0x00110000,      /* Neutered caches, should be 0x20480000 */
227         .features = SH_FEATURE_BCR3_AND_BCR4,
228     }, {
229         .name = "SH7785",
230         .id = SH_CPU_SH7785,
231         .pvr = 0x10300700,
232         .prr = 0x00000200,
233         .cvr = 0x71440211,
234         .features = SH_FEATURE_SH4A,
235      },
236 };
237
238 static const sh4_def_t *cpu_sh4_find_by_name(const char *name)
239 {
240     int i;
241
242     if (strcasecmp(name, "any") == 0)
243         return &sh4_defs[0];
244
245     for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
246         if (strcasecmp(name, sh4_defs[i].name) == 0)
247             return &sh4_defs[i];
248
249     return NULL;
250 }
251
252 void sh4_cpu_list(FILE *f, fprintf_function cpu_fprintf)
253 {
254     int i;
255
256     for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
257         (*cpu_fprintf)(f, "%s\n", sh4_defs[i].name);
258 }
259
260 static void cpu_register(CPUSH4State *env, const sh4_def_t *def)
261 {
262     env->pvr = def->pvr;
263     env->prr = def->prr;
264     env->cvr = def->cvr;
265     env->id = def->id;
266 }
267
268 CPUSH4State *cpu_sh4_init(const char *cpu_model)
269 {
270     CPUSH4State *env;
271     const sh4_def_t *def;
272
273     def = cpu_sh4_find_by_name(cpu_model);
274     if (!def)
275         return NULL;
276     env = g_malloc0(sizeof(CPUSH4State));
277     env->features = def->features;
278     cpu_exec_init(env);
279     env->movcal_backup_tail = &(env->movcal_backup);
280     sh4_translate_init();
281     env->cpu_model_str = cpu_model;
282     cpu_state_reset(env);
283     cpu_register(env, def);
284     qemu_init_vcpu(env);
285     return env;
286 }
287
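/* Emit the exit of a TB towards 'dest'.  When the destination lies in the
   same guest page as the current TB and single-stepping is off, the pair
   tcg_gen_goto_tb(n) / tcg_gen_exit_tb(tb + n) uses QEMU's TB chaining:
   jump slot 'n' is left patchable and the (tb + n) return value lets the
   execution loop link it straight to the translation of 'dest', so later
   executions skip the loop.  Otherwise we exit with 0 and always return to
   the execution loop (calling the debug helper when single-stepping). */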
288 static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
289 {
290     TranslationBlock *tb;
291     tb = ctx->tb;
292
293     if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
294         !ctx->singlestep_enabled) {
295         /* Use a direct jump if in same page and singlestep not enabled */
296         tcg_gen_goto_tb(n);
297         tcg_gen_movi_i32(cpu_pc, dest);
298         tcg_gen_exit_tb((tcg_target_long)tb + n);
299     } else {
300         tcg_gen_movi_i32(cpu_pc, dest);
301         if (ctx->singlestep_enabled)
302             gen_helper_debug();
303         tcg_gen_exit_tb(0);
304     }
305 }
306
307 static void gen_jump(DisasContext * ctx)
308 {
309     if (ctx->delayed_pc == (uint32_t) - 1) {
310         /* Target is not statically known; it necessarily comes from a
311            delayed jump, as immediate jumps are conditional jumps */
312         tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
313         if (ctx->singlestep_enabled)
314             gen_helper_debug();
315         tcg_gen_exit_tb(0);
316     } else {
317         gen_goto_tb(ctx, 0, ctx->delayed_pc);
318     }
319 }
320
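/* Helper for the delayed conditional branches (bt/s and bf/s): record the
   branch target in cpu_delayed_pc and, when the T bit already selects the
   taken path, set DELAY_SLOT_TRUE in cpu_flags.  The flag is consumed by
   gen_delayed_conditional_jump() once the delay slot instruction has been
   translated. */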
321 static inline void gen_branch_slot(uint32_t delayed_pc, int t)
322 {
323     TCGv sr;
324     int label = gen_new_label();
325     tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
326     sr = tcg_temp_new();
327     tcg_gen_andi_i32(sr, cpu_sr, SR_T);
328     tcg_gen_brcondi_i32(t ? TCG_COND_EQ:TCG_COND_NE, sr, 0, label);
329     tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
330     gen_set_label(label);
331 }
332
333 /* Immediate conditional jump (bt or bf) */
334 static void gen_conditional_jump(DisasContext * ctx,
335                                  target_ulong ift, target_ulong ifnott)
336 {
337     int l1;
338     TCGv sr;
339
340     l1 = gen_new_label();
341     sr = tcg_temp_new();
342     tcg_gen_andi_i32(sr, cpu_sr, SR_T);
343     tcg_gen_brcondi_i32(TCG_COND_NE, sr, 0, l1);
344     gen_goto_tb(ctx, 0, ifnott);
345     gen_set_label(l1);
346     gen_goto_tb(ctx, 1, ift);
347 }
348
349 /* Delayed conditional jump (bt or bf) */
350 static void gen_delayed_conditional_jump(DisasContext * ctx)
351 {
352     int l1;
353     TCGv ds;
354
355     l1 = gen_new_label();
356     ds = tcg_temp_new();
357     tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
358     tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1);
359     gen_goto_tb(ctx, 1, ctx->pc + 2);
360     gen_set_label(l1);
361     tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
362     gen_jump(ctx);
363 }
364
365 static inline void gen_set_t(void)
366 {
367     tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
368 }
369
370 static inline void gen_clr_t(void)
371 {
372     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
373 }
374
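/* The SH-4 compare instructions deposit their boolean result in the T
   flag, which is bit 0 of SR; the helpers below therefore clear the old
   T bit and OR in the 0/1 value produced by setcond. */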
375 static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
376 {
377     TCGv t;
378
379     t = tcg_temp_new();
380     tcg_gen_setcond_i32(cond, t, t1, t0);
381     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
382     tcg_gen_or_i32(cpu_sr, cpu_sr, t);
383
384     tcg_temp_free(t);
385 }
386
387 static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
388 {
389     TCGv t;
390
391     t = tcg_temp_new();
392     tcg_gen_setcondi_i32(cond, t, t0, imm);
393     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
394     tcg_gen_or_i32(cpu_sr, cpu_sr, t);
395
396     tcg_temp_free(t);
397 }
398
399 static inline void gen_store_flags(uint32_t flags)
400 {
401     tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
402     tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
403 }
404
405 static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
406 {
407     TCGv tmp = tcg_temp_new();
408
409     p0 &= 0x1f;
410     p1 &= 0x1f;
411
412     tcg_gen_andi_i32(tmp, t1, (1 << p1));
413     tcg_gen_andi_i32(t0, t0, ~(1 << p0));
414     if (p0 < p1)
415         tcg_gen_shri_i32(tmp, tmp, p1 - p0);
416     else if (p0 > p1)
417         tcg_gen_shli_i32(tmp, tmp, p0 - p1);
418     tcg_gen_or_i32(t0, t0, tmp);
419
420     tcg_temp_free(tmp);
421 }
422
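/* A double-precision value occupies a pair of single-precision registers,
   with FPRn holding the high 32 bits and FPRn+1 the low 32 bits; the two
   helpers below pack and unpack such a pair into/out of a 64-bit TCG
   value. */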
423 static inline void gen_load_fpr64(TCGv_i64 t, int reg)
424 {
425     tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
426 }
427
428 static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
429 {
430     TCGv_i32 tmp = tcg_temp_new_i32();
431     tcg_gen_trunc_i64_i32(tmp, t);
432     tcg_gen_mov_i32(cpu_fregs[reg + 1], tmp);
433     tcg_gen_shri_i64(t, t, 32);
434     tcg_gen_trunc_i64_i32(tmp, t);
435     tcg_gen_mov_i32(cpu_fregs[reg], tmp);
436     tcg_temp_free_i32(tmp);
437 }
438
439 #define B3_0 (ctx->opcode & 0xf)
440 #define B6_4 ((ctx->opcode >> 4) & 0x7)
441 #define B7_4 ((ctx->opcode >> 4) & 0xf)
442 #define B7_0 (ctx->opcode & 0xff)
443 #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
444 #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
445   (ctx->opcode & 0xfff))
446 #define B11_8 ((ctx->opcode >> 8) & 0xf)
447 #define B15_12 ((ctx->opcode >> 12) & 0xf)
448
449 #define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
450                 (cpu_gregs[x + 16]) : (cpu_gregs[x]))
451
452 #define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
453                 ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
454
455 #define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
456 #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
457 #define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
458 #define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
459
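/* Worked decode example for the field-extraction macros above, assuming
   the standard SH-4 encoding: the 16-bit opcode 0x310c ("add r0,r1")
   splits as B15_12 = 0x3, B11_8 = 1 (Rn = R1), B7_4 = 0 (Rm = R0) and
   B3_0 = 0xc, so the "opcode & 0xf00f" switch in _decode_opc() matches
   case 0x300c and emits tcg_gen_add_i32(REG(1), REG(1), REG(0)). */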
460 #define CHECK_NOT_DELAY_SLOT \
461   if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))     \
462   {                                                           \
463       gen_helper_raise_slot_illegal_instruction();            \
464       ctx->bstate = BS_EXCP;                                  \
465       return;                                                 \
466   }
467
468 #define CHECK_PRIVILEGED                                        \
469   if (IS_USER(ctx)) {                                           \
470       if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
471          gen_helper_raise_slot_illegal_instruction();           \
472       } else {                                                  \
473          gen_helper_raise_illegal_instruction();                \
474       }                                                         \
475       ctx->bstate = BS_EXCP;                                    \
476       return;                                                   \
477   }
478
479 #define CHECK_FPU_ENABLED                                       \
480   if (ctx->flags & SR_FD) {                                     \
481       if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
482           gen_helper_raise_slot_fpu_disable();                  \
483       } else {                                                  \
484           gen_helper_raise_fpu_disable();                       \
485       }                                                         \
486       ctx->bstate = BS_EXCP;                                    \
487       return;                                                   \
488   }
489
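/* Translate the single instruction held in ctx->opcode.  The decoder first
   matches exact 16-bit opcodes, then opcode groups selected with the masks
   0xf000, 0xf00f, 0xff00, 0xf08f and 0xf0ff. */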
490 static void _decode_opc(DisasContext * ctx)
491 {
492     /* This code tries to make movca.l emulation sufficiently
493        accurate for Linux purposes.  This instruction writes
494        memory, and prior to that, always allocates a cache line.
495        It is used in two contexts:
496        - in memcpy, where data is copied in blocks, the first write
497        to a block uses movca.l for performance.
498        - in arch/sh/mm/cache-sh4.c, the movca.l + ocbi combination is
499        used to flush the cache. Here, the data written by movca.l is
500        never meant to reach memory, so the value written is just bogus.
501
502        To simulate this, we emulate movca.l as a normal store, but we
503        also remember the previous content of the location. If we see an
504        ocbi, we check whether a movca.l was previously done for that
505        address. If so, the write should not have hit memory, so we
506        restore the previous content. When we see an instruction that is
507        neither movca.l nor ocbi, the previous content is discarded.
508
509        To optimize, we only try to flush stores when we are at the start
510        of a TB, or if we already saw movca.l in this TB and did not
511        flush the stores yet.  */
512     if (ctx->has_movcal)
513         {
514           int opcode = ctx->opcode & 0xf0ff;
515           if (opcode != 0x0093 /* ocbi */
516               && opcode != 0x00c3 /* movca.l */)
517               {
518                   gen_helper_discard_movcal_backup ();
519                   ctx->has_movcal = 0;
520               }
521         }
522
523 #if 0
524     fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
525 #endif
526
527     switch (ctx->opcode) {
528     case 0x0019:                /* div0u */
529         tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
530         return;
531     case 0x000b:                /* rts */
532         CHECK_NOT_DELAY_SLOT
533         tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
534         ctx->flags |= DELAY_SLOT;
535         ctx->delayed_pc = (uint32_t) - 1;
536         return;
537     case 0x0028:                /* clrmac */
538         tcg_gen_movi_i32(cpu_mach, 0);
539         tcg_gen_movi_i32(cpu_macl, 0);
540         return;
541     case 0x0048:                /* clrs */
542         tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
543         return;
544     case 0x0008:                /* clrt */
545         gen_clr_t();
546         return;
547     case 0x0038:                /* ldtlb */
548         CHECK_PRIVILEGED
549         gen_helper_ldtlb();
550         return;
551     case 0x002b:                /* rte */
552         CHECK_PRIVILEGED
553         CHECK_NOT_DELAY_SLOT
554         tcg_gen_mov_i32(cpu_sr, cpu_ssr);
555         tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
556         ctx->flags |= DELAY_SLOT;
557         ctx->delayed_pc = (uint32_t) - 1;
558         return;
559     case 0x0058:                /* sets */
560         tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
561         return;
562     case 0x0018:                /* sett */
563         gen_set_t();
564         return;
565     case 0xfbfd:                /* frchg */
566         tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
567         ctx->bstate = BS_STOP;
568         return;
569     case 0xf3fd:                /* fschg */
570         tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
571         ctx->bstate = BS_STOP;
572         return;
573     case 0x0009:                /* nop */
574         return;
575     case 0x001b:                /* sleep */
576         CHECK_PRIVILEGED
577         gen_helper_sleep(tcg_const_i32(ctx->pc + 2));
578         return;
579     }
580
581     switch (ctx->opcode & 0xf000) {
582     case 0x1000:                /* mov.l Rm,@(disp,Rn) */
583         {
584             TCGv addr = tcg_temp_new();
585             tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
586             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
587             tcg_temp_free(addr);
588         }
589         return;
590     case 0x5000:                /* mov.l @(disp,Rm),Rn */
591         {
592             TCGv addr = tcg_temp_new();
593             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
594             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
595             tcg_temp_free(addr);
596         }
597         return;
598     case 0xe000:                /* mov #imm,Rn */
599         tcg_gen_movi_i32(REG(B11_8), B7_0s);
600         return;
601     case 0x9000:                /* mov.w @(disp,PC),Rn */
602         {
603             TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
604             tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
605             tcg_temp_free(addr);
606         }
607         return;
608     case 0xd000:                /* mov.l @(disp,PC),Rn */
609         {
610             TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
611             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
612             tcg_temp_free(addr);
613         }
614         return;
615     case 0x7000:                /* add #imm,Rn */
616         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
617         return;
618     case 0xa000:                /* bra disp */
619         CHECK_NOT_DELAY_SLOT
620         ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
621         tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
622         ctx->flags |= DELAY_SLOT;
623         return;
624     case 0xb000:                /* bsr disp */
625         CHECK_NOT_DELAY_SLOT
626         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
627         ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
628         tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
629         ctx->flags |= DELAY_SLOT;
630         return;
631     }
632
633     switch (ctx->opcode & 0xf00f) {
634     case 0x6003:                /* mov Rm,Rn */
635         tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
636         return;
637     case 0x2000:                /* mov.b Rm,@Rn */
638         tcg_gen_qemu_st8(REG(B7_4), REG(B11_8), ctx->memidx);
639         return;
640     case 0x2001:                /* mov.w Rm,@Rn */
641         tcg_gen_qemu_st16(REG(B7_4), REG(B11_8), ctx->memidx);
642         return;
643     case 0x2002:                /* mov.l Rm,@Rn */
644         tcg_gen_qemu_st32(REG(B7_4), REG(B11_8), ctx->memidx);
645         return;
646     case 0x6000:                /* mov.b @Rm,Rn */
647         tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
648         return;
649     case 0x6001:                /* mov.w @Rm,Rn */
650         tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
651         return;
652     case 0x6002:                /* mov.l @Rm,Rn */
653         tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
654         return;
655     case 0x2004:                /* mov.b Rm,@-Rn */
656         {
657             TCGv addr = tcg_temp_new();
658             tcg_gen_subi_i32(addr, REG(B11_8), 1);
659             tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);     /* might cause re-execution */
660             tcg_gen_mov_i32(REG(B11_8), addr);                  /* modify register status */
661             tcg_temp_free(addr);
662         }
663         return;
664     case 0x2005:                /* mov.w Rm,@-Rn */
665         {
666             TCGv addr = tcg_temp_new();
667             tcg_gen_subi_i32(addr, REG(B11_8), 2);
668             tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
669             tcg_gen_mov_i32(REG(B11_8), addr);
670             tcg_temp_free(addr);
671         }
672         return;
673     case 0x2006:                /* mov.l Rm,@-Rn */
674         {
675             TCGv addr = tcg_temp_new();
676             tcg_gen_subi_i32(addr, REG(B11_8), 4);
677             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
678             tcg_gen_mov_i32(REG(B11_8), addr);
                tcg_temp_free(addr);
679         }
680         return;
681     case 0x6004:                /* mov.b @Rm+,Rn */
682         tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
683         if ( B11_8 != B7_4 )
684                 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
685         return;
686     case 0x6005:                /* mov.w @Rm+,Rn */
687         tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
688         if ( B11_8 != B7_4 )
689                 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
690         return;
691     case 0x6006:                /* mov.l @Rm+,Rn */
692         tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
693         if ( B11_8 != B7_4 )
694                 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
695         return;
696     case 0x0004:                /* mov.b Rm,@(R0,Rn) */
697         {
698             TCGv addr = tcg_temp_new();
699             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
700             tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
701             tcg_temp_free(addr);
702         }
703         return;
704     case 0x0005:                /* mov.w Rm,@(R0,Rn) */
705         {
706             TCGv addr = tcg_temp_new();
707             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
708             tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
709             tcg_temp_free(addr);
710         }
711         return;
712     case 0x0006:                /* mov.l Rm,@(R0,Rn) */
713         {
714             TCGv addr = tcg_temp_new();
715             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
716             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
717             tcg_temp_free(addr);
718         }
719         return;
720     case 0x000c:                /* mov.b @(R0,Rm),Rn */
721         {
722             TCGv addr = tcg_temp_new();
723             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
724             tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
725             tcg_temp_free(addr);
726         }
727         return;
728     case 0x000d:                /* mov.w @(R0,Rm),Rn */
729         {
730             TCGv addr = tcg_temp_new();
731             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
732             tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
733             tcg_temp_free(addr);
734         }
735         return;
736     case 0x000e:                /* mov.l @(R0,Rm),Rn */
737         {
738             TCGv addr = tcg_temp_new();
739             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
740             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
741             tcg_temp_free(addr);
742         }
743         return;
744     case 0x6008:                /* swap.b Rm,Rn */
745         {
746             TCGv high, low;
747             high = tcg_temp_new();
748             tcg_gen_andi_i32(high, REG(B7_4), 0xffff0000);
749             low = tcg_temp_new();
750             tcg_gen_ext16u_i32(low, REG(B7_4));
751             tcg_gen_bswap16_i32(low, low);
752             tcg_gen_or_i32(REG(B11_8), high, low);
753             tcg_temp_free(low);
754             tcg_temp_free(high);
755         }
756         return;
757     case 0x6009:                /* swap.w Rm,Rn */
758         {
759             TCGv high, low;
760             high = tcg_temp_new();
761             tcg_gen_shli_i32(high, REG(B7_4), 16);
762             low = tcg_temp_new();
763             tcg_gen_shri_i32(low, REG(B7_4), 16);
764             tcg_gen_ext16u_i32(low, low);
765             tcg_gen_or_i32(REG(B11_8), high, low);
766             tcg_temp_free(low);
767             tcg_temp_free(high);
768         }
769         return;
770     case 0x200d:                /* xtrct Rm,Rn */
771         {
772             TCGv high, low;
773             high = tcg_temp_new();
774             tcg_gen_shli_i32(high, REG(B7_4), 16);
775             low = tcg_temp_new();
776             tcg_gen_shri_i32(low, REG(B11_8), 16);
777             tcg_gen_ext16u_i32(low, low);
778             tcg_gen_or_i32(REG(B11_8), high, low);
779             tcg_temp_free(low);
780             tcg_temp_free(high);
781         }
782         return;
783     case 0x300c:                /* add Rm,Rn */
784         tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
785         return;
786     case 0x300e:                /* addc Rm,Rn */
787         gen_helper_addc(REG(B11_8), REG(B7_4), REG(B11_8));
788         return;
789     case 0x300f:                /* addv Rm,Rn */
790         gen_helper_addv(REG(B11_8), REG(B7_4), REG(B11_8));
791         return;
792     case 0x2009:                /* and Rm,Rn */
793         tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
794         return;
795     case 0x3000:                /* cmp/eq Rm,Rn */
796         gen_cmp(TCG_COND_EQ, REG(B7_4), REG(B11_8));
797         return;
798     case 0x3003:                /* cmp/ge Rm,Rn */
799         gen_cmp(TCG_COND_GE, REG(B7_4), REG(B11_8));
800         return;
801     case 0x3007:                /* cmp/gt Rm,Rn */
802         gen_cmp(TCG_COND_GT, REG(B7_4), REG(B11_8));
803         return;
804     case 0x3006:                /* cmp/hi Rm,Rn */
805         gen_cmp(TCG_COND_GTU, REG(B7_4), REG(B11_8));
806         return;
807     case 0x3002:                /* cmp/hs Rm,Rn */
808         gen_cmp(TCG_COND_GEU, REG(B7_4), REG(B11_8));
809         return;
810     case 0x200c:                /* cmp/str Rm,Rn */
811         {
812             TCGv cmp1 = tcg_temp_new();
813             TCGv cmp2 = tcg_temp_new();
814             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
815             tcg_gen_xor_i32(cmp1, REG(B7_4), REG(B11_8));
816             tcg_gen_andi_i32(cmp2, cmp1, 0xff000000);
817             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
818             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
819             tcg_gen_andi_i32(cmp2, cmp1, 0x00ff0000);
820             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
821             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
822             tcg_gen_andi_i32(cmp2, cmp1, 0x0000ff00);
823             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
824             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
825             tcg_gen_andi_i32(cmp2, cmp1, 0x000000ff);
826             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
827             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
828             tcg_temp_free(cmp2);
829             tcg_temp_free(cmp1);
830         }
831         return;
832     case 0x2007:                /* div0s Rm,Rn */
833         {
834             gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31);        /* SR_Q */
835             gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31);         /* SR_M */
836             TCGv val = tcg_temp_new();
837             tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
838             gen_copy_bit_i32(cpu_sr, 0, val, 31);               /* SR_T */
839             tcg_temp_free(val);
840         }
841         return;
842     case 0x3004:                /* div1 Rm,Rn */
843         gen_helper_div1(REG(B11_8), REG(B7_4), REG(B11_8));
844         return;
845     case 0x300d:                /* dmuls.l Rm,Rn */
846         {
847             TCGv_i64 tmp1 = tcg_temp_new_i64();
848             TCGv_i64 tmp2 = tcg_temp_new_i64();
849
850             tcg_gen_ext_i32_i64(tmp1, REG(B7_4));
851             tcg_gen_ext_i32_i64(tmp2, REG(B11_8));
852             tcg_gen_mul_i64(tmp1, tmp1, tmp2);
853             tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
854             tcg_gen_shri_i64(tmp1, tmp1, 32);
855             tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
856
857             tcg_temp_free_i64(tmp2);
858             tcg_temp_free_i64(tmp1);
859         }
860         return;
861     case 0x3005:                /* dmulu.l Rm,Rn */
862         {
863             TCGv_i64 tmp1 = tcg_temp_new_i64();
864             TCGv_i64 tmp2 = tcg_temp_new_i64();
865
866             tcg_gen_extu_i32_i64(tmp1, REG(B7_4));
867             tcg_gen_extu_i32_i64(tmp2, REG(B11_8));
868             tcg_gen_mul_i64(tmp1, tmp1, tmp2);
869             tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
870             tcg_gen_shri_i64(tmp1, tmp1, 32);
871             tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
872
873             tcg_temp_free_i64(tmp2);
874             tcg_temp_free_i64(tmp1);
875         }
876         return;
877     case 0x600e:                /* exts.b Rm,Rn */
878         tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
879         return;
880     case 0x600f:                /* exts.w Rm,Rn */
881         tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
882         return;
883     case 0x600c:                /* extu.b Rm,Rn */
884         tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
885         return;
886     case 0x600d:                /* extu.w Rm,Rn */
887         tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
888         return;
889     case 0x000f:                /* mac.l @Rm+,@Rn+ */
890         {
891             TCGv arg0, arg1;
892             arg0 = tcg_temp_new();
893             tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
894             arg1 = tcg_temp_new();
895             tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
896             gen_helper_macl(arg0, arg1);
897             tcg_temp_free(arg1);
898             tcg_temp_free(arg0);
899             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
900             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
901         }
902         return;
903     case 0x400f:                /* mac.w @Rm+,@Rn+ */
904         {
905             TCGv arg0, arg1;
906             arg0 = tcg_temp_new();
907             tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
908             arg1 = tcg_temp_new();
909             tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
910             gen_helper_macw(arg0, arg1);
911             tcg_temp_free(arg1);
912             tcg_temp_free(arg0);
913             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
914             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
915         }
916         return;
917     case 0x0007:                /* mul.l Rm,Rn */
918         tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
919         return;
920     case 0x200f:                /* muls.w Rm,Rn */
921         {
922             TCGv arg0, arg1;
923             arg0 = tcg_temp_new();
924             tcg_gen_ext16s_i32(arg0, REG(B7_4));
925             arg1 = tcg_temp_new();
926             tcg_gen_ext16s_i32(arg1, REG(B11_8));
927             tcg_gen_mul_i32(cpu_macl, arg0, arg1);
928             tcg_temp_free(arg1);
929             tcg_temp_free(arg0);
930         }
931         return;
932     case 0x200e:                /* mulu.w Rm,Rn */
933         {
934             TCGv arg0, arg1;
935             arg0 = tcg_temp_new();
936             tcg_gen_ext16u_i32(arg0, REG(B7_4));
937             arg1 = tcg_temp_new();
938             tcg_gen_ext16u_i32(arg1, REG(B11_8));
939             tcg_gen_mul_i32(cpu_macl, arg0, arg1);
940             tcg_temp_free(arg1);
941             tcg_temp_free(arg0);
942         }
943         return;
944     case 0x600b:                /* neg Rm,Rn */
945         tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
946         return;
947     case 0x600a:                /* negc Rm,Rn */
948         {
949             TCGv t0, t1;
950             t0 = tcg_temp_new();
951             tcg_gen_neg_i32(t0, REG(B7_4));
952             t1 = tcg_temp_new();
953             tcg_gen_andi_i32(t1, cpu_sr, SR_T);
954             tcg_gen_sub_i32(REG(B11_8), t0, t1);
955             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
956             tcg_gen_setcondi_i32(TCG_COND_GTU, t1, t0, 0);
957             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
958             tcg_gen_setcond_i32(TCG_COND_GTU, t1, REG(B11_8), t0);
959             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
960             tcg_temp_free(t0);
961             tcg_temp_free(t1);
962         }
963         return;
964     case 0x6007:                /* not Rm,Rn */
965         tcg_gen_not_i32(REG(B11_8), REG(B7_4));
966         return;
967     case 0x200b:                /* or Rm,Rn */
968         tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
969         return;
970     case 0x400c:                /* shad Rm,Rn */
971         {
972             int label1 = gen_new_label();
973             int label2 = gen_new_label();
974             int label3 = gen_new_label();
975             int label4 = gen_new_label();
976             TCGv shift;
977             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
978             /* Rm positive, shift to the left */
979             shift = tcg_temp_new();
980             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
981             tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
982             tcg_temp_free(shift);
983             tcg_gen_br(label4);
984             /* Rm negative, shift to the right */
985             gen_set_label(label1);
986             shift = tcg_temp_new();
987             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
988             tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
989             tcg_gen_not_i32(shift, REG(B7_4));
990             tcg_gen_andi_i32(shift, shift, 0x1f);
991             tcg_gen_addi_i32(shift, shift, 1);
992             tcg_gen_sar_i32(REG(B11_8), REG(B11_8), shift);
993             tcg_temp_free(shift);
994             tcg_gen_br(label4);
995             /* Rm = -32 */
996             gen_set_label(label2);
997             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B11_8), 0, label3);
998             tcg_gen_movi_i32(REG(B11_8), 0);
999             tcg_gen_br(label4);
1000             gen_set_label(label3);
1001             tcg_gen_movi_i32(REG(B11_8), 0xffffffff);
1002             gen_set_label(label4);
1003         }
1004         return;
1005     case 0x400d:                /* shld Rm,Rn */
1006         {
1007             int label1 = gen_new_label();
1008             int label2 = gen_new_label();
1009             int label3 = gen_new_label();
1010             TCGv shift;
1011             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
1012             /* Rm positive, shift to the left */
1013             shift = tcg_temp_new();
1014             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
1015             tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
1016             tcg_temp_free(shift);
1017             tcg_gen_br(label3);
1018             /* Rm negative, shift to the right */
1019             gen_set_label(label1);
1020             shift = tcg_temp_new();
1021             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
1022             tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
1023             tcg_gen_not_i32(shift, REG(B7_4));
1024             tcg_gen_andi_i32(shift, shift, 0x1f);
1025             tcg_gen_addi_i32(shift, shift, 1);
1026             tcg_gen_shr_i32(REG(B11_8), REG(B11_8), shift);
1027             tcg_temp_free(shift);
1028             tcg_gen_br(label3);
1029             /* Rm = -32 */
1030             gen_set_label(label2);
1031             tcg_gen_movi_i32(REG(B11_8), 0);
1032             gen_set_label(label3);
1033         }
1034         return;
1035     case 0x3008:                /* sub Rm,Rn */
1036         tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1037         return;
1038     case 0x300a:                /* subc Rm,Rn */
1039         gen_helper_subc(REG(B11_8), REG(B7_4), REG(B11_8));
1040         return;
1041     case 0x300b:                /* subv Rm,Rn */
1042         gen_helper_subv(REG(B11_8), REG(B7_4), REG(B11_8));
1043         return;
1044     case 0x2008:                /* tst Rm,Rn */
1045         {
1046             TCGv val = tcg_temp_new();
1047             tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
1048             gen_cmp_imm(TCG_COND_EQ, val, 0);
1049             tcg_temp_free(val);
1050         }
1051         return;
1052     case 0x200a:                /* xor Rm,Rn */
1053         tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1054         return;
1055     case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
1056         CHECK_FPU_ENABLED
1057         if (ctx->fpscr & FPSCR_SZ) {
1058             TCGv_i64 fp = tcg_temp_new_i64();
1059             gen_load_fpr64(fp, XREG(B7_4));
1060             gen_store_fpr64(fp, XREG(B11_8));
1061             tcg_temp_free_i64(fp);
1062         } else {
1063             tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1064         }
1065         return;
1066     case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
1067         CHECK_FPU_ENABLED
1068         if (ctx->fpscr & FPSCR_SZ) {
1069             TCGv addr_hi = tcg_temp_new();
1070             int fr = XREG(B7_4);
1071             tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
1072             tcg_gen_qemu_st32(cpu_fregs[fr  ], REG(B11_8), ctx->memidx);
1073             tcg_gen_qemu_st32(cpu_fregs[fr+1], addr_hi,    ctx->memidx);
1074             tcg_temp_free(addr_hi);
1075         } else {
1076             tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], REG(B11_8), ctx->memidx);
1077         }
1078         return;
1079     case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
1080         CHECK_FPU_ENABLED
1081         if (ctx->fpscr & FPSCR_SZ) {
1082             TCGv addr_hi = tcg_temp_new();
1083             int fr = XREG(B11_8);
1084             tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1085             tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1086             tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1087             tcg_temp_free(addr_hi);
1088         } else {
1089             tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1090         }
1091         return;
1092     case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1093         CHECK_FPU_ENABLED
1094         if (ctx->fpscr & FPSCR_SZ) {
1095             TCGv addr_hi = tcg_temp_new();
1096             int fr = XREG(B11_8);
1097             tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1098             tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1099             tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1100             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
1101             tcg_temp_free(addr_hi);
1102         } else {
1103             tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1104             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
1105         }
1106         return;
1107     case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1108         CHECK_FPU_ENABLED
1109         if (ctx->fpscr & FPSCR_SZ) {
1110             TCGv addr = tcg_temp_new_i32();
1111             int fr = XREG(B7_4);
1112             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1113             tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1114             tcg_gen_subi_i32(addr, addr, 4);
1115             tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1116             tcg_gen_mov_i32(REG(B11_8), addr);
1117             tcg_temp_free(addr);
1118         } else {
1119             TCGv addr;
1120             addr = tcg_temp_new_i32();
1121             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1122             tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1123             tcg_gen_mov_i32(REG(B11_8), addr);
1124             tcg_temp_free(addr);
1125         }
1126         return;
1127     case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rn - FPSCR: Nothing */
1128         CHECK_FPU_ENABLED
1129         {
1130             TCGv addr = tcg_temp_new_i32();
1131             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
1132             if (ctx->fpscr & FPSCR_SZ) {
1133                 int fr = XREG(B11_8);
1134                 tcg_gen_qemu_ld32u(cpu_fregs[fr  ], addr, ctx->memidx);
1135                 tcg_gen_addi_i32(addr, addr, 4);
1136                 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
1137             } else {
1138                 tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], addr, ctx->memidx);
1139             }
1140             tcg_temp_free(addr);
1141         }
1142         return;
1143     case 0xf007: /* fmov {F,D,X}Rm,@(R0,Rn) - FPSCR: Nothing */
1144         CHECK_FPU_ENABLED
1145         {
1146             TCGv addr = tcg_temp_new();
1147             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
1148             if (ctx->fpscr & FPSCR_SZ) {
1149                 int fr = XREG(B7_4);
1150                 tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1151                 tcg_gen_addi_i32(addr, addr, 4);
1152                 tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1153             } else {
1154                 tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1155             }
1156             tcg_temp_free(addr);
1157         }
1158         return;
1159     case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1160     case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1161     case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1162     case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1163     case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1164     case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1165         {
1166             CHECK_FPU_ENABLED
1167             if (ctx->fpscr & FPSCR_PR) {
1168                 TCGv_i64 fp0, fp1;
1169
1170                 if (ctx->opcode & 0x0110)
1171                     break; /* illegal instruction */
1172                 fp0 = tcg_temp_new_i64();
1173                 fp1 = tcg_temp_new_i64();
1174                 gen_load_fpr64(fp0, DREG(B11_8));
1175                 gen_load_fpr64(fp1, DREG(B7_4));
1176                 switch (ctx->opcode & 0xf00f) {
1177                 case 0xf000:            /* fadd Rm,Rn */
1178                     gen_helper_fadd_DT(fp0, fp0, fp1);
1179                     break;
1180                 case 0xf001:            /* fsub Rm,Rn */
1181                     gen_helper_fsub_DT(fp0, fp0, fp1);
1182                     break;
1183                 case 0xf002:            /* fmul Rm,Rn */
1184                     gen_helper_fmul_DT(fp0, fp0, fp1);
1185                     break;
1186                 case 0xf003:            /* fdiv Rm,Rn */
1187                     gen_helper_fdiv_DT(fp0, fp0, fp1);
1188                     break;
1189                 case 0xf004:            /* fcmp/eq Rm,Rn */
1190                     gen_helper_fcmp_eq_DT(fp0, fp1);
1191                     return;
1192                 case 0xf005:            /* fcmp/gt Rm,Rn */
1193                     gen_helper_fcmp_gt_DT(fp0, fp1);
1194                     return;
1195                 }
1196                 gen_store_fpr64(fp0, DREG(B11_8));
1197                 tcg_temp_free_i64(fp0);
1198                 tcg_temp_free_i64(fp1);
1199             } else {
1200                 switch (ctx->opcode & 0xf00f) {
1201                 case 0xf000:            /* fadd Rm,Rn */
1202                     gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1203                     break;
1204                 case 0xf001:            /* fsub Rm,Rn */
1205                     gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1206                     break;
1207                 case 0xf002:            /* fmul Rm,Rn */
1208                     gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1209                     break;
1210                 case 0xf003:            /* fdiv Rm,Rn */
1211                     gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1212                     break;
1213                 case 0xf004:            /* fcmp/eq Rm,Rn */
1214                     gen_helper_fcmp_eq_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1215                     return;
1216                 case 0xf005:            /* fcmp/gt Rm,Rn */
1217                     gen_helper_fcmp_gt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1218                     return;
1219                 }
1220             }
1221         }
1222         return;
1223     case 0xf00e: /* fmac FR0,Rm,Rn */
1224         {
1225             CHECK_FPU_ENABLED
1226             if (ctx->fpscr & FPSCR_PR) {
1227                 break; /* illegal instruction */
1228             } else {
1229                 gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)],
1230                                    cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)], cpu_fregs[FREG(B11_8)]);
1231                 return;
1232             }
1233         }
1234     }
1235
1236     switch (ctx->opcode & 0xff00) {
1237     case 0xc900:                /* and #imm,R0 */
1238         tcg_gen_andi_i32(REG(0), REG(0), B7_0);
1239         return;
1240     case 0xcd00:                /* and.b #imm,@(R0,GBR) */
1241         {
1242             TCGv addr, val;
1243             addr = tcg_temp_new();
1244             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1245             val = tcg_temp_new();
1246             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1247             tcg_gen_andi_i32(val, val, B7_0);
1248             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1249             tcg_temp_free(val);
1250             tcg_temp_free(addr);
1251         }
1252         return;
1253     case 0x8b00:                /* bf label */
1254         CHECK_NOT_DELAY_SLOT
1255             gen_conditional_jump(ctx, ctx->pc + 2,
1256                                  ctx->pc + 4 + B7_0s * 2);
1257         ctx->bstate = BS_BRANCH;
1258         return;
1259     case 0x8f00:                /* bf/s label */
1260         CHECK_NOT_DELAY_SLOT
1261         gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
1262         ctx->flags |= DELAY_SLOT_CONDITIONAL;
1263         return;
1264     case 0x8900:                /* bt label */
1265         CHECK_NOT_DELAY_SLOT
1266             gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
1267                                  ctx->pc + 2);
1268         ctx->bstate = BS_BRANCH;
1269         return;
1270     case 0x8d00:                /* bt/s label */
1271         CHECK_NOT_DELAY_SLOT
1272         gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
1273         ctx->flags |= DELAY_SLOT_CONDITIONAL;
1274         return;
1275     case 0x8800:                /* cmp/eq #imm,R0 */
1276         gen_cmp_imm(TCG_COND_EQ, REG(0), B7_0s);
1277         return;
1278     case 0xc400:                /* mov.b @(disp,GBR),R0 */
1279         {
1280             TCGv addr = tcg_temp_new();
1281             tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1282             tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1283             tcg_temp_free(addr);
1284         }
1285         return;
1286     case 0xc500:                /* mov.w @(disp,GBR),R0 */
1287         {
1288             TCGv addr = tcg_temp_new();
1289             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1290             tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1291             tcg_temp_free(addr);
1292         }
1293         return;
1294     case 0xc600:                /* mov.l @(disp,GBR),R0 */
1295         {
1296             TCGv addr = tcg_temp_new();
1297             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1298             tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
1299             tcg_temp_free(addr);
1300         }
1301         return;
1302     case 0xc000:                /* mov.b R0,@(disp,GBR) */
1303         {
1304             TCGv addr = tcg_temp_new();
1305             tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1306             tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1307             tcg_temp_free(addr);
1308         }
1309         return;
1310     case 0xc100:                /* mov.w R0,@(disp,GBR) */
1311         {
1312             TCGv addr = tcg_temp_new();
1313             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1314             tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1315             tcg_temp_free(addr);
1316         }
1317         return;
1318     case 0xc200:                /* mov.l R0,@(disp,GBR) */
1319         {
1320             TCGv addr = tcg_temp_new();
1321             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1322             tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
1323             tcg_temp_free(addr);
1324         }
1325         return;
1326     case 0x8000:                /* mov.b R0,@(disp,Rn) */
1327         {
1328             TCGv addr = tcg_temp_new();
1329             tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1330             tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1331             tcg_temp_free(addr);
1332         }
1333         return;
1334     case 0x8100:                /* mov.w R0,@(disp,Rn) */
1335         {
1336             TCGv addr = tcg_temp_new();
1337             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1338             tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1339             tcg_temp_free(addr);
1340         }
1341         return;
1342     case 0x8400:                /* mov.b @(disp,Rn),R0 */
1343         {
1344             TCGv addr = tcg_temp_new();
1345             tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1346             tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1347             tcg_temp_free(addr);
1348         }
1349         return;
1350     case 0x8500:                /* mov.w @(disp,Rn),R0 */
1351         {
1352             TCGv addr = tcg_temp_new();
1353             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1354             tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1355             tcg_temp_free(addr);
1356         }
1357         return;
1358     case 0xc700:                /* mova @(disp,PC),R0 */
1359         tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
1360         return;
1361     case 0xcb00:                /* or #imm,R0 */
1362         tcg_gen_ori_i32(REG(0), REG(0), B7_0);
1363         return;
1364     case 0xcf00:                /* or.b #imm,@(R0,GBR) */
1365         {
1366             TCGv addr, val;
1367             addr = tcg_temp_new();
1368             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1369             val = tcg_temp_new();
1370             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1371             tcg_gen_ori_i32(val, val, B7_0);
1372             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1373             tcg_temp_free(val);
1374             tcg_temp_free(addr);
1375         }
1376         return;
1377     case 0xc300:                /* trapa #imm */
1378         {
1379             TCGv imm;
1380             CHECK_NOT_DELAY_SLOT
1381             imm = tcg_const_i32(B7_0);
1382             gen_helper_trapa(imm);
1383             tcg_temp_free(imm);
1384             ctx->bstate = BS_BRANCH;
1385         }
1386         return;
1387     case 0xc800:                /* tst #imm,R0 */
1388         {
1389             TCGv val = tcg_temp_new();
1390             tcg_gen_andi_i32(val, REG(0), B7_0);
1391             gen_cmp_imm(TCG_COND_EQ, val, 0);
1392             tcg_temp_free(val);
1393         }
1394         return;
1395     case 0xcc00:                /* tst.b #imm,@(R0,GBR) */
1396         {
1397             TCGv val = tcg_temp_new();
1398             tcg_gen_add_i32(val, REG(0), cpu_gbr);
1399             tcg_gen_qemu_ld8u(val, val, ctx->memidx);
1400             tcg_gen_andi_i32(val, val, B7_0);
1401             gen_cmp_imm(TCG_COND_EQ, val, 0);
1402             tcg_temp_free(val);
1403         }
1404         return;
1405     case 0xca00:                /* xor #imm,R0 */
1406         tcg_gen_xori_i32(REG(0), REG(0), B7_0);
1407         return;
1408     case 0xce00:                /* xor.b #imm,@(R0,GBR) */
1409         {
1410             TCGv addr, val;
1411             addr = tcg_temp_new();
1412             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1413             val = tcg_temp_new();
1414             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1415             tcg_gen_xori_i32(val, val, B7_0);
1416             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1417             tcg_temp_free(val);
1418             tcg_temp_free(addr);
1419         }
1420         return;
1421     }
1422
1423     switch (ctx->opcode & 0xf08f) {
1424     case 0x408e:                /* ldc Rm,Rn_BANK */
1425         CHECK_PRIVILEGED
1426         tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
1427         return;
1428     case 0x4087:                /* ldc.l @Rm+,Rn_BANK */
1429         CHECK_PRIVILEGED
1430         tcg_gen_qemu_ld32s(ALTREG(B6_4), REG(B11_8), ctx->memidx);
1431         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1432         return;
1433     case 0x0082:                /* stc Rm_BANK,Rn */
1434         CHECK_PRIVILEGED
1435         tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
1436         return;
1437     case 0x4083:                /* stc.l Rm_BANK,@-Rn */
1438         CHECK_PRIVILEGED
1439         {
1440             TCGv addr = tcg_temp_new();
1441             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1442             tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
1443             tcg_gen_mov_i32(REG(B11_8), addr);
1444             tcg_temp_free(addr);
1445         }
1446         return;
1447     }
1448
1449     switch (ctx->opcode & 0xf0ff) {
1450     case 0x0023:                /* braf Rn */
1451         CHECK_NOT_DELAY_SLOT
1452         tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
1453         ctx->flags |= DELAY_SLOT;
1454         ctx->delayed_pc = (uint32_t) - 1;
1455         return;
1456     case 0x0003:                /* bsrf Rn */
1457         CHECK_NOT_DELAY_SLOT
1458         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1459         tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
1460         ctx->flags |= DELAY_SLOT;
1461         ctx->delayed_pc = (uint32_t) - 1;
1462         return;
1463     case 0x4015:                /* cmp/pl Rn */
1464         gen_cmp_imm(TCG_COND_GT, REG(B11_8), 0);
1465         return;
1466     case 0x4011:                /* cmp/pz Rn */
1467         gen_cmp_imm(TCG_COND_GE, REG(B11_8), 0);
1468         return;
1469     case 0x4010:                /* dt Rn */
1470         tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
1471         gen_cmp_imm(TCG_COND_EQ, REG(B11_8), 0);
1472         return;
1473     case 0x402b:                /* jmp @Rn */
1474         CHECK_NOT_DELAY_SLOT
1475         tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1476         ctx->flags |= DELAY_SLOT;
1477         ctx->delayed_pc = (uint32_t) - 1;
1478         return;
1479     case 0x400b:                /* jsr @Rn */
1480         CHECK_NOT_DELAY_SLOT
1481         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1482         tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1483         ctx->flags |= DELAY_SLOT;
1484         ctx->delayed_pc = (uint32_t) - 1;
1485         return;
1486     case 0x400e:                /* ldc Rm,SR */
1487         CHECK_PRIVILEGED
1488         tcg_gen_andi_i32(cpu_sr, REG(B11_8), 0x700083f3);
1489         ctx->bstate = BS_STOP;
1490         return;
1491     case 0x4007:                /* ldc.l @Rm+,SR */
1492         CHECK_PRIVILEGED
1493         {
1494             TCGv val = tcg_temp_new();
1495             tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
1496             tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
1497             tcg_temp_free(val);
1498             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1499             ctx->bstate = BS_STOP;
1500         }
1501         return;
1502     case 0x0002:                /* stc SR,Rn */
1503         CHECK_PRIVILEGED
1504         tcg_gen_mov_i32(REG(B11_8), cpu_sr);
1505         return;
1506     case 0x4003:                /* stc SR,@-Rn */
1507         CHECK_PRIVILEGED
1508         {
1509             TCGv addr = tcg_temp_new();
1510             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1511             tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
1512             tcg_gen_mov_i32(REG(B11_8), addr);
1513             tcg_temp_free(addr);
1514         }
1515         return;
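     /* The LD/ST/LDST macros below expand into the case labels for the
        ldc/lds and stc/sts families: a plain register move plus the
        post-increment load (@Rm+) and pre-decrement store (@-Rn) memory
        forms, each preceded by the given check (privilege, FPU enable,
        feature test).  */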
1516 #define LD(reg,ldnum,ldpnum,prechk)             \
1517   case ldnum:                                                   \
1518     prechk                                                      \
1519     tcg_gen_mov_i32 (cpu_##reg, REG(B11_8));                    \
1520     return;                                                     \
1521   case ldpnum:                                                  \
1522     prechk                                                      \
1523     tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx);    \
1524     tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);                \
1525     return;
1526 #define ST(reg,stnum,stpnum,prechk)             \
1527   case stnum:                                                   \
1528     prechk                                                      \
1529     tcg_gen_mov_i32 (REG(B11_8), cpu_##reg);                    \
1530     return;                                                     \
1531   case stpnum:                                                  \
1532     prechk                                                      \
1533     {                                                           \
1534         TCGv addr = tcg_temp_new();                             \
1535         tcg_gen_subi_i32(addr, REG(B11_8), 4);                  \
1536         tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx);       \
1537         tcg_gen_mov_i32(REG(B11_8), addr);                      \
1538         tcg_temp_free(addr);                                    \
1539     }                                                           \
1540     return;
1541 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk)              \
1542         LD(reg,ldnum,ldpnum,prechk)                             \
1543         ST(reg,stnum,stpnum,prechk)
1544         LDST(gbr,  0x401e, 0x4017, 0x0012, 0x4013, {})
1545         LDST(vbr,  0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
1546         LDST(ssr,  0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
1547         LDST(spc,  0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
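             /* SGR can be stored (stc) with only the privilege check, but
                loading it (ldc Rm,SGR) exists only on SH-4A.  */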
1548         ST(sgr,  0x003a, 0x4032, CHECK_PRIVILEGED)
1549         LD(sgr,  0x403a, 0x4036, CHECK_PRIVILEGED if (!(ctx->features & SH_FEATURE_SH4A)) break;)
1550         LDST(dbr,  0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
1551         LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
1552         LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
1553         LDST(pr,   0x402a, 0x4026, 0x002a, 0x4022, {})
1554         LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
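         /* FPSCR is handled outside the LDST macro: loads go through
            gen_helper_ld_fpscr and stop translation, because the FPSCR bits
            cached in ctx->fpscr may no longer describe the FPU state.  */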
1555     case 0x406a:                /* lds Rm,FPSCR */
1556         CHECK_FPU_ENABLED
1557         gen_helper_ld_fpscr(REG(B11_8));
1558         ctx->bstate = BS_STOP;
1559         return;
1560     case 0x4066:                /* lds.l @Rm+,FPSCR */
1561         CHECK_FPU_ENABLED
1562         {
1563             TCGv addr = tcg_temp_new();
1564             tcg_gen_qemu_ld32s(addr, REG(B11_8), ctx->memidx);
1565             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1566             gen_helper_ld_fpscr(addr);
1567             tcg_temp_free(addr);
1568             ctx->bstate = BS_STOP;
1569         }
1570         return;
1571     case 0x006a:                /* sts FPSCR,Rn */
1572         CHECK_FPU_ENABLED
1573         tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
1574         return;
1575     case 0x4062:                /* sts FPSCR,@-Rn */
1576         CHECK_FPU_ENABLED
1577         {
1578             TCGv addr, val;
1579             val = tcg_temp_new();
1580             tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
1581             addr = tcg_temp_new();
1582             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1583             tcg_gen_qemu_st32(val, addr, ctx->memidx);
1584             tcg_gen_mov_i32(REG(B11_8), addr);
1585             tcg_temp_free(addr);
1586             tcg_temp_free(val);
1587         }
1588         return;
1589     case 0x00c3:                /* movca.l R0,@Rn */
1590         {
1591             TCGv val = tcg_temp_new();
1592             tcg_gen_qemu_ld32u(val, REG(B11_8), ctx->memidx);
1593             gen_helper_movcal(REG(B11_8), val);
1594             tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1595         }
1596         ctx->has_movcal = 1;
1597         return;
1598     case 0x40a9:
1599         /* MOVUA.L @Rm,R0 (Rm) -> R0
1600            Load non-boundary-aligned data */
1601         tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1602         return;
1603     case 0x40e9:
1604         /* MOVUA.L @Rm+,R0   (Rm) -> R0, Rm + 4 -> Rm
1605            Load non-boundary-aligned data */
1606         tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1607         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1608         return;
1609     case 0x0029:                /* movt Rn */
1610         tcg_gen_andi_i32(REG(B11_8), cpu_sr, SR_T);
1611         return;
1612     case 0x0073:
1613         /* MOVCO.L
1614                LDST -> T
1615                If (T == 1) R0 -> (Rn)
1616                0 -> LDST
1617         */
1618         if (ctx->features & SH_FEATURE_SH4A) {
1619             int label = gen_new_label();
1620             gen_clr_t();
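                 /* T has just been cleared and cpu_ldst is always 0 or 1,
                    so OR-ing it into SR sets T to the LDST value.  */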
1621             tcg_gen_or_i32(cpu_sr, cpu_sr, cpu_ldst);
1622             tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_ldst, 0, label);
1623             tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1624             gen_set_label(label);
1625             tcg_gen_movi_i32(cpu_ldst, 0);
1626             return;
1627         } else
1628             break;
1629     case 0x0063:
1630         /* MOVLI.L @Rm,R0
1631                1 -> LDST
1632                (Rm) -> R0
1633                When an interrupt or exception
1634                occurs, 0 -> LDST
1635         */
1636         if (ctx->features & SH_FEATURE_SH4A) {
1637             tcg_gen_movi_i32(cpu_ldst, 0);
1638             tcg_gen_qemu_ld32s(REG(0), REG(B11_8), ctx->memidx);
1639             tcg_gen_movi_i32(cpu_ldst, 1);
1640             return;
1641         } else
1642             break;
1643     case 0x0093:                /* ocbi @Rn */
1644         {
1645             gen_helper_ocbi(REG(B11_8));
1646         }
1647         return;
1648     case 0x00a3:                /* ocbp @Rn */
1649     case 0x00b3:                /* ocbwb @Rn */
1650         /* These instructions are supposed to do nothing in case of
1651            a cache miss. Given that we only partially emulate caches
1652            it is safe to simply ignore them. */
1653         return;
1654     case 0x0083:                /* pref @Rn */
1655         return;
1656     case 0x00d3:                /* prefi @Rn */
1657         if (ctx->features & SH_FEATURE_SH4A)
1658             return;
1659         else
1660             break;
1661     case 0x00e3:                /* icbi @Rn */
1662         if (ctx->features & SH_FEATURE_SH4A)
1663             return;
1664         else
1665             break;
1666     case 0x00ab:                /* synco */
1667         if (ctx->features & SH_FEATURE_SH4A)
1668             return;
1669         else
1670             break;
1671     case 0x4024:                /* rotcl Rn */
1672         {
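                 /* Rotate left through T: the old T bit becomes bit 0 of Rn
                    and the old bit 31 of Rn becomes the new T.  */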
1673             TCGv tmp = tcg_temp_new();
1674             tcg_gen_mov_i32(tmp, cpu_sr);
1675             gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1676             tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1677             gen_copy_bit_i32(REG(B11_8), 0, tmp, 0);
1678             tcg_temp_free(tmp);
1679         }
1680         return;
1681     case 0x4025:                /* rotcr Rn */
1682         {
1683             TCGv tmp = tcg_temp_new();
1684             tcg_gen_mov_i32(tmp, cpu_sr);
1685             gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1686             tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1687             gen_copy_bit_i32(REG(B11_8), 31, tmp, 0);
1688             tcg_temp_free(tmp);
1689         }
1690         return;
1691     case 0x4004:                /* rotl Rn */
1692         tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1);
1693         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1694         return;
1695     case 0x4005:                /* rotr Rn */
1696         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1697         tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1);
1698         return;
1699     case 0x4000:                /* shll Rn */
1700     case 0x4020:                /* shal Rn */
1701         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1702         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1703         return;
1704     case 0x4021:                /* shar Rn */
1705         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1706         tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
1707         return;
1708     case 0x4001:                /* shlr Rn */
1709         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1710         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1711         return;
1712     case 0x4008:                /* shll2 Rn */
1713         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2);
1714         return;
1715     case 0x4018:                /* shll8 Rn */
1716         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8);
1717         return;
1718     case 0x4028:                /* shll16 Rn */
1719         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16);
1720         return;
1721     case 0x4009:                /* shlr2 Rn */
1722         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2);
1723         return;
1724     case 0x4019:                /* shlr8 Rn */
1725         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8);
1726         return;
1727     case 0x4029:                /* shlr16 Rn */
1728         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16);
1729         return;
1730     case 0x401b:                /* tas.b @Rn */
1731         {
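                 /* tas.b @Rn: load the byte, set T if it was zero, then
                    store it back with bit 7 (0x80) set.  */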
1732             TCGv addr, val;
1733             addr = tcg_temp_local_new();
1734             tcg_gen_mov_i32(addr, REG(B11_8));
1735             val = tcg_temp_local_new();
1736             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1737             gen_cmp_imm(TCG_COND_EQ, val, 0);
1738             tcg_gen_ori_i32(val, val, 0x80);
1739             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1740             tcg_temp_free(val);
1741             tcg_temp_free(addr);
1742         }
1743         return;
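         /* Floating-point opcodes.  With FPSCR.PR set, B11_8 selects a
            double-precision pair DRn accessed through gen_load_fpr64 and
            gen_store_fpr64; otherwise it selects a single-precision FRn.  */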
1744     case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1745         CHECK_FPU_ENABLED
1746         tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fpul);
1747         return;
1748     case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1749         CHECK_FPU_ENABLED
1750         tcg_gen_mov_i32(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1751         return;
1752     case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1753         CHECK_FPU_ENABLED
1754         if (ctx->fpscr & FPSCR_PR) {
1755             TCGv_i64 fp;
1756             if (ctx->opcode & 0x0100)
1757                 break; /* illegal instruction */
1758             fp = tcg_temp_new_i64();
1759             gen_helper_float_DT(fp, cpu_fpul);
1760             gen_store_fpr64(fp, DREG(B11_8));
1761             tcg_temp_free_i64(fp);
1762         }
1763         else {
1764             gen_helper_float_FT(cpu_fregs[FREG(B11_8)], cpu_fpul);
1765         }
1766         return;
1767     case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1768         CHECK_FPU_ENABLED
1769         if (ctx->fpscr & FPSCR_PR) {
1770             TCGv_i64 fp;
1771             if (ctx->opcode & 0x0100)
1772                 break; /* illegal instruction */
1773             fp = tcg_temp_new_i64();
1774             gen_load_fpr64(fp, DREG(B11_8));
1775             gen_helper_ftrc_DT(cpu_fpul, fp);
1776             tcg_temp_free_i64(fp);
1777         }
1778         else {
1779             gen_helper_ftrc_FT(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1780         }
1781         return;
1782     case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1783         CHECK_FPU_ENABLED
1784         {
1785             gen_helper_fneg_T(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1786         }
1787         return;
1788     case 0xf05d: /* fabs FRn/DRn */
1789         CHECK_FPU_ENABLED
1790         if (ctx->fpscr & FPSCR_PR) {
1791             if (ctx->opcode & 0x0100)
1792                 break; /* illegal instruction */
1793             TCGv_i64 fp = tcg_temp_new_i64();
1794             gen_load_fpr64(fp, DREG(B11_8));
1795             gen_helper_fabs_DT(fp, fp);
1796             gen_store_fpr64(fp, DREG(B11_8));
1797             tcg_temp_free_i64(fp);
1798         } else {
1799             gen_helper_fabs_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1800         }
1801         return;
1802     case 0xf06d: /* fsqrt FRn/DRn */
1803         CHECK_FPU_ENABLED
1804         if (ctx->fpscr & FPSCR_PR) {
1805             if (ctx->opcode & 0x0100)
1806                 break; /* illegal instruction */
1807             TCGv_i64 fp = tcg_temp_new_i64();
1808             gen_load_fpr64(fp, DREG(B11_8));
1809             gen_helper_fsqrt_DT(fp, fp);
1810             gen_store_fpr64(fp, DREG(B11_8));
1811             tcg_temp_free_i64(fp);
1812         } else {
1813             gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1814         }
1815         return;
1816     case 0xf07d: /* fsrra FRn */
1817         CHECK_FPU_ENABLED
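             /* fsrra is not emulated; fall through to the
                illegal-instruction handling below.  */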
1818         break;
1819     case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1820         CHECK_FPU_ENABLED
1821         if (!(ctx->fpscr & FPSCR_PR)) {
1822             tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0);
1823         }
1824         return;
1825     case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1826         CHECK_FPU_ENABLED
1827         if (!(ctx->fpscr & FPSCR_PR)) {
1828             tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000);
1829         }
1830         return;
1831     case 0xf0ad: /* fcnvsd FPUL,DRn */
1832         CHECK_FPU_ENABLED
1833         {
1834             TCGv_i64 fp = tcg_temp_new_i64();
1835             gen_helper_fcnvsd_FT_DT(fp, cpu_fpul);
1836             gen_store_fpr64(fp, DREG(B11_8));
1837             tcg_temp_free_i64(fp);
1838         }
1839         return;
1840     case 0xf0bd: /* fcnvds DRn,FPUL */
1841         CHECK_FPU_ENABLED
1842         {
1843             TCGv_i64 fp = tcg_temp_new_i64();
1844             gen_load_fpr64(fp, DREG(B11_8));
1845             gen_helper_fcnvds_DT_FT(cpu_fpul, fp);
1846             tcg_temp_free_i64(fp);
1847         }
1848         return;
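         /* fipr and ftrv operate on FVn vectors of four single-precision
            registers and are only defined with FPSCR.PR clear; anything
            else falls through to the illegal-instruction path.  */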
1849     case 0xf0ed: /* fipr FVm,FVn */
1850         CHECK_FPU_ENABLED
1851         if ((ctx->fpscr & FPSCR_PR) == 0) {
1852             TCGv m, n;
1853             m = tcg_const_i32((ctx->opcode >> 8) & 3);
1854             n = tcg_const_i32((ctx->opcode >> 10) & 3);
1855             gen_helper_fipr(m, n);
1856             tcg_temp_free(m);
1857             tcg_temp_free(n);
1858             return;
1859         }
1860         break;
1861     case 0xf0fd: /* ftrv XMTRX,FVn */
1862         CHECK_FPU_ENABLED
1863         if ((ctx->opcode & 0x0300) == 0x0100 &&
1864             (ctx->fpscr & FPSCR_PR) == 0) {
1865             TCGv n;
1866             n = tcg_const_i32((ctx->opcode >> 10) & 3);
1867             gen_helper_ftrv(n);
1868             tcg_temp_free(n);
1869             return;
1870         }
1871         break;
1872     }
1873 #if 0
1874     fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
1875             ctx->opcode, ctx->pc);
1876     fflush(stderr);
1877 #endif
1878     if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1879         gen_helper_raise_slot_illegal_instruction();
1880     } else {
1881         gen_helper_raise_illegal_instruction();
1882     }
1883     ctx->bstate = BS_EXCP;
1884 }
1885
1886 static void decode_opc(DisasContext * ctx)
1887 {
1888     uint32_t old_flags = ctx->flags;
1889
1890     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {
1891         tcg_gen_debug_insn_start(ctx->pc);
1892     }
1893
1894     _decode_opc(ctx);
1895
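         /* If the instruction just decoded sat in a delay slot, clear the
            delay-slot flags and emit the pending jump, ending the TB.  */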
1896     if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1897         if (ctx->flags & DELAY_SLOT_CLEARME) {
1898             gen_store_flags(0);
1899         } else {
1900             /* go out of the delay slot */
1901             uint32_t new_flags = ctx->flags;
1902             new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
1903             gen_store_flags(new_flags);
1904         }
1905         ctx->flags = 0;
1906         ctx->bstate = BS_BRANCH;
1907         if (old_flags & DELAY_SLOT_CONDITIONAL) {
1908             gen_delayed_conditional_jump(ctx);
1909         } else if (old_flags & DELAY_SLOT) {
1910             gen_jump(ctx);
1911         }
1912
1913     }
1914
1915     /* go into a delay slot */
1916     if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
1917         gen_store_flags(ctx->flags);
1918 }
1919
1920 static inline void
1921 gen_intermediate_code_internal(CPUSH4State * env, TranslationBlock * tb,
1922                                int search_pc)
1923 {
1924     DisasContext ctx;
1925     target_ulong pc_start;
1926     static uint16_t *gen_opc_end;
1927     CPUBreakpoint *bp;
1928     int i, ii;
1929     int num_insns;
1930     int max_insns;
1931
1932     pc_start = tb->pc;
1933     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
1934     ctx.pc = pc_start;
1935     ctx.flags = (uint32_t)tb->flags;
1936     ctx.bstate = BS_NONE;
1937     ctx.sr = env->sr;
1938     ctx.fpscr = env->fpscr;
1939     ctx.memidx = (env->sr & SR_MD) == 0 ? 1 : 0;
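         /* MMU index: 1 for user mode (SR.MD clear), 0 for privileged mode.  */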
1940     /* We don't know if the delayed pc came from a dynamic or static branch,
1941        so assume it is a dynamic branch.  */
1942     ctx.delayed_pc = -1; /* use delayed pc from env pointer */
1943     ctx.tb = tb;
1944     ctx.singlestep_enabled = env->singlestep_enabled;
1945     ctx.features = env->features;
1946     ctx.has_movcal = (tb->flags & TB_FLAG_PENDING_MOVCA);
1947
1948     ii = -1;
1949     num_insns = 0;
1950     max_insns = tb->cflags & CF_COUNT_MASK;
1951     if (max_insns == 0)
1952         max_insns = CF_COUNT_MASK;
1953     gen_icount_start();
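         /* Translate until something ends the TB (branch or exception), the
            opcode buffer fills up, a page boundary is crossed, single-stepping
            is active, or the instruction budget is exhausted.  */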
1954     while (ctx.bstate == BS_NONE && gen_opc_ptr < gen_opc_end) {
1955         if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
1956             QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
1957                 if (ctx.pc == bp->pc) {
1958                     /* We have hit a breakpoint - make sure PC is up-to-date */
1959                     tcg_gen_movi_i32(cpu_pc, ctx.pc);
1960                     gen_helper_debug();
1961                     ctx.bstate = BS_EXCP;
1962                     break;
1963                 }
1964             }
1965         }
1966         if (search_pc) {
1967             i = gen_opc_ptr - gen_opc_buf;
1968             if (ii < i) {
1969                 ii++;
1970                 while (ii < i)
1971                     gen_opc_instr_start[ii++] = 0;
1972             }
1973             gen_opc_pc[ii] = ctx.pc;
1974             gen_opc_hflags[ii] = ctx.flags;
1975             gen_opc_instr_start[ii] = 1;
1976             gen_opc_icount[ii] = num_insns;
1977         }
1978         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
1979             gen_io_start();
1980 #if 0
1981         fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
1982         fflush(stderr);
1983 #endif
1984         ctx.opcode = lduw_code(ctx.pc);
1985         decode_opc(&ctx);
1986         num_insns++;
1987         ctx.pc += 2;
1988         if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
1989             break;
1990         if (env->singlestep_enabled)
1991             break;
1992         if (num_insns >= max_insns)
1993             break;
1994         if (singlestep)
1995             break;
1996     }
1997     if (tb->cflags & CF_LAST_IO)
1998         gen_io_end();
1999     if (env->singlestep_enabled) {
2000         tcg_gen_movi_i32(cpu_pc, ctx.pc);
2001         gen_helper_debug();
2002     } else {
2003         switch (ctx.bstate) {
2004         case BS_STOP:
2005             /* gen_op_interrupt_restart(); */
2006             /* fall through */
2007         case BS_NONE:
2008             if (ctx.flags) {
2009                 gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
2010             }
2011             gen_goto_tb(&ctx, 0, ctx.pc);
2012             break;
2013         case BS_EXCP:
2014             /* gen_op_interrupt_restart(); */
2015             tcg_gen_exit_tb(0);
2016             break;
2017         case BS_BRANCH:
2018         default:
2019             break;
2020         }
2021     }
2022
2023     gen_icount_end(tb, num_insns);
2024     *gen_opc_ptr = INDEX_op_end;
2025     if (search_pc) {
2026         i = gen_opc_ptr - gen_opc_buf;
2027         ii++;
2028         while (ii <= i)
2029             gen_opc_instr_start[ii++] = 0;
2030     } else {
2031         tb->size = ctx.pc - pc_start;
2032         tb->icount = num_insns;
2033     }
2034
2035 #ifdef DEBUG_DISAS
2036 #ifdef SH4_DEBUG_DISAS
2037     qemu_log_mask(CPU_LOG_TB_IN_ASM, "\n");
2038 #endif
2039     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
2040         qemu_log("IN:\n");      /* , lookup_symbol(pc_start)); */
2041         log_target_disas(pc_start, ctx.pc - pc_start, 0);
2042         qemu_log("\n");
2043     }
2044 #endif
2045 }
2046
2047 void gen_intermediate_code(CPUSH4State * env, struct TranslationBlock *tb)
2048 {
2049     gen_intermediate_code_internal(env, tb, 0);
2050 }
2051
2052 void gen_intermediate_code_pc(CPUSH4State * env, struct TranslationBlock *tb)
2053 {
2054     gen_intermediate_code_internal(env, tb, 1);
2055 }
2056
2057 void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb, int pc_pos)
2058 {
2059     env->pc = gen_opc_pc[pc_pos];
2060     env->flags = gen_opc_hflags[pc_pos];
2061 }