[qemu.git] / target-sh4 / translate.c
1 /*
2  *  SH4 translation
3  *
4  *  Copyright (c) 2005 Samuel Tardieu
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18  */
19
20 #define DEBUG_DISAS
21 //#define SH4_SINGLE_STEP
22
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "tcg-op.h"
26
27 #include "helper.h"
28 #define GEN_HELPER 1
29 #include "helper.h"
30
31 typedef struct DisasContext {
32     struct TranslationBlock *tb;
33     target_ulong pc;
34     uint16_t opcode;
35     uint32_t flags;
36     int bstate;
37     int memidx;
38     uint32_t delayed_pc;
39     int singlestep_enabled;
40     uint32_t features;
41     int has_movcal;
42 } DisasContext;
43
44 #if defined(CONFIG_USER_ONLY)
45 #define IS_USER(ctx) 1
46 #else
47 #define IS_USER(ctx) (!(ctx->flags & SR_MD))
48 #endif
49
50 enum {
51     BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
52                       * exception condition
53                       */
54     BS_STOP     = 1, /* We want to stop translation for any reason */
55     BS_BRANCH   = 2, /* We reached a branch condition     */
56     BS_EXCP     = 3, /* We reached an exception condition */
57 };
58
59 /* global register indexes */
60 static TCGv_ptr cpu_env;
61 static TCGv cpu_gregs[24];
62 static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
63 static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
64 static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_ldst;
65 static TCGv cpu_fregs[32];
66
67 /* internal register indexes */
68 static TCGv cpu_flags, cpu_delayed_pc;
69
70 static uint32_t gen_opc_hflags[OPC_BUF_SIZE];
71
72 #include "exec/gen-icount.h"
73
74 void sh4_translate_init(void)
75 {
76     int i;
77     static int done_init = 0;
78     static const char * const gregnames[24] = {
79         "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
80         "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
81         "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
82         "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
83         "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
84     };
85     static const char * const fregnames[32] = {
86          "FPR0_BANK0",  "FPR1_BANK0",  "FPR2_BANK0",  "FPR3_BANK0",
87          "FPR4_BANK0",  "FPR5_BANK0",  "FPR6_BANK0",  "FPR7_BANK0",
88          "FPR8_BANK0",  "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
89         "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
90          "FPR0_BANK1",  "FPR1_BANK1",  "FPR2_BANK1",  "FPR3_BANK1",
91          "FPR4_BANK1",  "FPR5_BANK1",  "FPR6_BANK1",  "FPR7_BANK1",
92          "FPR8_BANK1",  "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
93         "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
94     };
95
96     if (done_init)
97         return;
98
99     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
100
101     for (i = 0; i < 24; i++)
102         cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
103                                               offsetof(CPUSH4State, gregs[i]),
104                                               gregnames[i]);
105
106     cpu_pc = tcg_global_mem_new_i32(TCG_AREG0,
107                                     offsetof(CPUSH4State, pc), "PC");
108     cpu_sr = tcg_global_mem_new_i32(TCG_AREG0,
109                                     offsetof(CPUSH4State, sr), "SR");
110     cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0,
111                                      offsetof(CPUSH4State, ssr), "SSR");
112     cpu_spc = tcg_global_mem_new_i32(TCG_AREG0,
113                                      offsetof(CPUSH4State, spc), "SPC");
114     cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0,
115                                      offsetof(CPUSH4State, gbr), "GBR");
116     cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0,
117                                      offsetof(CPUSH4State, vbr), "VBR");
118     cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0,
119                                      offsetof(CPUSH4State, sgr), "SGR");
120     cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0,
121                                      offsetof(CPUSH4State, dbr), "DBR");
122     cpu_mach = tcg_global_mem_new_i32(TCG_AREG0,
123                                       offsetof(CPUSH4State, mach), "MACH");
124     cpu_macl = tcg_global_mem_new_i32(TCG_AREG0,
125                                       offsetof(CPUSH4State, macl), "MACL");
126     cpu_pr = tcg_global_mem_new_i32(TCG_AREG0,
127                                     offsetof(CPUSH4State, pr), "PR");
128     cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
129                                        offsetof(CPUSH4State, fpscr), "FPSCR");
130     cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0,
131                                       offsetof(CPUSH4State, fpul), "FPUL");
132
133     cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
134                                        offsetof(CPUSH4State, flags), "_flags_");
135     cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
136                                             offsetof(CPUSH4State, delayed_pc),
137                                             "_delayed_pc_");
138     cpu_ldst = tcg_global_mem_new_i32(TCG_AREG0,
139                                       offsetof(CPUSH4State, ldst), "_ldst_");
140
141     for (i = 0; i < 32; i++)
142         cpu_fregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
143                                               offsetof(CPUSH4State, fregs[i]),
144                                               fregnames[i]);
145
146     /* register helpers */
147 #define GEN_HELPER 2
148 #include "helper.h"
149
150     done_init = 1;
151 }
152
153 void cpu_dump_state(CPUSH4State * env, FILE * f,
154                     int (*cpu_fprintf) (FILE * f, const char *fmt, ...),
155                     int flags)
156 {
157     int i;
158     cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
159                 env->pc, env->sr, env->pr, env->fpscr);
160     cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
161                 env->spc, env->ssr, env->gbr, env->vbr);
162     cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
163                 env->sgr, env->dbr, env->delayed_pc, env->fpul);
164     for (i = 0; i < 24; i += 4) {
165         cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
166                     i, env->gregs[i], i + 1, env->gregs[i + 1],
167                     i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
168     }
169     if (env->flags & DELAY_SLOT) {
170         cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
171                     env->delayed_pc);
172     } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
173         cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
174                     env->delayed_pc);
175     }
176 }
177
178 static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
179 {
180     TranslationBlock *tb;
181     tb = ctx->tb;
182
183     if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
184         !ctx->singlestep_enabled) {
185         /* Use a direct jump if in the same page and single-stepping is not enabled */
186         tcg_gen_goto_tb(n);
187         tcg_gen_movi_i32(cpu_pc, dest);
188         tcg_gen_exit_tb((tcg_target_long)tb + n);
189     } else {
190         tcg_gen_movi_i32(cpu_pc, dest);
191         if (ctx->singlestep_enabled)
192             gen_helper_debug(cpu_env);
193         tcg_gen_exit_tb(0);
194     }
195 }
196
197 static void gen_jump(DisasContext * ctx)
198 {
199     if (ctx->delayed_pc == (uint32_t) - 1) {
200         /* Target is not statically known; it necessarily comes from a
201            delayed jump, as immediate jumps are conditional jumps */
202         tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
203         if (ctx->singlestep_enabled)
204             gen_helper_debug(cpu_env);
205         tcg_gen_exit_tb(0);
206     } else {
207         gen_goto_tb(ctx, 0, ctx->delayed_pc);
208     }
209 }
210
211 static inline void gen_branch_slot(uint32_t delayed_pc, int t)
212 {
213     TCGv sr;
214     int label = gen_new_label();
215     tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
216     sr = tcg_temp_new();
217     tcg_gen_andi_i32(sr, cpu_sr, SR_T);
218     tcg_gen_brcondi_i32(t ? TCG_COND_EQ:TCG_COND_NE, sr, 0, label);
219     tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
220     gen_set_label(label);
221 }
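
/* A conditional delayed branch (bt/s, bf/s) is emitted in two steps: at the
   branch itself, gen_branch_slot() records in the DELAY_SLOT_TRUE flag
   whether SR.T selects the branch; once the delay-slot instruction has been
   translated, gen_delayed_conditional_jump() below consumes that flag and
   either falls through or performs the recorded jump.  */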
222
223 /* Immediate conditional jump (bt or bf) */
224 static void gen_conditional_jump(DisasContext * ctx,
225                                  target_ulong ift, target_ulong ifnott)
226 {
227     int l1;
228     TCGv sr;
229
230     l1 = gen_new_label();
231     sr = tcg_temp_new();
232     tcg_gen_andi_i32(sr, cpu_sr, SR_T);
233     tcg_gen_brcondi_i32(TCG_COND_NE, sr, 0, l1);
234     gen_goto_tb(ctx, 0, ifnott);
235     gen_set_label(l1);
236     gen_goto_tb(ctx, 1, ift);
237 }
238
239 /* Delayed conditional jump (bt or bf) */
240 static void gen_delayed_conditional_jump(DisasContext * ctx)
241 {
242     int l1;
243     TCGv ds;
244
245     l1 = gen_new_label();
246     ds = tcg_temp_new();
247     tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
248     tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1);
249     gen_goto_tb(ctx, 1, ctx->pc + 2);
250     gen_set_label(l1);
251     tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
252     gen_jump(ctx);
253 }
254
255 static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
256 {
257     TCGv t;
258
259     t = tcg_temp_new();
260     tcg_gen_setcond_i32(cond, t, t1, t0);
261     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
262     tcg_gen_or_i32(cpu_sr, cpu_sr, t);
263
264     tcg_temp_free(t);
265 }
266
267 static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
268 {
269     TCGv t;
270
271     t = tcg_temp_new();
272     tcg_gen_setcondi_i32(cond, t, t0, imm);
273     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
274     tcg_gen_or_i32(cpu_sr, cpu_sr, t);
275
276     tcg_temp_free(t);
277 }
278
279 static inline void gen_store_flags(uint32_t flags)
280 {
281     tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
282     tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
283 }
284
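/* Copy bit p1 of t1 into bit p0 of t0, leaving the other bits of t0
   unchanged (used below by div0s to transfer sign bits into SR.Q/M/T).  */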
285 static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
286 {
287     TCGv tmp = tcg_temp_new();
288
289     p0 &= 0x1f;
290     p1 &= 0x1f;
291
292     tcg_gen_andi_i32(tmp, t1, (1 << p1));
293     tcg_gen_andi_i32(t0, t0, ~(1 << p0));
294     if (p0 < p1)
295         tcg_gen_shri_i32(tmp, tmp, p1 - p0);
296     else if (p0 > p1)
297         tcg_gen_shli_i32(tmp, tmp, p0 - p1);
298     tcg_gen_or_i32(t0, t0, tmp);
299
300     tcg_temp_free(tmp);
301 }
302
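/* A double-precision register DRn lives in the pair FPRn:FPRn+1, with the
   even-numbered register holding the most-significant 32 bits, so the
   64-bit TCG value is built as (FPRn << 32) | FPRn+1.  */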
303 static inline void gen_load_fpr64(TCGv_i64 t, int reg)
304 {
305     tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
306 }
307
308 static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
309 {
310     TCGv_i32 tmp = tcg_temp_new_i32();
311     tcg_gen_trunc_i64_i32(tmp, t);
312     tcg_gen_mov_i32(cpu_fregs[reg + 1], tmp);
313     tcg_gen_shri_i64(t, t, 32);
314     tcg_gen_trunc_i64_i32(tmp, t);
315     tcg_gen_mov_i32(cpu_fregs[reg], tmp);
316     tcg_temp_free_i32(tmp);
317 }
318
319 #define B3_0 (ctx->opcode & 0xf)
320 #define B6_4 ((ctx->opcode >> 4) & 0x7)
321 #define B7_4 ((ctx->opcode >> 4) & 0xf)
322 #define B7_0 (ctx->opcode & 0xff)
323 #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
324 #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
325   (ctx->opcode & 0xfff))
326 #define B11_8 ((ctx->opcode >> 8) & 0xf)
327 #define B15_12 ((ctx->opcode >> 12) & 0xf)
328
329 #define REG(x) ((x) < 8 && (ctx->flags & (SR_MD | SR_RB)) == (SR_MD | SR_RB) \
330                 ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
331
332 #define ALTREG(x) ((x) < 8 && (ctx->flags & (SR_MD | SR_RB)) != (SR_MD | SR_RB)\
333                 ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
334
335 #define FREG(x) (ctx->flags & FPSCR_FR ? (x) ^ 0x10 : (x))
336 #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
337 #define XREG(x) (ctx->flags & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
338 #define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
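
/* Decoding sketch (illustrative): for the 16-bit opcode 0xe105, i.e.
   "mov #0x05,R1", B15_12 = 0xe selects the major group, B11_8 = 1 names Rn,
   and B7_0s = 0x05 is the sign-extended immediate.  REG()/ALTREG() pick the
   banked low registers R0-R7 according to SR.MD/SR.RB, and FREG()/XREG()
   apply the FPSCR.FR bank swap to the floating-point register index.  */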
339
340 #define CHECK_NOT_DELAY_SLOT \
341   if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))     \
342   {                                                           \
343       tcg_gen_movi_i32(cpu_pc, ctx->pc);                      \
344       gen_helper_raise_slot_illegal_instruction(cpu_env);     \
345       ctx->bstate = BS_BRANCH;                                \
346       return;                                                 \
347   }
348
349 #define CHECK_PRIVILEGED                                        \
350   if (IS_USER(ctx)) {                                           \
351       tcg_gen_movi_i32(cpu_pc, ctx->pc);                        \
352       if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
353           gen_helper_raise_slot_illegal_instruction(cpu_env);   \
354       } else {                                                  \
355           gen_helper_raise_illegal_instruction(cpu_env);        \
356       }                                                         \
357       ctx->bstate = BS_BRANCH;                                  \
358       return;                                                   \
359   }
360
361 #define CHECK_FPU_ENABLED                                       \
362   if (ctx->flags & SR_FD) {                                     \
363       tcg_gen_movi_i32(cpu_pc, ctx->pc);                        \
364       if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
365           gen_helper_raise_slot_fpu_disable(cpu_env);           \
366       } else {                                                  \
367           gen_helper_raise_fpu_disable(cpu_env);                \
368       }                                                         \
369       ctx->bstate = BS_BRANCH;                                  \
370       return;                                                   \
371   }
372
373 static void _decode_opc(DisasContext * ctx)
374 {
375     /* This code tries to make movca.l emulation sufficiently
376        accurate for Linux purposes.  This instruction writes
377        memory, and prior to that, always allocates a cache line.
378        It is used in two contexts:
379        - in memcpy, where data is copied in blocks, the first write
380        to a block uses movca.l for performance.
381        - in arch/sh/mm/cache-sh4.c, the movca.l + ocbi combination is
382        used to flush the cache. Here, the data written by movca.l never
383        reaches memory, and the value written is just bogus.
384
385        To simulate this, we emulate movca.l as an ordinary store, but we
386        also remember the previous content of the address. If we then see
387        ocbi, we check whether a movca.l was done for that address earlier;
388        if so, the write should never have reached memory, so we restore
389        the previous content.  When we see an instruction that is neither
390        movca.l nor ocbi, the previous content is discarded.
391
392        To optimize, we only try to flush stores when we're at the start of
393        a TB, or if we already saw movca.l in this TB and did not flush
394        stores yet.  */
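    /* Illustrative (not from this file): the cache-flush pattern mentioned
     * above looks roughly like
     *     movca.l  r0,@r4    ! allocate the line and write dummy data
     *     ocbi     @r4       ! invalidate it before it can reach memory
     * so the backup recorded by the movca.l helper is what lets the ocbi
     * helper undo the store.  */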
395     if (ctx->has_movcal)
396     {
397         int opcode = ctx->opcode & 0xf0ff;
398         if (opcode != 0x0093 /* ocbi */
399             && opcode != 0x00c3 /* movca.l */)
400         {
401             gen_helper_discard_movcal_backup(cpu_env);
402             ctx->has_movcal = 0;
403         }
404     }
405
406 #if 0
407     fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
408 #endif
409
410     switch (ctx->opcode) {
411     case 0x0019:                /* div0u */
412         tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
413         return;
414     case 0x000b:                /* rts */
415         CHECK_NOT_DELAY_SLOT
416         tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
417         ctx->flags |= DELAY_SLOT;
418         ctx->delayed_pc = (uint32_t) - 1;
419         return;
420     case 0x0028:                /* clrmac */
421         tcg_gen_movi_i32(cpu_mach, 0);
422         tcg_gen_movi_i32(cpu_macl, 0);
423         return;
424     case 0x0048:                /* clrs */
425         tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
426         return;
427     case 0x0008:                /* clrt */
428         tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
429         return;
430     case 0x0038:                /* ldtlb */
431         CHECK_PRIVILEGED
432         gen_helper_ldtlb(cpu_env);
433         return;
434     case 0x002b:                /* rte */
435         CHECK_PRIVILEGED
436         CHECK_NOT_DELAY_SLOT
437         tcg_gen_mov_i32(cpu_sr, cpu_ssr);
438         tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
439         ctx->flags |= DELAY_SLOT;
440         ctx->delayed_pc = (uint32_t) - 1;
441         return;
442     case 0x0058:                /* sets */
443         tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
444         return;
445     case 0x0018:                /* sett */
446         tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
447         return;
448     case 0xfbfd:                /* frchg */
449         tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
450         ctx->bstate = BS_STOP;
451         return;
452     case 0xf3fd:                /* fschg */
453         tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
454         ctx->bstate = BS_STOP;
455         return;
456     case 0x0009:                /* nop */
457         return;
458     case 0x001b:                /* sleep */
459         CHECK_PRIVILEGED
460         tcg_gen_movi_i32(cpu_pc, ctx->pc + 2);
461         gen_helper_sleep(cpu_env);
462         return;
463     }
464
465     switch (ctx->opcode & 0xf000) {
466     case 0x1000:                /* mov.l Rm,@(disp,Rn) */
467         {
468             TCGv addr = tcg_temp_new();
469             tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
470             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
471             tcg_temp_free(addr);
472         }
473         return;
474     case 0x5000:                /* mov.l @(disp,Rm),Rn */
475         {
476             TCGv addr = tcg_temp_new();
477             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
478             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
479             tcg_temp_free(addr);
480         }
481         return;
482     case 0xe000:                /* mov #imm,Rn */
483         tcg_gen_movi_i32(REG(B11_8), B7_0s);
484         return;
485     case 0x9000:                /* mov.w @(disp,PC),Rn */
486         {
487             TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
488             tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
489             tcg_temp_free(addr);
490         }
491         return;
492     case 0xd000:                /* mov.l @(disp,PC),Rn */
493         {
494             TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
495             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
496             tcg_temp_free(addr);
497         }
498         return;
499     case 0x7000:                /* add #imm,Rn */
500         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
501         return;
502     case 0xa000:                /* bra disp */
503         CHECK_NOT_DELAY_SLOT
504         ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
505         tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
506         ctx->flags |= DELAY_SLOT;
507         return;
508     case 0xb000:                /* bsr disp */
509         CHECK_NOT_DELAY_SLOT
510         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
511         ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
512         tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
513         ctx->flags |= DELAY_SLOT;
514         return;
515     }
516
517     switch (ctx->opcode & 0xf00f) {
518     case 0x6003:                /* mov Rm,Rn */
519         tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
520         return;
521     case 0x2000:                /* mov.b Rm,@Rn */
522         tcg_gen_qemu_st8(REG(B7_4), REG(B11_8), ctx->memidx);
523         return;
524     case 0x2001:                /* mov.w Rm,@Rn */
525         tcg_gen_qemu_st16(REG(B7_4), REG(B11_8), ctx->memidx);
526         return;
527     case 0x2002:                /* mov.l Rm,@Rn */
528         tcg_gen_qemu_st32(REG(B7_4), REG(B11_8), ctx->memidx);
529         return;
530     case 0x6000:                /* mov.b @Rm,Rn */
531         tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
532         return;
533     case 0x6001:                /* mov.w @Rm,Rn */
534         tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
535         return;
536     case 0x6002:                /* mov.l @Rm,Rn */
537         tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
538         return;
539     case 0x2004:                /* mov.b Rm,@-Rn */
540         {
541             TCGv addr = tcg_temp_new();
542             tcg_gen_subi_i32(addr, REG(B11_8), 1);
543             tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);     /* might cause re-execution */
544             tcg_gen_mov_i32(REG(B11_8), addr);                  /* modify register status */
545             tcg_temp_free(addr);
546         }
547         return;
548     case 0x2005:                /* mov.w Rm,@-Rn */
549         {
550             TCGv addr = tcg_temp_new();
551             tcg_gen_subi_i32(addr, REG(B11_8), 2);
552             tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
553             tcg_gen_mov_i32(REG(B11_8), addr);
554             tcg_temp_free(addr);
555         }
556         return;
557     case 0x2006:                /* mov.l Rm,@-Rn */
558         {
559             TCGv addr = tcg_temp_new();
560             tcg_gen_subi_i32(addr, REG(B11_8), 4);
561             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
562             tcg_gen_mov_i32(REG(B11_8), addr);
563         }
564         return;
565     case 0x6004:                /* mov.b @Rm+,Rn */
566         tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
567         if (B11_8 != B7_4)
568             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
569         return;
570     case 0x6005:                /* mov.w @Rm+,Rn */
571         tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
572         if (B11_8 != B7_4)
573             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
574         return;
575     case 0x6006:                /* mov.l @Rm+,Rn */
576         tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
577         if (B11_8 != B7_4)
578             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
579         return;
580     case 0x0004:                /* mov.b Rm,@(R0,Rn) */
581         {
582             TCGv addr = tcg_temp_new();
583             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
584             tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
585             tcg_temp_free(addr);
586         }
587         return;
588     case 0x0005:                /* mov.w Rm,@(R0,Rn) */
589         {
590             TCGv addr = tcg_temp_new();
591             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
592             tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
593             tcg_temp_free(addr);
594         }
595         return;
596     case 0x0006:                /* mov.l Rm,@(R0,Rn) */
597         {
598             TCGv addr = tcg_temp_new();
599             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
600             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
601             tcg_temp_free(addr);
602         }
603         return;
604     case 0x000c:                /* mov.b @(R0,Rm),Rn */
605         {
606             TCGv addr = tcg_temp_new();
607             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
608             tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
609             tcg_temp_free(addr);
610         }
611         return;
612     case 0x000d:                /* mov.w @(R0,Rm),Rn */
613         {
614             TCGv addr = tcg_temp_new();
615             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
616             tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
617             tcg_temp_free(addr);
618         }
619         return;
620     case 0x000e:                /* mov.l @(R0,Rm),Rn */
621         {
622             TCGv addr = tcg_temp_new();
623             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
624             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
625             tcg_temp_free(addr);
626         }
627         return;
628     case 0x6008:                /* swap.b Rm,Rn */
629         {
630             TCGv high, low;
631             high = tcg_temp_new();
632             tcg_gen_andi_i32(high, REG(B7_4), 0xffff0000);
633             low = tcg_temp_new();
634             tcg_gen_ext16u_i32(low, REG(B7_4));
635             tcg_gen_bswap16_i32(low, low);
636             tcg_gen_or_i32(REG(B11_8), high, low);
637             tcg_temp_free(low);
638             tcg_temp_free(high);
639         }
640         return;
641     case 0x6009:                /* swap.w Rm,Rn */
642         tcg_gen_rotli_i32(REG(B11_8), REG(B7_4), 16);
643         return;
644     case 0x200d:                /* xtrct Rm,Rn */
645         {
646             TCGv high, low;
647             high = tcg_temp_new();
648             tcg_gen_shli_i32(high, REG(B7_4), 16);
649             low = tcg_temp_new();
650             tcg_gen_shri_i32(low, REG(B11_8), 16);
651             tcg_gen_or_i32(REG(B11_8), high, low);
652             tcg_temp_free(low);
653             tcg_temp_free(high);
654         }
655         return;
656     case 0x300c:                /* add Rm,Rn */
657         tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
658         return;
659     case 0x300e:                /* addc Rm,Rn */
660         {
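            /* addc: Rn = Rn + Rm + T, with the new T being the carry out.
               The carry is detected as "either addition wrapped", i.e.
               Rn + Rm < Rn (unsigned) or (Rn + Rm) + T < Rn + Rm.  */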
661             TCGv t0, t1, t2;
662             t0 = tcg_temp_new();
663             tcg_gen_andi_i32(t0, cpu_sr, SR_T);
664             t1 = tcg_temp_new();
665             tcg_gen_add_i32(t1, REG(B7_4), REG(B11_8));
666             tcg_gen_add_i32(t0, t0, t1);
667             t2 = tcg_temp_new();
668             tcg_gen_setcond_i32(TCG_COND_GTU, t2, REG(B11_8), t1);
669             tcg_gen_setcond_i32(TCG_COND_GTU, t1, t1, t0);
670             tcg_gen_or_i32(t1, t1, t2);
671             tcg_temp_free(t2);
672             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
673             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
674             tcg_temp_free(t1);
675             tcg_gen_mov_i32(REG(B11_8), t0);
676             tcg_temp_free(t0);
677         }
678         return;
679     case 0x300f:                /* addv Rm,Rn */
680         {
681             TCGv t0, t1, t2;
682             t0 = tcg_temp_new();
683             tcg_gen_add_i32(t0, REG(B7_4), REG(B11_8));
684             t1 = tcg_temp_new();
685             tcg_gen_xor_i32(t1, t0, REG(B11_8));
686             t2 = tcg_temp_new();
687             tcg_gen_xor_i32(t2, REG(B7_4), REG(B11_8));
688             tcg_gen_andc_i32(t1, t1, t2);
689             tcg_temp_free(t2);
690             tcg_gen_shri_i32(t1, t1, 31);
691             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
692             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
693             tcg_temp_free(t1);
694             tcg_gen_mov_i32(REG(B7_4), t0);
695             tcg_temp_free(t0);
696         }
697         return;
698     case 0x2009:                /* and Rm,Rn */
699         tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
700         return;
701     case 0x3000:                /* cmp/eq Rm,Rn */
702         gen_cmp(TCG_COND_EQ, REG(B7_4), REG(B11_8));
703         return;
704     case 0x3003:                /* cmp/ge Rm,Rn */
705         gen_cmp(TCG_COND_GE, REG(B7_4), REG(B11_8));
706         return;
707     case 0x3007:                /* cmp/gt Rm,Rn */
708         gen_cmp(TCG_COND_GT, REG(B7_4), REG(B11_8));
709         return;
710     case 0x3006:                /* cmp/hi Rm,Rn */
711         gen_cmp(TCG_COND_GTU, REG(B7_4), REG(B11_8));
712         return;
713     case 0x3002:                /* cmp/hs Rm,Rn */
714         gen_cmp(TCG_COND_GEU, REG(B7_4), REG(B11_8));
715         return;
716     case 0x200c:                /* cmp/str Rm,Rn */
717         {
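            /* cmp/str: T is set when any of the four corresponding bytes of
               Rm and Rn are equal; the XOR turns equal bytes into zero, and
               each byte lane is then tested for zero and ORed into SR.T.  */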
718             TCGv cmp1 = tcg_temp_new();
719             TCGv cmp2 = tcg_temp_new();
720             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
721             tcg_gen_xor_i32(cmp1, REG(B7_4), REG(B11_8));
722             tcg_gen_andi_i32(cmp2, cmp1, 0xff000000);
723             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
724             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
725             tcg_gen_andi_i32(cmp2, cmp1, 0x00ff0000);
726             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
727             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
728             tcg_gen_andi_i32(cmp2, cmp1, 0x0000ff00);
729             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
730             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
731             tcg_gen_andi_i32(cmp2, cmp1, 0x000000ff);
732             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
733             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
734             tcg_temp_free(cmp2);
735             tcg_temp_free(cmp1);
736         }
737         return;
738     case 0x2007:                /* div0s Rm,Rn */
739         {
740             gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31);        /* SR_Q */
741             gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31);         /* SR_M */
742             TCGv val = tcg_temp_new();
743             tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
744             gen_copy_bit_i32(cpu_sr, 0, val, 31);               /* SR_T */
745             tcg_temp_free(val);
746         }
747         return;
748     case 0x3004:                /* div1 Rm,Rn */
749         gen_helper_div1(REG(B11_8), cpu_env, REG(B7_4), REG(B11_8));
750         return;
751     case 0x300d:                /* dmuls.l Rm,Rn */
752         tcg_gen_muls2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8));
753         return;
754     case 0x3005:                /* dmulu.l Rm,Rn */
755         tcg_gen_mulu2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8));
756         return;
757     case 0x600e:                /* exts.b Rm,Rn */
758         tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
759         return;
760     case 0x600f:                /* exts.w Rm,Rn */
761         tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
762         return;
763     case 0x600c:                /* extu.b Rm,Rn */
764         tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
765         return;
766     case 0x600d:                /* extu.w Rm,Rn */
767         tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
768         return;
769     case 0x000f:                /* mac.l @Rm+,@Rn+ */
770         {
771             TCGv arg0, arg1;
772             arg0 = tcg_temp_new();
773             tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
774             arg1 = tcg_temp_new();
775             tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
776             gen_helper_macl(cpu_env, arg0, arg1);
777             tcg_temp_free(arg1);
778             tcg_temp_free(arg0);
779             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
780             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
781         }
782         return;
783     case 0x400f:                /* mac.w @Rm+,@Rn+ */
784         {
785             TCGv arg0, arg1;
786             arg0 = tcg_temp_new();
787             tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
788             arg1 = tcg_temp_new();
789             tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
790             gen_helper_macw(cpu_env, arg0, arg1);
791             tcg_temp_free(arg1);
792             tcg_temp_free(arg0);
793             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
794             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
795         }
796         return;
797     case 0x0007:                /* mul.l Rm,Rn */
798         tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
799         return;
800     case 0x200f:                /* muls.w Rm,Rn */
801         {
802             TCGv arg0, arg1;
803             arg0 = tcg_temp_new();
804             tcg_gen_ext16s_i32(arg0, REG(B7_4));
805             arg1 = tcg_temp_new();
806             tcg_gen_ext16s_i32(arg1, REG(B11_8));
807             tcg_gen_mul_i32(cpu_macl, arg0, arg1);
808             tcg_temp_free(arg1);
809             tcg_temp_free(arg0);
810         }
811         return;
812     case 0x200e:                /* mulu.w Rm,Rn */
813         {
814             TCGv arg0, arg1;
815             arg0 = tcg_temp_new();
816             tcg_gen_ext16u_i32(arg0, REG(B7_4));
817             arg1 = tcg_temp_new();
818             tcg_gen_ext16u_i32(arg1, REG(B11_8));
819             tcg_gen_mul_i32(cpu_macl, arg0, arg1);
820             tcg_temp_free(arg1);
821             tcg_temp_free(arg0);
822         }
823         return;
824     case 0x600b:                /* neg Rm,Rn */
825         tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
826         return;
827     case 0x600a:                /* negc Rm,Rn */
828         {
829             TCGv t0, t1;
830             t0 = tcg_temp_new();
831             tcg_gen_neg_i32(t0, REG(B7_4));
832             t1 = tcg_temp_new();
833             tcg_gen_andi_i32(t1, cpu_sr, SR_T);
834             tcg_gen_sub_i32(REG(B11_8), t0, t1);
835             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
836             tcg_gen_setcondi_i32(TCG_COND_GTU, t1, t0, 0);
837             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
838             tcg_gen_setcond_i32(TCG_COND_GTU, t1, REG(B11_8), t0);
839             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
840             tcg_temp_free(t0);
841             tcg_temp_free(t1);
842         }
843         return;
844     case 0x6007:                /* not Rm,Rn */
845         tcg_gen_not_i32(REG(B11_8), REG(B7_4));
846         return;
847     case 0x200b:                /* or Rm,Rn */
848         tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
849         return;
850     case 0x400c:                /* shad Rm,Rn */
851         {
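            /* shad: arithmetic shift of Rn by the signed amount in Rm.
               Rm >= 0 shifts left by Rm & 31; Rm < 0 shifts right
               arithmetically by 32 - (Rm & 31), where Rm & 31 == 0 means a
               full 32-bit shift, giving 0 or -1 according to Rn's sign.  */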
852             int label1 = gen_new_label();
853             int label2 = gen_new_label();
854             int label3 = gen_new_label();
855             int label4 = gen_new_label();
856             TCGv shift;
857             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
858             /* Rm positive, shift to the left */
859             shift = tcg_temp_new();
860             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
861             tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
862             tcg_temp_free(shift);
863             tcg_gen_br(label4);
864             /* Rm negative, shift to the right */
865             gen_set_label(label1);
866             shift = tcg_temp_new();
867             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
868             tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
869             tcg_gen_not_i32(shift, REG(B7_4));
870             tcg_gen_andi_i32(shift, shift, 0x1f);
871             tcg_gen_addi_i32(shift, shift, 1);
872             tcg_gen_sar_i32(REG(B11_8), REG(B11_8), shift);
873             tcg_temp_free(shift);
874             tcg_gen_br(label4);
875             /* Rm = -32 */
876             gen_set_label(label2);
877             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B11_8), 0, label3);
878             tcg_gen_movi_i32(REG(B11_8), 0);
879             tcg_gen_br(label4);
880             gen_set_label(label3);
881             tcg_gen_movi_i32(REG(B11_8), 0xffffffff);
882             gen_set_label(label4);
883         }
884         return;
885     case 0x400d:                /* shld Rm,Rn */
886         {
887             int label1 = gen_new_label();
888             int label2 = gen_new_label();
889             int label3 = gen_new_label();
890             TCGv shift;
891             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
892             /* Rm positive, shift to the left */
893             shift = tcg_temp_new();
894             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
895             tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
896             tcg_temp_free(shift);
897             tcg_gen_br(label3);
898             /* Rm negative, shift to the right */
899             gen_set_label(label1);
900             shift = tcg_temp_new();
901             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
902             tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
903             tcg_gen_not_i32(shift, REG(B7_4));
904             tcg_gen_andi_i32(shift, shift, 0x1f);
905             tcg_gen_addi_i32(shift, shift, 1);
906             tcg_gen_shr_i32(REG(B11_8), REG(B11_8), shift);
907             tcg_temp_free(shift);
908             tcg_gen_br(label3);
909             /* Rm = -32 */
910             gen_set_label(label2);
911             tcg_gen_movi_i32(REG(B11_8), 0);
912             gen_set_label(label3);
913         }
914         return;
915     case 0x3008:                /* sub Rm,Rn */
916         tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
917         return;
918     case 0x300a:                /* subc Rm,Rn */
919         {
920             TCGv t0, t1, t2;
921             t0 = tcg_temp_new();
922             tcg_gen_andi_i32(t0, cpu_sr, SR_T);
923             t1 = tcg_temp_new();
924             tcg_gen_sub_i32(t1, REG(B11_8), REG(B7_4));
925             tcg_gen_sub_i32(t0, t1, t0);
926             t2 = tcg_temp_new();
927             tcg_gen_setcond_i32(TCG_COND_LTU, t2, REG(B11_8), t1);
928             tcg_gen_setcond_i32(TCG_COND_LTU, t1, t1, t0);
929             tcg_gen_or_i32(t1, t1, t2);
930             tcg_temp_free(t2);
931             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
932             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
933             tcg_temp_free(t1);
934             tcg_gen_mov_i32(REG(B11_8), t0);
935             tcg_temp_free(t0);
936         }
937         return;
938     case 0x300b:                /* subv Rm,Rn */
939         {
940             TCGv t0, t1, t2;
941             t0 = tcg_temp_new();
942             tcg_gen_sub_i32(t0, REG(B11_8), REG(B7_4));
943             t1 = tcg_temp_new();
944             tcg_gen_xor_i32(t1, t0, REG(B7_4));
945             t2 = tcg_temp_new();
946             tcg_gen_xor_i32(t2, REG(B11_8), REG(B7_4));
947             tcg_gen_and_i32(t1, t1, t2);
948             tcg_temp_free(t2);
949             tcg_gen_shri_i32(t1, t1, 31);
950             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
951             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
952             tcg_temp_free(t1);
953             tcg_gen_mov_i32(REG(B11_8), t0);
954             tcg_temp_free(t0);
955         }
956         return;
957     case 0x2008:                /* tst Rm,Rn */
958         {
959             TCGv val = tcg_temp_new();
960             tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
961             gen_cmp_imm(TCG_COND_EQ, val, 0);
962             tcg_temp_free(val);
963         }
964         return;
965     case 0x200a:                /* xor Rm,Rn */
966         tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
967         return;
968     case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
969         CHECK_FPU_ENABLED
970         if (ctx->flags & FPSCR_SZ) {
971             TCGv_i64 fp = tcg_temp_new_i64();
972             gen_load_fpr64(fp, XREG(B7_4));
973             gen_store_fpr64(fp, XREG(B11_8));
974             tcg_temp_free_i64(fp);
975         } else {
976             tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
977         }
978         return;
979     case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
980         CHECK_FPU_ENABLED
981         if (ctx->flags & FPSCR_SZ) {
982             TCGv addr_hi = tcg_temp_new();
983             int fr = XREG(B7_4);
984             tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
985             tcg_gen_qemu_st32(cpu_fregs[fr  ], REG(B11_8), ctx->memidx);
986             tcg_gen_qemu_st32(cpu_fregs[fr+1], addr_hi,    ctx->memidx);
987             tcg_temp_free(addr_hi);
988         } else {
989             tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], REG(B11_8), ctx->memidx);
990         }
991         return;
992     case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
993         CHECK_FPU_ENABLED
994         if (ctx->flags & FPSCR_SZ) {
995             TCGv addr_hi = tcg_temp_new();
996             int fr = XREG(B11_8);
997             tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
998             tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
999             tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1000             tcg_temp_free(addr_hi);
1001         } else {
1002             tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1003         }
1004         return;
1005     case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1006         CHECK_FPU_ENABLED
1007         if (ctx->flags & FPSCR_SZ) {
1008             TCGv addr_hi = tcg_temp_new();
1009             int fr = XREG(B11_8);
1010             tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1011             tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1012             tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1013             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
1014             tcg_temp_free(addr_hi);
1015         } else {
1016             tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1017             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
1018         }
1019         return;
1020     case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1021         CHECK_FPU_ENABLED
1022         if (ctx->flags & FPSCR_SZ) {
1023             TCGv addr = tcg_temp_new_i32();
1024             int fr = XREG(B7_4);
1025             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1026             tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1027             tcg_gen_subi_i32(addr, addr, 4);
1028             tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1029             tcg_gen_mov_i32(REG(B11_8), addr);
1030             tcg_temp_free(addr);
1031         } else {
1032             TCGv addr;
1033             addr = tcg_temp_new_i32();
1034             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1035             tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1036             tcg_gen_mov_i32(REG(B11_8), addr);
1037             tcg_temp_free(addr);
1038         }
1039         return;
1040     case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rn - FPSCR: Nothing */
1041         CHECK_FPU_ENABLED
1042         {
1043             TCGv addr = tcg_temp_new_i32();
1044             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
1045             if (ctx->flags & FPSCR_SZ) {
1046                 int fr = XREG(B11_8);
1047                 tcg_gen_qemu_ld32u(cpu_fregs[fr  ], addr, ctx->memidx);
1048                 tcg_gen_addi_i32(addr, addr, 4);
1049                 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
1050             } else {
1051                 tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], addr, ctx->memidx);
1052             }
1053             tcg_temp_free(addr);
1054         }
1055         return;
1056     case 0xf007: /* fmov {F,D,X}Rm,@(R0,Rn) - FPSCR: Nothing */
1057         CHECK_FPU_ENABLED
1058         {
1059             TCGv addr = tcg_temp_new();
1060             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
1061             if (ctx->flags & FPSCR_SZ) {
1062                 int fr = XREG(B7_4);
1063                 tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1064                 tcg_gen_addi_i32(addr, addr, 4);
1065                 tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1066             } else {
1067                 tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1068             }
1069             tcg_temp_free(addr);
1070         }
1071         return;
1072     case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1073     case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1074     case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1075     case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1076     case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1077     case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1078         {
1079             CHECK_FPU_ENABLED
1080             if (ctx->flags & FPSCR_PR) {
1081                 TCGv_i64 fp0, fp1;
1082
1083                 if (ctx->opcode & 0x0110)
1084                     break; /* illegal instruction */
1085                 fp0 = tcg_temp_new_i64();
1086                 fp1 = tcg_temp_new_i64();
1087                 gen_load_fpr64(fp0, DREG(B11_8));
1088                 gen_load_fpr64(fp1, DREG(B7_4));
1089                 switch (ctx->opcode & 0xf00f) {
1090                 case 0xf000:            /* fadd Rm,Rn */
1091                     gen_helper_fadd_DT(fp0, cpu_env, fp0, fp1);
1092                     break;
1093                 case 0xf001:            /* fsub Rm,Rn */
1094                     gen_helper_fsub_DT(fp0, cpu_env, fp0, fp1);
1095                     break;
1096                 case 0xf002:            /* fmul Rm,Rn */
1097                     gen_helper_fmul_DT(fp0, cpu_env, fp0, fp1);
1098                     break;
1099                 case 0xf003:            /* fdiv Rm,Rn */
1100                     gen_helper_fdiv_DT(fp0, cpu_env, fp0, fp1);
1101                     break;
1102                 case 0xf004:            /* fcmp/eq Rm,Rn */
1103                     gen_helper_fcmp_eq_DT(cpu_env, fp0, fp1);
1104                     return;
1105                 case 0xf005:            /* fcmp/gt Rm,Rn */
1106                     gen_helper_fcmp_gt_DT(cpu_env, fp0, fp1);
1107                     return;
1108                 }
1109                 gen_store_fpr64(fp0, DREG(B11_8));
1110                 tcg_temp_free_i64(fp0);
1111                 tcg_temp_free_i64(fp1);
1112             } else {
1113                 switch (ctx->opcode & 0xf00f) {
1114                 case 0xf000:            /* fadd Rm,Rn */
1115                     gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1116                                        cpu_fregs[FREG(B11_8)],
1117                                        cpu_fregs[FREG(B7_4)]);
1118                     break;
1119                 case 0xf001:            /* fsub Rm,Rn */
1120                     gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1121                                        cpu_fregs[FREG(B11_8)],
1122                                        cpu_fregs[FREG(B7_4)]);
1123                     break;
1124                 case 0xf002:            /* fmul Rm,Rn */
1125                     gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1126                                        cpu_fregs[FREG(B11_8)],
1127                                        cpu_fregs[FREG(B7_4)]);
1128                     break;
1129                 case 0xf003:            /* fdiv Rm,Rn */
1130                     gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1131                                        cpu_fregs[FREG(B11_8)],
1132                                        cpu_fregs[FREG(B7_4)]);
1133                     break;
1134                 case 0xf004:            /* fcmp/eq Rm,Rn */
1135                     gen_helper_fcmp_eq_FT(cpu_env, cpu_fregs[FREG(B11_8)],
1136                                           cpu_fregs[FREG(B7_4)]);
1137                     return;
1138                 case 0xf005:            /* fcmp/gt Rm,Rn */
1139                     gen_helper_fcmp_gt_FT(cpu_env, cpu_fregs[FREG(B11_8)],
1140                                           cpu_fregs[FREG(B7_4)]);
1141                     return;
1142                 }
1143             }
1144         }
1145         return;
1146     case 0xf00e: /* fmac FR0,FRm,FRn */
1147         {
1148             CHECK_FPU_ENABLED
1149             if (ctx->flags & FPSCR_PR) {
1150                 break; /* illegal instruction */
1151             } else {
1152                 gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1153                                    cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)],
1154                                    cpu_fregs[FREG(B11_8)]);
1155                 return;
1156             }
1157         }
1158     }
1159
1160     switch (ctx->opcode & 0xff00) {
1161     case 0xc900:                /* and #imm,R0 */
1162         tcg_gen_andi_i32(REG(0), REG(0), B7_0);
1163         return;
1164     case 0xcd00:                /* and.b #imm,@(R0,GBR) */
1165         {
1166             TCGv addr, val;
1167             addr = tcg_temp_new();
1168             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1169             val = tcg_temp_new();
1170             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1171             tcg_gen_andi_i32(val, val, B7_0);
1172             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1173             tcg_temp_free(val);
1174             tcg_temp_free(addr);
1175         }
1176         return;
1177     case 0x8b00:                /* bf label */
1178         CHECK_NOT_DELAY_SLOT
1179         gen_conditional_jump(ctx, ctx->pc + 2,
1180                              ctx->pc + 4 + B7_0s * 2);
1181         ctx->bstate = BS_BRANCH;
1182         return;
1183     case 0x8f00:                /* bf/s label */
1184         CHECK_NOT_DELAY_SLOT
1185         gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
1186         ctx->flags |= DELAY_SLOT_CONDITIONAL;
1187         return;
1188     case 0x8900:                /* bt label */
1189         CHECK_NOT_DELAY_SLOT
1190         gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
1191                              ctx->pc + 2);
1192         ctx->bstate = BS_BRANCH;
1193         return;
1194     case 0x8d00:                /* bt/s label */
1195         CHECK_NOT_DELAY_SLOT
1196         gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
1197         ctx->flags |= DELAY_SLOT_CONDITIONAL;
1198         return;
1199     case 0x8800:                /* cmp/eq #imm,R0 */
1200         gen_cmp_imm(TCG_COND_EQ, REG(0), B7_0s);
1201         return;
1202     case 0xc400:                /* mov.b @(disp,GBR),R0 */
1203         {
1204             TCGv addr = tcg_temp_new();
1205             tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1206             tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1207             tcg_temp_free(addr);
1208         }
1209         return;
1210     case 0xc500:                /* mov.w @(disp,GBR),R0 */
1211         {
1212             TCGv addr = tcg_temp_new();
1213             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1214             tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1215             tcg_temp_free(addr);
1216         }
1217         return;
1218     case 0xc600:                /* mov.l @(disp,GBR),R0 */
1219         {
1220             TCGv addr = tcg_temp_new();
1221             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1222             tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
1223             tcg_temp_free(addr);
1224         }
1225         return;
1226     case 0xc000:                /* mov.b R0,@(disp,GBR) */
1227         {
1228             TCGv addr = tcg_temp_new();
1229             tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1230             tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1231             tcg_temp_free(addr);
1232         }
1233         return;
1234     case 0xc100:                /* mov.w R0,@(disp,GBR) */
1235         {
1236             TCGv addr = tcg_temp_new();
1237             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1238             tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1239             tcg_temp_free(addr);
1240         }
1241         return;
1242     case 0xc200:                /* mov.l R0,@(disp,GBR) */
1243         {
1244             TCGv addr = tcg_temp_new();
1245             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1246             tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
1247             tcg_temp_free(addr);
1248         }
1249         return;
1250     case 0x8000:                /* mov.b R0,@(disp,Rn) */
1251         {
1252             TCGv addr = tcg_temp_new();
1253             tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1254             tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1255             tcg_temp_free(addr);
1256         }
1257         return;
1258     case 0x8100:                /* mov.w R0,@(disp,Rn) */
1259         {
1260             TCGv addr = tcg_temp_new();
1261             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1262             tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1263             tcg_temp_free(addr);
1264         }
1265         return;
1266     case 0x8400:                /* mov.b @(disp,Rn),R0 */
1267         {
1268             TCGv addr = tcg_temp_new();
1269             tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1270             tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1271             tcg_temp_free(addr);
1272         }
1273         return;
1274     case 0x8500:                /* mov.w @(disp,Rn),R0 */
1275         {
1276             TCGv addr = tcg_temp_new();
1277             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1278             tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1279             tcg_temp_free(addr);
1280         }
1281         return;
1282     case 0xc700:                /* mova @(disp,PC),R0 */
1283         tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
1284         return;
1285     case 0xcb00:                /* or #imm,R0 */
1286         tcg_gen_ori_i32(REG(0), REG(0), B7_0);
1287         return;
1288     case 0xcf00:                /* or.b #imm,@(R0,GBR) */
1289         {
1290             TCGv addr, val;
1291             addr = tcg_temp_new();
1292             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1293             val = tcg_temp_new();
1294             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1295             tcg_gen_ori_i32(val, val, B7_0);
1296             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1297             tcg_temp_free(val);
1298             tcg_temp_free(addr);
1299         }
1300         return;
1301     case 0xc300:                /* trapa #imm */
1302         {
1303             TCGv imm;
1304             CHECK_NOT_DELAY_SLOT
1305             tcg_gen_movi_i32(cpu_pc, ctx->pc);
1306             imm = tcg_const_i32(B7_0);
1307             gen_helper_trapa(cpu_env, imm);
1308             tcg_temp_free(imm);
1309             ctx->bstate = BS_BRANCH;
1310         }
1311         return;
1312     case 0xc800:                /* tst #imm,R0 */
1313         {
1314             TCGv val = tcg_temp_new();
1315             tcg_gen_andi_i32(val, REG(0), B7_0);
1316             gen_cmp_imm(TCG_COND_EQ, val, 0);
1317             tcg_temp_free(val);
1318         }
1319         return;
1320     case 0xcc00:                /* tst.b #imm,@(R0,GBR) */
1321         {
1322             TCGv val = tcg_temp_new();
1323             tcg_gen_add_i32(val, REG(0), cpu_gbr);
1324             tcg_gen_qemu_ld8u(val, val, ctx->memidx);
1325             tcg_gen_andi_i32(val, val, B7_0);
1326             gen_cmp_imm(TCG_COND_EQ, val, 0);
1327             tcg_temp_free(val);
1328         }
1329         return;
1330     case 0xca00:                /* xor #imm,R0 */
1331         tcg_gen_xori_i32(REG(0), REG(0), B7_0);
1332         return;
1333     case 0xce00:                /* xor.b #imm,@(R0,GBR) */
1334         {
1335             TCGv addr, val;
1336             addr = tcg_temp_new();
1337             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1338             val = tcg_temp_new();
1339             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1340             tcg_gen_xori_i32(val, val, B7_0);
1341             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1342             tcg_temp_free(val);
1343             tcg_temp_free(addr);
1344         }
1345         return;
1346     }
1347
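     /* ldc/stc forms that use a banked general register Rn_BANK; bits
        6..4 of the opcode select the bank register, and every form is
        privileged.  */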
1348     switch (ctx->opcode & 0xf08f) {
1349     case 0x408e:                /* ldc Rm,Rn_BANK */
1350         CHECK_PRIVILEGED
1351         tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
1352         return;
1353     case 0x4087:                /* ldc.l @Rm+,Rn_BANK */
1354         CHECK_PRIVILEGED
1355         tcg_gen_qemu_ld32s(ALTREG(B6_4), REG(B11_8), ctx->memidx);
1356         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1357         return;
1358     case 0x0082:                /* stc Rm_BANK,Rn */
1359         CHECK_PRIVILEGED
1360         tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
1361         return;
1362     case 0x4083:                /* stc.l Rm_BANK,@-Rn */
1363         CHECK_PRIVILEGED
1364         {
1365             TCGv addr = tcg_temp_new();
1366             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1367             tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
1368             tcg_gen_mov_i32(REG(B11_8), addr);
1369             tcg_temp_free(addr);
1370         }
1371         return;
1372     }
1373
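     /* Instructions with a single general register operand encoded in
        bits 11..8.  The delayed branches in this group (braf, bsrf, jmp,
        jsr) write the target to cpu_delayed_pc, set DELAY_SLOT in
        ctx->flags and mark the target as dynamic with delayed_pc = -1.  */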
1374     switch (ctx->opcode & 0xf0ff) {
1375     case 0x0023:                /* braf Rn */
1376         CHECK_NOT_DELAY_SLOT
1377         tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
1378         ctx->flags |= DELAY_SLOT;
1379         ctx->delayed_pc = (uint32_t) - 1;
1380         return;
1381     case 0x0003:                /* bsrf Rn */
1382         CHECK_NOT_DELAY_SLOT
1383         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1384         tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
1385         ctx->flags |= DELAY_SLOT;
1386         ctx->delayed_pc = (uint32_t) - 1;
1387         return;
1388     case 0x4015:                /* cmp/pl Rn */
1389         gen_cmp_imm(TCG_COND_GT, REG(B11_8), 0);
1390         return;
1391     case 0x4011:                /* cmp/pz Rn */
1392         gen_cmp_imm(TCG_COND_GE, REG(B11_8), 0);
1393         return;
1394     case 0x4010:                /* dt Rn */
1395         tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
1396         gen_cmp_imm(TCG_COND_EQ, REG(B11_8), 0);
1397         return;
1398     case 0x402b:                /* jmp @Rn */
1399         CHECK_NOT_DELAY_SLOT
1400         tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1401         ctx->flags |= DELAY_SLOT;
1402         ctx->delayed_pc = (uint32_t) - 1;
1403         return;
1404     case 0x400b:                /* jsr @Rn */
1405         CHECK_NOT_DELAY_SLOT
1406         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1407         tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1408         ctx->flags |= DELAY_SLOT;
1409         ctx->delayed_pc = (uint32_t) - 1;
1410         return;
1411     case 0x400e:                /* ldc Rm,SR */
1412         CHECK_PRIVILEGED
1413         tcg_gen_andi_i32(cpu_sr, REG(B11_8), 0x700083f3);
1414         ctx->bstate = BS_STOP;
1415         return;
1416     case 0x4007:                /* ldc.l @Rm+,SR */
1417         CHECK_PRIVILEGED
1418         {
1419             TCGv val = tcg_temp_new();
1420             tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
1421             tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
1422             tcg_temp_free(val);
1423             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1424             ctx->bstate = BS_STOP;
1425         }
1426         return;
1427     case 0x0002:                /* stc SR,Rn */
1428         CHECK_PRIVILEGED
1429         tcg_gen_mov_i32(REG(B11_8), cpu_sr);
1430         return;
1431     case 0x4003:                /* stc SR,@-Rn */
1432         CHECK_PRIVILEGED
1433         {
1434             TCGv addr = tcg_temp_new();
1435             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1436             tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
1437             tcg_gen_mov_i32(REG(B11_8), addr);
1438             tcg_temp_free(addr);
1439         }
1440         return;
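     /* The LD/ST/LDST macros below expand into the case arms that move a
        control or system register to or from a general register: a plain
        register form, a post-increment load (@Rm+) and a pre-decrement
        store (@-Rn).  'prechk' inserts the privilege or FPU check each
        register requires.  */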
1441 #define LD(reg,ldnum,ldpnum,prechk)             \
1442   case ldnum:                                                   \
1443     prechk                                                      \
1444     tcg_gen_mov_i32 (cpu_##reg, REG(B11_8));                    \
1445     return;                                                     \
1446   case ldpnum:                                                  \
1447     prechk                                                      \
1448     tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx);    \
1449     tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);                \
1450     return;
1451 #define ST(reg,stnum,stpnum,prechk)             \
1452   case stnum:                                                   \
1453     prechk                                                      \
1454     tcg_gen_mov_i32 (REG(B11_8), cpu_##reg);                    \
1455     return;                                                     \
1456   case stpnum:                                                  \
1457     prechk                                                      \
1458     {                                                           \
1459         TCGv addr = tcg_temp_new();                             \
1460         tcg_gen_subi_i32(addr, REG(B11_8), 4);                  \
1461         tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx);       \
1462         tcg_gen_mov_i32(REG(B11_8), addr);                      \
1463         tcg_temp_free(addr);                                    \
1464     }                                                           \
1465     return;
1466 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk)              \
1467         LD(reg,ldnum,ldpnum,prechk)                             \
1468         ST(reg,stnum,stpnum,prechk)
1469         LDST(gbr,  0x401e, 0x4017, 0x0012, 0x4013, {})
1470         LDST(vbr,  0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
1471         LDST(ssr,  0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
1472         LDST(spc,  0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
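             /* SGR is asymmetric: the store forms only need privilege,
                but the load forms are accepted only when the SH4A
                feature bit is set.  */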
1473         ST(sgr,  0x003a, 0x4032, CHECK_PRIVILEGED)
1474         LD(sgr,  0x403a, 0x4036, CHECK_PRIVILEGED if (!(ctx->features & SH_FEATURE_SH4A)) break;)
1475         LDST(dbr,  0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
1476         LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
1477         LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
1478         LDST(pr,   0x402a, 0x4026, 0x002a, 0x4022, {})
1479         LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
1480     case 0x406a:                /* lds Rm,FPSCR */
1481         CHECK_FPU_ENABLED
1482         gen_helper_ld_fpscr(cpu_env, REG(B11_8));
1483         ctx->bstate = BS_STOP;
1484         return;
1485     case 0x4066:                /* lds.l @Rm+,FPSCR */
1486         CHECK_FPU_ENABLED
1487         {
1488             TCGv val = tcg_temp_new();
1489             tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
1490             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1491             gen_helper_ld_fpscr(cpu_env, val);
1492             tcg_temp_free(val);
1493             ctx->bstate = BS_STOP;
1494         }
1495         return;
1496     case 0x006a:                /* sts FPSCR,Rn */
1497         CHECK_FPU_ENABLED
1498         tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
1499         return;
1500     case 0x4062:                /* sts FPSCR,@-Rn */
1501         CHECK_FPU_ENABLED
1502         {
1503             TCGv addr, val;
1504             val = tcg_temp_new();
1505             tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
1506             addr = tcg_temp_new();
1507             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1508             tcg_gen_qemu_st32(val, addr, ctx->memidx);
1509             tcg_gen_mov_i32(REG(B11_8), addr);
1510             tcg_temp_free(addr);
1511             tcg_temp_free(val);
1512         }
1513         return;
1514     case 0x00c3:                /* movca.l R0,@Rn */
1515         {
1516             TCGv val = tcg_temp_new();
1517             tcg_gen_qemu_ld32u(val, REG(B11_8), ctx->memidx);
1518             gen_helper_movcal(cpu_env, REG(B11_8), val);
1519             tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
                 tcg_temp_free(val);
1520         }
1521         ctx->has_movcal = 1;
1522         return;
1523     case 0x40a9:
1524         /* MOVUA.L @Rm,R0    (Rm) -> R0
1525            Load data that need not be longword-aligned */
1526         tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1527         return;
1528     case 0x40e9:
1529         /* MOVUA.L @Rm+,R0   (Rm) -> R0, Rm + 4 -> Rm
1530            Load data that need not be longword-aligned */
1531         tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1532         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1533         return;
1534     case 0x0029:                /* movt Rn */
1535         tcg_gen_andi_i32(REG(B11_8), cpu_sr, SR_T);
1536         return;
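     /* MOVCO.L and MOVLI.L below form SH-4A's load-linked /
        store-conditional pair: movli.l sets cpu_ldst when it loads, and
        movco.l copies cpu_ldst into T, stores R0 only while the link is
        still set, then clears cpu_ldst.  */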
1537     case 0x0073:
1538         /* MOVCO.L
1539                LDST -> T
1540                If (T == 1) R0 -> (Rn)
1541                0 -> LDST
1542         */
1543         if (ctx->features & SH_FEATURE_SH4A) {
1544             int label = gen_new_label();
1545             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
1546             tcg_gen_or_i32(cpu_sr, cpu_sr, cpu_ldst);
1547             tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_ldst, 0, label);
1548             tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1549             gen_set_label(label);
1550             tcg_gen_movi_i32(cpu_ldst, 0);
1551             return;
1552         } else
1553             break;
1554     case 0x0063:
1555         /* MOVLI.L @Rm,R0
1556                1 -> LDST
1557                (Rm) -> R0
1558                When interrupt/exception
1559                occurred 0 -> LDST
1560         */
1561         if (ctx->features & SH_FEATURE_SH4A) {
1562             tcg_gen_movi_i32(cpu_ldst, 0);
1563             tcg_gen_qemu_ld32s(REG(0), REG(B11_8), ctx->memidx);
1564             tcg_gen_movi_i32(cpu_ldst, 1);
1565             return;
1566         } else
1567             break;
1568     case 0x0093:                /* ocbi @Rn */
1569         {
1570             gen_helper_ocbi(cpu_env, REG(B11_8));
1571         }
1572         return;
1573     case 0x00a3:                /* ocbp @Rn */
1574     case 0x00b3:                /* ocbwb @Rn */
1575         /* These instructions are supposed to do nothing in case of
1576            a cache miss. Given that we only partially emulate caches,
1577            it is safe to simply ignore them. */
1578         return;
1579     case 0x0083:                /* pref @Rn */
1580         return;
1581     case 0x00d3:                /* prefi @Rn */
1582         if (ctx->features & SH_FEATURE_SH4A)
1583             return;
1584         else
1585             break;
1586     case 0x00e3:                /* icbi @Rn */
1587         if (ctx->features & SH_FEATURE_SH4A)
1588             return;
1589         else
1590             break;
1591     case 0x00ab:                /* synco */
1592         if (ctx->features & SH_FEATURE_SH4A)
1593             return;
1594         else
1595             break;
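     /* Shift and rotate group: gen_copy_bit_i32 moves the bit that is
        shifted out into SR.T; rotcl/rotcr additionally rotate the old T
        value back in through a temporary copy of SR.  */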
1596     case 0x4024:                /* rotcl Rn */
1597         {
1598             TCGv tmp = tcg_temp_new();
1599             tcg_gen_mov_i32(tmp, cpu_sr);
1600             gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1601             tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1602             gen_copy_bit_i32(REG(B11_8), 0, tmp, 0);
1603             tcg_temp_free(tmp);
1604         }
1605         return;
1606     case 0x4025:                /* rotcr Rn */
1607         {
1608             TCGv tmp = tcg_temp_new();
1609             tcg_gen_mov_i32(tmp, cpu_sr);
1610             gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1611             tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1612             gen_copy_bit_i32(REG(B11_8), 31, tmp, 0);
1613             tcg_temp_free(tmp);
1614         }
1615         return;
1616     case 0x4004:                /* rotl Rn */
1617         tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1);
1618         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1619         return;
1620     case 0x4005:                /* rotr Rn */
1621         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1622         tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1);
1623         return;
1624     case 0x4000:                /* shll Rn */
1625     case 0x4020:                /* shal Rn */
1626         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1627         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1628         return;
1629     case 0x4021:                /* shar Rn */
1630         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1631         tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
1632         return;
1633     case 0x4001:                /* shlr Rn */
1634         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1635         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1636         return;
1637     case 0x4008:                /* shll2 Rn */
1638         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2);
1639         return;
1640     case 0x4018:                /* shll8 Rn */
1641         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8);
1642         return;
1643     case 0x4028:                /* shll16 Rn */
1644         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16);
1645         return;
1646     case 0x4009:                /* shlr2 Rn */
1647         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2);
1648         return;
1649     case 0x4019:                /* shlr8 Rn */
1650         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8);
1651         return;
1652     case 0x4029:                /* shlr16 Rn */
1653         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16);
1654         return;
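     /* tas.b: the byte is loaded, compared with zero (setting T), then
        written back with bit 7 set.  The load and store are separate
        TCG ops, so no atomicity is provided here.  */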
1655     case 0x401b:                /* tas.b @Rn */
1656         {
1657             TCGv addr, val;
1658             addr = tcg_temp_local_new();
1659             tcg_gen_mov_i32(addr, REG(B11_8));
1660             val = tcg_temp_local_new();
1661             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1662             gen_cmp_imm(TCG_COND_EQ, val, 0);
1663             tcg_gen_ori_i32(val, val, 0x80);
1664             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1665             tcg_temp_free(val);
1666             tcg_temp_free(addr);
1667         }
1668         return;
1669     case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1670         CHECK_FPU_ENABLED
1671         tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fpul);
1672         return;
1673     case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1674         CHECK_FPU_ENABLED
1675         tcg_gen_mov_i32(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1676         return;
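     /* For the FP instructions below, FPSCR.PR selects double precision:
        the operand is moved through a 64-bit temporary with
        gen_load_fpr64/gen_store_fpr64, and an odd register encoding
        (opcode bit 8 set) is treated as an illegal instruction.  */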
1677     case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1678         CHECK_FPU_ENABLED
1679         if (ctx->flags & FPSCR_PR) {
1680             TCGv_i64 fp;
1681             if (ctx->opcode & 0x0100)
1682                 break; /* illegal instruction */
1683             fp = tcg_temp_new_i64();
1684             gen_helper_float_DT(fp, cpu_env, cpu_fpul);
1685             gen_store_fpr64(fp, DREG(B11_8));
1686             tcg_temp_free_i64(fp);
1687         }
1688         else {
1689             gen_helper_float_FT(cpu_fregs[FREG(B11_8)], cpu_env, cpu_fpul);
1690         }
1691         return;
1692     case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1693         CHECK_FPU_ENABLED
1694         if (ctx->flags & FPSCR_PR) {
1695             TCGv_i64 fp;
1696             if (ctx->opcode & 0x0100)
1697                 break; /* illegal instruction */
1698             fp = tcg_temp_new_i64();
1699             gen_load_fpr64(fp, DREG(B11_8));
1700             gen_helper_ftrc_DT(cpu_fpul, cpu_env, fp);
1701             tcg_temp_free_i64(fp);
1702         }
1703         else {
1704             gen_helper_ftrc_FT(cpu_fpul, cpu_env, cpu_fregs[FREG(B11_8)]);
1705         }
1706         return;
1707     case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1708         CHECK_FPU_ENABLED
1709         {
1710             gen_helper_fneg_T(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1711         }
1712         return;
1713     case 0xf05d: /* fabs FRn/DRn */
1714         CHECK_FPU_ENABLED
1715         if (ctx->flags & FPSCR_PR) {
1716             if (ctx->opcode & 0x0100)
1717                 break; /* illegal instruction */
1718             TCGv_i64 fp = tcg_temp_new_i64();
1719             gen_load_fpr64(fp, DREG(B11_8));
1720             gen_helper_fabs_DT(fp, fp);
1721             gen_store_fpr64(fp, DREG(B11_8));
1722             tcg_temp_free_i64(fp);
1723         } else {
1724             gen_helper_fabs_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1725         }
1726         return;
1727     case 0xf06d: /* fsqrt FRn */
1728         CHECK_FPU_ENABLED
1729         if (ctx->flags & FPSCR_PR) {
1730             if (ctx->opcode & 0x0100)
1731                 break; /* illegal instruction */
1732             TCGv_i64 fp = tcg_temp_new_i64();
1733             gen_load_fpr64(fp, DREG(B11_8));
1734             gen_helper_fsqrt_DT(fp, cpu_env, fp);
1735             gen_store_fpr64(fp, DREG(B11_8));
1736             tcg_temp_free_i64(fp);
1737         } else {
1738             gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1739                                 cpu_fregs[FREG(B11_8)]);
1740         }
1741         return;
1742     case 0xf07d: /* fsrra FRn */
1743         CHECK_FPU_ENABLED
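             /* fsrra is not emulated: the break drops out of the switch
                to the illegal-instruction code below.  */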
1744         break;
1745     case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1746         CHECK_FPU_ENABLED
1747         if (!(ctx->flags & FPSCR_PR)) {
1748             tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0);
1749         }
1750         return;
1751     case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1752         CHECK_FPU_ENABLED
1753         if (!(ctx->flags & FPSCR_PR)) {
1754             tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000);
1755         }
1756         return;
1757     case 0xf0ad: /* fcnvsd FPUL,DRn */
1758         CHECK_FPU_ENABLED
1759         {
1760             TCGv_i64 fp = tcg_temp_new_i64();
1761             gen_helper_fcnvsd_FT_DT(fp, cpu_env, cpu_fpul);
1762             gen_store_fpr64(fp, DREG(B11_8));
1763             tcg_temp_free_i64(fp);
1764         }
1765         return;
1766     case 0xf0bd: /* fcnvds DRn,FPUL */
1767         CHECK_FPU_ENABLED
1768         {
1769             TCGv_i64 fp = tcg_temp_new_i64();
1770             gen_load_fpr64(fp, DREG(B11_8));
1771             gen_helper_fcnvds_DT_FT(cpu_fpul, cpu_env, fp);
1772             tcg_temp_free_i64(fp);
1773         }
1774         return;
1775     case 0xf0ed: /* fipr FVm,FVn */
1776         CHECK_FPU_ENABLED
1777         if ((ctx->flags & FPSCR_PR) == 0) {
1778             TCGv m, n;
1779             m = tcg_const_i32((ctx->opcode >> 8) & 3);
1780             n = tcg_const_i32((ctx->opcode >> 10) & 3);
1781             gen_helper_fipr(cpu_env, m, n);
1782             tcg_temp_free(m);
1783             tcg_temp_free(n);
1784             return;
1785         }
1786         break;
1787     case 0xf0fd: /* ftrv XMTRX,FVn */
1788         CHECK_FPU_ENABLED
1789         if ((ctx->opcode & 0x0300) == 0x0100 &&
1790             (ctx->flags & FPSCR_PR) == 0) {
1791             TCGv n;
1792             n = tcg_const_i32((ctx->opcode >> 10) & 3);
1793             gen_helper_ftrv(cpu_env, n);
1794             tcg_temp_free(n);
1795             return;
1796         }
1797         break;
1798     }
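     /* No decode pattern matched: report the opcode as an illegal
        instruction, using the slot-illegal variant when it sits in a
        delay slot.  */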
1799 #if 0
1800     fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
1801             ctx->opcode, ctx->pc);
1802     fflush(stderr);
1803 #endif
1804     tcg_gen_movi_i32(cpu_pc, ctx->pc);
1805     if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1806         gen_helper_raise_slot_illegal_instruction(cpu_env);
1807     } else {
1808         gen_helper_raise_illegal_instruction(cpu_env);
1809     }
1810     ctx->bstate = BS_BRANCH;
1811 }
1812
1813 static void decode_opc(DisasContext * ctx)
1814 {
1815     uint32_t old_flags = ctx->flags;
1816
1817     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
1818         tcg_gen_debug_insn_start(ctx->pc);
1819     }
1820
1821     _decode_opc(ctx);
1822
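     /* If the previous instruction opened a delay slot, the insn just
        decoded was that slot: drop the delay-slot flags and emit the
        pending (possibly conditional) jump.  */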
1823     if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1824         if (ctx->flags & DELAY_SLOT_CLEARME) {
1825             gen_store_flags(0);
1826         } else {
1827             /* go out of the delay slot */
1828             uint32_t new_flags = ctx->flags;
1829             new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
1830             gen_store_flags(new_flags);
1831         }
1832         ctx->flags = 0;
1833         ctx->bstate = BS_BRANCH;
1834         if (old_flags & DELAY_SLOT_CONDITIONAL) {
1835             gen_delayed_conditional_jump(ctx);
1836         } else if (old_flags & DELAY_SLOT) {
1837             gen_jump(ctx);
1838         }
1839
1840     }
1841
1842     /* go into a delay slot */
1843     if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
1844         gen_store_flags(ctx->flags);
1845 }
1846
1847 static inline void
1848 gen_intermediate_code_internal(CPUSH4State * env, TranslationBlock * tb,
1849                                int search_pc)
1850 {
1851     DisasContext ctx;
1852     target_ulong pc_start;
1853     static uint16_t *gen_opc_end;
1854     CPUBreakpoint *bp;
1855     int i, ii;
1856     int num_insns;
1857     int max_insns;
1858
1859     pc_start = tb->pc;
1860     gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
1861     ctx.pc = pc_start;
1862     ctx.flags = (uint32_t)tb->flags;
1863     ctx.bstate = BS_NONE;
1864     ctx.memidx = (ctx.flags & SR_MD) == 0 ? 1 : 0;
1865     /* We don't know if the delayed pc came from a dynamic or static branch,
1866        so assume it is a dynamic branch.  */
1867     ctx.delayed_pc = -1; /* use delayed pc from env pointer */
1868     ctx.tb = tb;
1869     ctx.singlestep_enabled = env->singlestep_enabled;
1870     ctx.features = env->features;
1871     ctx.has_movcal = (ctx.flags & TB_FLAG_PENDING_MOVCA);
1872
1873     ii = -1;
1874     num_insns = 0;
1875     max_insns = tb->cflags & CF_COUNT_MASK;
1876     if (max_insns == 0)
1877         max_insns = CF_COUNT_MASK;
1878     gen_tb_start();
1879     while (ctx.bstate == BS_NONE && tcg_ctx.gen_opc_ptr < gen_opc_end) {
1880         if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
1881             QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
1882                 if (ctx.pc == bp->pc) {
1883                     /* We have hit a breakpoint - make sure PC is up-to-date */
1884                     tcg_gen_movi_i32(cpu_pc, ctx.pc);
1885                     gen_helper_debug(cpu_env);
1886                     ctx.bstate = BS_BRANCH;
1887                     break;
1888                 }
1889             }
1890         }
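         /* In the search_pc variant, record the guest pc, flags and
            icount for each generated op so restore_state_to_opc() can
            recover them from an op index later.  */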
1891         if (search_pc) {
1892             i = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
1893             if (ii < i) {
1894                 ii++;
1895                 while (ii < i)
1896                     tcg_ctx.gen_opc_instr_start[ii++] = 0;
1897             }
1898             tcg_ctx.gen_opc_pc[ii] = ctx.pc;
1899             gen_opc_hflags[ii] = ctx.flags;
1900             tcg_ctx.gen_opc_instr_start[ii] = 1;
1901             tcg_ctx.gen_opc_icount[ii] = num_insns;
1902         }
1903         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
1904             gen_io_start();
1905 #if 0
1906         fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
1907         fflush(stderr);
1908 #endif
1909         ctx.opcode = cpu_lduw_code(env, ctx.pc);
1910         decode_opc(&ctx);
1911         num_insns++;
1912         ctx.pc += 2;
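         /* Stop translating at a page boundary, when single-stepping,
            or once the instruction budget for this TB is exhausted.  */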
1913         if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
1914             break;
1915         if (env->singlestep_enabled)
1916             break;
1917         if (num_insns >= max_insns)
1918             break;
1919         if (singlestep)
1920             break;
1921     }
1922     if (tb->cflags & CF_LAST_IO)
1923         gen_io_end();
1924     if (env->singlestep_enabled) {
1925         tcg_gen_movi_i32(cpu_pc, ctx.pc);
1926         gen_helper_debug(cpu_env);
1927     } else {
1928         switch (ctx.bstate) {
1929         case BS_STOP:
1930             /* gen_op_interrupt_restart(); */
1931             /* fall through */
1932         case BS_NONE:
1933             if (ctx.flags) {
1934                 gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
1935             }
1936             gen_goto_tb(&ctx, 0, ctx.pc);
1937             break;
1938         case BS_EXCP:
1939             /* gen_op_interrupt_restart(); */
1940             tcg_gen_exit_tb(0);
1941             break;
1942         case BS_BRANCH:
1943         default:
1944             break;
1945         }
1946     }
1947
1948     gen_tb_end(tb, num_insns);
1949     *tcg_ctx.gen_opc_ptr = INDEX_op_end;
1950     if (search_pc) {
1951         i = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
1952         ii++;
1953         while (ii <= i)
1954             tcg_ctx.gen_opc_instr_start[ii++] = 0;
1955     } else {
1956         tb->size = ctx.pc - pc_start;
1957         tb->icount = num_insns;
1958     }
1959
1960 #ifdef DEBUG_DISAS
1961     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
1962         qemu_log("IN:\n");      /* , lookup_symbol(pc_start)); */
1963         log_target_disas(env, pc_start, ctx.pc - pc_start, 0);
1964         qemu_log("\n");
1965     }
1966 #endif
1967 }
1968
1969 void gen_intermediate_code(CPUSH4State * env, struct TranslationBlock *tb)
1970 {
1971     gen_intermediate_code_internal(env, tb, 0);
1972 }
1973
1974 void gen_intermediate_code_pc(CPUSH4State * env, struct TranslationBlock *tb)
1975 {
1976     gen_intermediate_code_internal(env, tb, 1);
1977 }
1978
1979 void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb, int pc_pos)
1980 {
1981     env->pc = tcg_ctx.gen_opc_pc[pc_pos];
1982     env->flags = gen_opc_hflags[pc_pos];
1983 }