1 /*
2  *  SH4 translation
3  *
4  *  Copyright (c) 2005 Samuel Tardieu
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18  */
19
20 #define DEBUG_DISAS
21 //#define SH4_SINGLE_STEP
22
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "tcg-op.h"
26
27 #include "helper.h"
28 #define GEN_HELPER 1
29 #include "helper.h"
30
31 typedef struct DisasContext {
32     struct TranslationBlock *tb;
33     target_ulong pc;
34     uint16_t opcode;
35     uint32_t flags;
36     int bstate;
37     int memidx;
38     uint32_t delayed_pc;
39     int singlestep_enabled;
40     uint32_t features;
41     int has_movcal;
42 } DisasContext;
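/* As used below: pc holds the guest address of the instruction being
   translated and opcode its 16-bit encoding; flags caches the SR/FPSCR bits
   and DELAY_SLOT* state consulted by IS_USER() and the REG()/FREG()/XREG()
   macros; bstate takes one of the BS_* values defined next; delayed_pc holds
   a statically known branch target, or (uint32_t)-1 when the target is only
   known at run time (see gen_jump()).  */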
43
44 #if defined(CONFIG_USER_ONLY)
45 #define IS_USER(ctx) 1
46 #else
47 #define IS_USER(ctx) (!(ctx->flags & SR_MD))
48 #endif
49
50 enum {
51     BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
52                       * exception condition
53                       */
54     BS_STOP     = 1, /* We want to stop translation for any reason */
55     BS_BRANCH   = 2, /* We reached a branch condition     */
56     BS_EXCP     = 3, /* We reached an exception condition */
57 };
58
59 /* global register indexes */
60 static TCGv_ptr cpu_env;
61 static TCGv cpu_gregs[24];
62 static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
63 static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
64 static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_ldst;
65 static TCGv cpu_fregs[32];
66
67 /* internal register indexes */
68 static TCGv cpu_flags, cpu_delayed_pc;
69
70 static uint32_t gen_opc_hflags[OPC_BUF_SIZE];
71
72 #include "exec/gen-icount.h"
73
74 void sh4_translate_init(void)
75 {
76     int i;
77     static int done_init = 0;
78     static const char * const gregnames[24] = {
79         "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
80         "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
81         "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
82         "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
83         "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
84     };
85     static const char * const fregnames[32] = {
86          "FPR0_BANK0",  "FPR1_BANK0",  "FPR2_BANK0",  "FPR3_BANK0",
87          "FPR4_BANK0",  "FPR5_BANK0",  "FPR6_BANK0",  "FPR7_BANK0",
88          "FPR8_BANK0",  "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
89         "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
90          "FPR0_BANK1",  "FPR1_BANK1",  "FPR2_BANK1",  "FPR3_BANK1",
91          "FPR4_BANK1",  "FPR5_BANK1",  "FPR6_BANK1",  "FPR7_BANK1",
92          "FPR8_BANK1",  "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
93         "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
94     };
95
96     if (done_init)
97         return;
98
99     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
100
101     for (i = 0; i < 24; i++)
102         cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
103                                               offsetof(CPUSH4State, gregs[i]),
104                                               gregnames[i]);
105
106     cpu_pc = tcg_global_mem_new_i32(TCG_AREG0,
107                                     offsetof(CPUSH4State, pc), "PC");
108     cpu_sr = tcg_global_mem_new_i32(TCG_AREG0,
109                                     offsetof(CPUSH4State, sr), "SR");
110     cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0,
111                                      offsetof(CPUSH4State, ssr), "SSR");
112     cpu_spc = tcg_global_mem_new_i32(TCG_AREG0,
113                                      offsetof(CPUSH4State, spc), "SPC");
114     cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0,
115                                      offsetof(CPUSH4State, gbr), "GBR");
116     cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0,
117                                      offsetof(CPUSH4State, vbr), "VBR");
118     cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0,
119                                      offsetof(CPUSH4State, sgr), "SGR");
120     cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0,
121                                      offsetof(CPUSH4State, dbr), "DBR");
122     cpu_mach = tcg_global_mem_new_i32(TCG_AREG0,
123                                       offsetof(CPUSH4State, mach), "MACH");
124     cpu_macl = tcg_global_mem_new_i32(TCG_AREG0,
125                                       offsetof(CPUSH4State, macl), "MACL");
126     cpu_pr = tcg_global_mem_new_i32(TCG_AREG0,
127                                     offsetof(CPUSH4State, pr), "PR");
128     cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
129                                        offsetof(CPUSH4State, fpscr), "FPSCR");
130     cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0,
131                                       offsetof(CPUSH4State, fpul), "FPUL");
132
133     cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
134                                        offsetof(CPUSH4State, flags), "_flags_");
135     cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
136                                             offsetof(CPUSH4State, delayed_pc),
137                                             "_delayed_pc_");
138     cpu_ldst = tcg_global_mem_new_i32(TCG_AREG0,
139                                       offsetof(CPUSH4State, ldst), "_ldst_");
140
141     for (i = 0; i < 32; i++)
142         cpu_fregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
143                                               offsetof(CPUSH4State, fregs[i]),
144                                               fregnames[i]);
145
146     /* register helpers */
147 #define GEN_HELPER 2
148 #include "helper.h"
149
150     done_init = 1;
151 }
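/* Example (illustrative only, not part of the original file): once the
 * globals above are registered, generated code manipulates CPU state
 * symbolically.  A hypothetical helper clearing MACH/MACL could be written
 * as:
 *
 *     static void gen_clear_mac(void)
 *     {
 *         tcg_gen_movi_i32(cpu_mach, 0);
 *         tcg_gen_movi_i32(cpu_macl, 0);
 *     }
 *
 * which is exactly what the clrmac case in _decode_opc() emits below.
 */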
152
153 void superh_cpu_dump_state(CPUState *cs, FILE *f,
154                            fprintf_function cpu_fprintf, int flags)
155 {
156     SuperHCPU *cpu = SUPERH_CPU(cs);
157     CPUSH4State *env = &cpu->env;
158     int i;
159     cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
160                 env->pc, env->sr, env->pr, env->fpscr);
161     cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
162                 env->spc, env->ssr, env->gbr, env->vbr);
163     cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
164                 env->sgr, env->dbr, env->delayed_pc, env->fpul);
165     for (i = 0; i < 24; i += 4) {
166         cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
167                     i, env->gregs[i], i + 1, env->gregs[i + 1],
168                     i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
169     }
170     if (env->flags & DELAY_SLOT) {
171         cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
172                     env->delayed_pc);
173     } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
174         cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
175                     env->delayed_pc);
176     }
177 }
178
179 static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
180 {
181     TranslationBlock *tb;
182     tb = ctx->tb;
183
184     if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
185         !ctx->singlestep_enabled) {
186         /* Use a direct jump if in same page and singlestep not enabled */
187         tcg_gen_goto_tb(n);
188         tcg_gen_movi_i32(cpu_pc, dest);
189         tcg_gen_exit_tb((tcg_target_long)tb + n);
190     } else {
191         tcg_gen_movi_i32(cpu_pc, dest);
192         if (ctx->singlestep_enabled)
193             gen_helper_debug(cpu_env);
194         tcg_gen_exit_tb(0);
195     }
196 }
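/* A note on the mechanism above: tcg_gen_goto_tb(n) emits a patchable direct
   jump for exit n (0 or 1) of this TB, and exiting with (tcg_target_long)tb + n
   tells the execution loop which exit was taken so that it can chain it to
   the next TB.  Exiting with 0 instead skips chaining, which is what we want
   when single-stepping or when the destination may lie in a different guest
   page.  */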
197
198 static void gen_jump(DisasContext * ctx)
199 {
200     if (ctx->delayed_pc == (uint32_t) - 1) {
201         /* Target is not statically known; it necessarily comes from a
202            delayed jump, as immediate jumps are conditional jumps */
203         tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
204         if (ctx->singlestep_enabled)
205             gen_helper_debug(cpu_env);
206         tcg_gen_exit_tb(0);
207     } else {
208         gen_goto_tb(ctx, 0, ctx->delayed_pc);
209     }
210 }
211
212 static inline void gen_branch_slot(uint32_t delayed_pc, int t)
213 {
214     TCGv sr;
215     int label = gen_new_label();
216     tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
217     sr = tcg_temp_new();
218     tcg_gen_andi_i32(sr, cpu_sr, SR_T);
219     tcg_gen_brcondi_i32(t ? TCG_COND_EQ:TCG_COND_NE, sr, 0, label);
220     tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
221     gen_set_label(label);
222 }
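/* gen_branch_slot() records the target of a conditional delayed branch
   (bt/s, bf/s): the target always goes into cpu_delayed_pc, and the
   DELAY_SLOT_TRUE flag is set only when SR.T matches the sense of the branch
   (t != 0 for bt/s, t == 0 for bf/s), i.e. when the branch will be taken.
   gen_delayed_conditional_jump() below then tests that flag once the
   delay-slot instruction has been emitted.  */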
223
224 /* Immediate conditional jump (bt or bf) */
225 static void gen_conditional_jump(DisasContext * ctx,
226                                  target_ulong ift, target_ulong ifnott)
227 {
228     int l1;
229     TCGv sr;
230
231     l1 = gen_new_label();
232     sr = tcg_temp_new();
233     tcg_gen_andi_i32(sr, cpu_sr, SR_T);
234     tcg_gen_brcondi_i32(TCG_COND_NE, sr, 0, l1);
235     gen_goto_tb(ctx, 0, ifnott);
236     gen_set_label(l1);
237     gen_goto_tb(ctx, 1, ift);
238 }
239
240 /* Delayed conditional jump (bt or bf) */
241 static void gen_delayed_conditional_jump(DisasContext * ctx)
242 {
243     int l1;
244     TCGv ds;
245
246     l1 = gen_new_label();
247     ds = tcg_temp_new();
248     tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
249     tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1);
250     gen_goto_tb(ctx, 1, ctx->pc + 2);
251     gen_set_label(l1);
252     tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
253     gen_jump(ctx);
254 }
255
256 static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
257 {
258     TCGv t;
259
260     t = tcg_temp_new();
261     tcg_gen_setcond_i32(cond, t, t1, t0);
262     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
263     tcg_gen_or_i32(cpu_sr, cpu_sr, t);
264
265     tcg_temp_free(t);
266 }
267
268 static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
269 {
270     TCGv t;
271
272     t = tcg_temp_new();
273     tcg_gen_setcondi_i32(cond, t, t0, imm);
274     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
275     tcg_gen_or_i32(cpu_sr, cpu_sr, t);
276
277     tcg_temp_free(t);
278 }
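/* Both helpers above compute a 0/1 comparison result and deposit it in the
   T bit (bit 0) of SR, clearing the old value first.  Note the operand order
   in gen_cmp(): callers pass (Rm, Rn), so e.g. cmp/gt Rm,Rn sets T when
   Rn > Rm, as in the cases further down.  */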
279
280 static inline void gen_store_flags(uint32_t flags)
281 {
282     tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
283     tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
284 }
285
286 static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
287 {
288     TCGv tmp = tcg_temp_new();
289
290     p0 &= 0x1f;
291     p1 &= 0x1f;
292
293     tcg_gen_andi_i32(tmp, t1, (1 << p1));
294     tcg_gen_andi_i32(t0, t0, ~(1 << p0));
295     if (p0 < p1)
296         tcg_gen_shri_i32(tmp, tmp, p1 - p0);
297     else if (p0 > p1)
298         tcg_gen_shli_i32(tmp, tmp, p0 - p1);
299     tcg_gen_or_i32(t0, t0, tmp);
300
301     tcg_temp_free(tmp);
302 }
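/* Copies bit p1 of t1 into bit p0 of t0 while preserving the other bits of
   t0; the div0s case below uses it to load SR.Q (bit 8), SR.M (bit 9) and
   SR.T (bit 0) from the operands' sign bits.  */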
303
304 static inline void gen_load_fpr64(TCGv_i64 t, int reg)
305 {
306     tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
307 }
308
309 static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
310 {
311     TCGv_i32 tmp = tcg_temp_new_i32();
312     tcg_gen_trunc_i64_i32(tmp, t);
313     tcg_gen_mov_i32(cpu_fregs[reg + 1], tmp);
314     tcg_gen_shri_i64(t, t, 32);
315     tcg_gen_trunc_i64_i32(tmp, t);
316     tcg_gen_mov_i32(cpu_fregs[reg], tmp);
317     tcg_temp_free_i32(tmp);
318 }
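/* Double-precision DRn values live in the pair FRn:FRn+1, with FRn holding
   the most significant 32 bits; hence cpu_fregs[reg + 1] supplies the low
   half of the concatenation above and receives the low half on store.  */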
319
320 #define B3_0 (ctx->opcode & 0xf)
321 #define B6_4 ((ctx->opcode >> 4) & 0x7)
322 #define B7_4 ((ctx->opcode >> 4) & 0xf)
323 #define B7_0 (ctx->opcode & 0xff)
324 #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
325 #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
326   (ctx->opcode & 0xfff))
327 #define B11_8 ((ctx->opcode >> 8) & 0xf)
328 #define B15_12 ((ctx->opcode >> 12) & 0xf)
329
330 #define REG(x) ((x) < 8 && (ctx->flags & (SR_MD | SR_RB)) == (SR_MD | SR_RB) \
331                 ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
332
333 #define ALTREG(x) ((x) < 8 && (ctx->flags & (SR_MD | SR_RB)) != (SR_MD | SR_RB)\
334                 ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
335
336 #define FREG(x) (ctx->flags & FPSCR_FR ? (x) ^ 0x10 : (x))
337 #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
338 #define XREG(x) (ctx->flags & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
339 #define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
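/* Worked example (illustrative, not from the original file): for the fetched
 * opcode 0xe14a, i.e. "mov #0x4a,R1":
 *     B15_12 == 0xe  -> handled by the "ctx->opcode & 0xf000" switch below
 *     B11_8  == 1    -> destination register R1
 *     B7_0s  == 0x4a -> sign-extended 8-bit immediate
 * so the translator emits tcg_gen_movi_i32(REG(1), 0x4a).  REG(n) resolves to
 * the BANK1 copy of R0..R7 only when both SR_MD and SR_RB are set in
 * ctx->flags; ALTREG(n) names the opposite (inactive) bank and is used by the
 * ldc/stc Rn_BANK cases (opcode mask 0xf08f) below.  */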
340
341 #define CHECK_NOT_DELAY_SLOT \
342   if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))     \
343   {                                                           \
344       tcg_gen_movi_i32(cpu_pc, ctx->pc);                      \
345       gen_helper_raise_slot_illegal_instruction(cpu_env);     \
346       ctx->bstate = BS_BRANCH;                                \
347       return;                                                 \
348   }
349
350 #define CHECK_PRIVILEGED                                        \
351   if (IS_USER(ctx)) {                                           \
352       tcg_gen_movi_i32(cpu_pc, ctx->pc);                        \
353       if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
354           gen_helper_raise_slot_illegal_instruction(cpu_env);   \
355       } else {                                                  \
356           gen_helper_raise_illegal_instruction(cpu_env);        \
357       }                                                         \
358       ctx->bstate = BS_BRANCH;                                  \
359       return;                                                   \
360   }
361
362 #define CHECK_FPU_ENABLED                                       \
363   if (ctx->flags & SR_FD) {                                     \
364       tcg_gen_movi_i32(cpu_pc, ctx->pc);                        \
365       if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
366           gen_helper_raise_slot_fpu_disable(cpu_env);           \
367       } else {                                                  \
368           gen_helper_raise_fpu_disable(cpu_env);                \
369       }                                                         \
370       ctx->bstate = BS_BRANCH;                                  \
371       return;                                                   \
372   }
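/* Each CHECK_* macro above expands to an early return from _decode_opc(),
   raising an illegal-instruction or FPU-disable exception (the "slot"
   variants when the offending instruction sits in a delay slot) and marking
   the block as ending in a branch (BS_BRANCH); this is why the cases below
   invoke these checks before emitting any other ops for the instruction.  */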
373
374 static void _decode_opc(DisasContext * ctx)
375 {
376     /* This code tries to make movca.l emulation sufficiently
377        accurate for Linux purposes.  This instruction writes
378        memory, and prior to that, always allocates a cache line.
379        It is used in two contexts:
380        - in memcpy, where data is copied in blocks, the first write
381        to a block uses movca.l for performance.
382        - in arch/sh/mm/cache-sh4.c, the movca.l + ocbi combination is
383        used to flush the cache. Here, the data written by movca.l never
384        reaches memory, and its value is bogus anyway.
385
386        To simulate this, when we translate movca.l we store the value to
387        memory, but we also remember the previous content. If we see ocbi,
388        we check whether a movca.l to that address was done previously. If
389        so, the write should not have hit memory, so we restore the
390        previous content. When we see an instruction that is neither
391        movca.l nor ocbi, the previous content is discarded.
392
393        To optimize, we only try to flush stores when we are at the start
394        of a TB, or if we already saw movca.l in this TB and did not flush
395        the stores yet.  */
396     if (ctx->has_movcal)
397         {
398           int opcode = ctx->opcode & 0xf0ff;
399           if (opcode != 0x0093 /* ocbi */
400               && opcode != 0x00c3 /* movca.l */)
401               {
402                   gen_helper_discard_movcal_backup(cpu_env);
403                   ctx->has_movcal = 0;
404               }
405         }
406
407 #if 0
408     fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
409 #endif
410
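    /* Decode strategy: the opcode is matched against progressively less
       specific fixed-field masks -- first the full 16 bits, then 0xf000,
       0xf00f, 0xff00, 0xf08f and finally 0xf0ff -- falling through to the
       next switch whenever no case in the current one matches.  */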
411     switch (ctx->opcode) {
412     case 0x0019:                /* div0u */
413         tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
414         return;
415     case 0x000b:                /* rts */
416         CHECK_NOT_DELAY_SLOT
417         tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
418         ctx->flags |= DELAY_SLOT;
419         ctx->delayed_pc = (uint32_t) - 1;
420         return;
421     case 0x0028:                /* clrmac */
422         tcg_gen_movi_i32(cpu_mach, 0);
423         tcg_gen_movi_i32(cpu_macl, 0);
424         return;
425     case 0x0048:                /* clrs */
426         tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
427         return;
428     case 0x0008:                /* clrt */
429         tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
430         return;
431     case 0x0038:                /* ldtlb */
432         CHECK_PRIVILEGED
433         gen_helper_ldtlb(cpu_env);
434         return;
435     case 0x002b:                /* rte */
436         CHECK_PRIVILEGED
437         CHECK_NOT_DELAY_SLOT
438         tcg_gen_mov_i32(cpu_sr, cpu_ssr);
439         tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
440         ctx->flags |= DELAY_SLOT;
441         ctx->delayed_pc = (uint32_t) - 1;
442         return;
443     case 0x0058:                /* sets */
444         tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
445         return;
446     case 0x0018:                /* sett */
447         tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
448         return;
449     case 0xfbfd:                /* frchg */
450         tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
451         ctx->bstate = BS_STOP;
452         return;
453     case 0xf3fd:                /* fschg */
454         tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
455         ctx->bstate = BS_STOP;
456         return;
457     case 0x0009:                /* nop */
458         return;
459     case 0x001b:                /* sleep */
460         CHECK_PRIVILEGED
461         tcg_gen_movi_i32(cpu_pc, ctx->pc + 2);
462         gen_helper_sleep(cpu_env);
463         return;
464     }
465
466     switch (ctx->opcode & 0xf000) {
467     case 0x1000:                /* mov.l Rm,@(disp,Rn) */
468         {
469             TCGv addr = tcg_temp_new();
470             tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
471             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
472             tcg_temp_free(addr);
473         }
474         return;
475     case 0x5000:                /* mov.l @(disp,Rm),Rn */
476         {
477             TCGv addr = tcg_temp_new();
478             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
479             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
480             tcg_temp_free(addr);
481         }
482         return;
483     case 0xe000:                /* mov #imm,Rn */
484         tcg_gen_movi_i32(REG(B11_8), B7_0s);
485         return;
486     case 0x9000:                /* mov.w @(disp,PC),Rn */
487         {
488             TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
489             tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
490             tcg_temp_free(addr);
491         }
492         return;
493     case 0xd000:                /* mov.l @(disp,PC),Rn */
494         {
495             TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
496             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
497             tcg_temp_free(addr);
498         }
499         return;
500     case 0x7000:                /* add #imm,Rn */
501         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
502         return;
503     case 0xa000:                /* bra disp */
504         CHECK_NOT_DELAY_SLOT
505         ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
506         tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
507         ctx->flags |= DELAY_SLOT;
508         return;
509     case 0xb000:                /* bsr disp */
510         CHECK_NOT_DELAY_SLOT
511         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
512         ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
513         tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
514         ctx->flags |= DELAY_SLOT;
515         return;
516     }
517
518     switch (ctx->opcode & 0xf00f) {
519     case 0x6003:                /* mov Rm,Rn */
520         tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
521         return;
522     case 0x2000:                /* mov.b Rm,@Rn */
523         tcg_gen_qemu_st8(REG(B7_4), REG(B11_8), ctx->memidx);
524         return;
525     case 0x2001:                /* mov.w Rm,@Rn */
526         tcg_gen_qemu_st16(REG(B7_4), REG(B11_8), ctx->memidx);
527         return;
528     case 0x2002:                /* mov.l Rm,@Rn */
529         tcg_gen_qemu_st32(REG(B7_4), REG(B11_8), ctx->memidx);
530         return;
531     case 0x6000:                /* mov.b @Rm,Rn */
532         tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
533         return;
534     case 0x6001:                /* mov.w @Rm,Rn */
535         tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
536         return;
537     case 0x6002:                /* mov.l @Rm,Rn */
538         tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
539         return;
540     case 0x2004:                /* mov.b Rm,@-Rn */
541         {
542             TCGv addr = tcg_temp_new();
543             tcg_gen_subi_i32(addr, REG(B11_8), 1);
544             tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);     /* might cause re-execution */
545             tcg_gen_mov_i32(REG(B11_8), addr);                  /* modify register status */
546             tcg_temp_free(addr);
547         }
548         return;
549     case 0x2005:                /* mov.w Rm,@-Rn */
550         {
551             TCGv addr = tcg_temp_new();
552             tcg_gen_subi_i32(addr, REG(B11_8), 2);
553             tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
554             tcg_gen_mov_i32(REG(B11_8), addr);
555             tcg_temp_free(addr);
556         }
557         return;
558     case 0x2006:                /* mov.l Rm,@-Rn */
559         {
560             TCGv addr = tcg_temp_new();
561             tcg_gen_subi_i32(addr, REG(B11_8), 4);
562             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
563             tcg_gen_mov_i32(REG(B11_8), addr);
                tcg_temp_free(addr);
564         }
565         return;
566     case 0x6004:                /* mov.b @Rm+,Rn */
567         tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
568         if (B11_8 != B7_4)
569             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
570         return;
571     case 0x6005:                /* mov.w @Rm+,Rn */
572         tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
573         if (B11_8 != B7_4)
574             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
575         return;
576     case 0x6006:                /* mov.l @Rm+,Rn */
577         tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
578         if (B11_8 != B7_4)
579             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
580         return;
581     case 0x0004:                /* mov.b Rm,@(R0,Rn) */
582         {
583             TCGv addr = tcg_temp_new();
584             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
585             tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
586             tcg_temp_free(addr);
587         }
588         return;
589     case 0x0005:                /* mov.w Rm,@(R0,Rn) */
590         {
591             TCGv addr = tcg_temp_new();
592             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
593             tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
594             tcg_temp_free(addr);
595         }
596         return;
597     case 0x0006:                /* mov.l Rm,@(R0,Rn) */
598         {
599             TCGv addr = tcg_temp_new();
600             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
601             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
602             tcg_temp_free(addr);
603         }
604         return;
605     case 0x000c:                /* mov.b @(R0,Rm),Rn */
606         {
607             TCGv addr = tcg_temp_new();
608             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
609             tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
610             tcg_temp_free(addr);
611         }
612         return;
613     case 0x000d:                /* mov.w @(R0,Rm),Rn */
614         {
615             TCGv addr = tcg_temp_new();
616             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
617             tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
618             tcg_temp_free(addr);
619         }
620         return;
621     case 0x000e:                /* mov.l @(R0,Rm),Rn */
622         {
623             TCGv addr = tcg_temp_new();
624             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
625             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
626             tcg_temp_free(addr);
627         }
628         return;
629     case 0x6008:                /* swap.b Rm,Rn */
630         {
631             TCGv high, low;
632             high = tcg_temp_new();
633             tcg_gen_andi_i32(high, REG(B7_4), 0xffff0000);
634             low = tcg_temp_new();
635             tcg_gen_ext16u_i32(low, REG(B7_4));
636             tcg_gen_bswap16_i32(low, low);
637             tcg_gen_or_i32(REG(B11_8), high, low);
638             tcg_temp_free(low);
639             tcg_temp_free(high);
640         }
641         return;
642     case 0x6009:                /* swap.w Rm,Rn */
643         tcg_gen_rotli_i32(REG(B11_8), REG(B7_4), 16);
644         return;
645     case 0x200d:                /* xtrct Rm,Rn */
646         {
647             TCGv high, low;
648             high = tcg_temp_new();
649             tcg_gen_shli_i32(high, REG(B7_4), 16);
650             low = tcg_temp_new();
651             tcg_gen_shri_i32(low, REG(B11_8), 16);
652             tcg_gen_or_i32(REG(B11_8), high, low);
653             tcg_temp_free(low);
654             tcg_temp_free(high);
655         }
656         return;
657     case 0x300c:                /* add Rm,Rn */
658         tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
659         return;
660     case 0x300e:                /* addc Rm,Rn */
661         {
662             TCGv t0, t1, t2;
663             t0 = tcg_temp_new();
664             tcg_gen_andi_i32(t0, cpu_sr, SR_T);
665             t1 = tcg_temp_new();
666             tcg_gen_add_i32(t1, REG(B7_4), REG(B11_8));
667             tcg_gen_add_i32(t0, t0, t1);
668             t2 = tcg_temp_new();
669             tcg_gen_setcond_i32(TCG_COND_GTU, t2, REG(B11_8), t1);
670             tcg_gen_setcond_i32(TCG_COND_GTU, t1, t1, t0);
671             tcg_gen_or_i32(t1, t1, t2);
672             tcg_temp_free(t2);
673             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
674             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
675             tcg_temp_free(t1);
676             tcg_gen_mov_i32(REG(B11_8), t0);
677             tcg_temp_free(t0);
678         }
679         return;
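        /* The addc sequence above computes SR.T as the carry out of
           Rn + Rm + T: the first GTU test catches overflow of Rn + Rm
           (t1 < Rn unsigned), the second catches the extra wrap that adding
           the old T bit may introduce (t0 < t1 unsigned).  */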
680     case 0x300f:                /* addv Rm,Rn */
681         {
682             TCGv t0, t1, t2;
683             t0 = tcg_temp_new();
684             tcg_gen_add_i32(t0, REG(B7_4), REG(B11_8));
685             t1 = tcg_temp_new();
686             tcg_gen_xor_i32(t1, t0, REG(B11_8));
687             t2 = tcg_temp_new();
688             tcg_gen_xor_i32(t2, REG(B7_4), REG(B11_8));
689             tcg_gen_andc_i32(t1, t1, t2);
690             tcg_temp_free(t2);
691             tcg_gen_shri_i32(t1, t1, 31);
692             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
693             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
694             tcg_temp_free(t1);
695             tcg_gen_mov_i32(REG(B7_4), t0);
696             tcg_temp_free(t0);
697         }
698         return;
699     case 0x2009:                /* and Rm,Rn */
700         tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
701         return;
702     case 0x3000:                /* cmp/eq Rm,Rn */
703         gen_cmp(TCG_COND_EQ, REG(B7_4), REG(B11_8));
704         return;
705     case 0x3003:                /* cmp/ge Rm,Rn */
706         gen_cmp(TCG_COND_GE, REG(B7_4), REG(B11_8));
707         return;
708     case 0x3007:                /* cmp/gt Rm,Rn */
709         gen_cmp(TCG_COND_GT, REG(B7_4), REG(B11_8));
710         return;
711     case 0x3006:                /* cmp/hi Rm,Rn */
712         gen_cmp(TCG_COND_GTU, REG(B7_4), REG(B11_8));
713         return;
714     case 0x3002:                /* cmp/hs Rm,Rn */
715         gen_cmp(TCG_COND_GEU, REG(B7_4), REG(B11_8));
716         return;
717     case 0x200c:                /* cmp/str Rm,Rn */
718         {
719             TCGv cmp1 = tcg_temp_new();
720             TCGv cmp2 = tcg_temp_new();
721             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
722             tcg_gen_xor_i32(cmp1, REG(B7_4), REG(B11_8));
723             tcg_gen_andi_i32(cmp2, cmp1, 0xff000000);
724             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
725             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
726             tcg_gen_andi_i32(cmp2, cmp1, 0x00ff0000);
727             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
728             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
729             tcg_gen_andi_i32(cmp2, cmp1, 0x0000ff00);
730             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
731             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
732             tcg_gen_andi_i32(cmp2, cmp1, 0x000000ff);
733             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
734             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
735             tcg_temp_free(cmp2);
736             tcg_temp_free(cmp1);
737         }
738         return;
739     case 0x2007:                /* div0s Rm,Rn */
740         {
741             gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31);        /* SR_Q */
742             gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31);         /* SR_M */
743             TCGv val = tcg_temp_new();
744             tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
745             gen_copy_bit_i32(cpu_sr, 0, val, 31);               /* SR_T */
746             tcg_temp_free(val);
747         }
748         return;
749     case 0x3004:                /* div1 Rm,Rn */
750         gen_helper_div1(REG(B11_8), cpu_env, REG(B7_4), REG(B11_8));
751         return;
752     case 0x300d:                /* dmuls.l Rm,Rn */
753         tcg_gen_muls2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8));
754         return;
755     case 0x3005:                /* dmulu.l Rm,Rn */
756         tcg_gen_mulu2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8));
757         return;
758     case 0x600e:                /* exts.b Rm,Rn */
759         tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
760         return;
761     case 0x600f:                /* exts.w Rm,Rn */
762         tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
763         return;
764     case 0x600c:                /* extu.b Rm,Rn */
765         tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
766         return;
767     case 0x600d:                /* extu.w Rm,Rn */
768         tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
769         return;
770     case 0x000f:                /* mac.l @Rm+,@Rn+ */
771         {
772             TCGv arg0, arg1;
773             arg0 = tcg_temp_new();
774             tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
775             arg1 = tcg_temp_new();
776             tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
777             gen_helper_macl(cpu_env, arg0, arg1);
778             tcg_temp_free(arg1);
779             tcg_temp_free(arg0);
780             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
781             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
782         }
783         return;
784     case 0x400f:                /* mac.w @Rm+,@Rn+ */
785         {
786             TCGv arg0, arg1;
787             arg0 = tcg_temp_new();
788             tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
789             arg1 = tcg_temp_new();
790             tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
791             gen_helper_macw(cpu_env, arg0, arg1);
792             tcg_temp_free(arg1);
793             tcg_temp_free(arg0);
794             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
795             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
796         }
797         return;
798     case 0x0007:                /* mul.l Rm,Rn */
799         tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
800         return;
801     case 0x200f:                /* muls.w Rm,Rn */
802         {
803             TCGv arg0, arg1;
804             arg0 = tcg_temp_new();
805             tcg_gen_ext16s_i32(arg0, REG(B7_4));
806             arg1 = tcg_temp_new();
807             tcg_gen_ext16s_i32(arg1, REG(B11_8));
808             tcg_gen_mul_i32(cpu_macl, arg0, arg1);
809             tcg_temp_free(arg1);
810             tcg_temp_free(arg0);
811         }
812         return;
813     case 0x200e:                /* mulu.w Rm,Rn */
814         {
815             TCGv arg0, arg1;
816             arg0 = tcg_temp_new();
817             tcg_gen_ext16u_i32(arg0, REG(B7_4));
818             arg1 = tcg_temp_new();
819             tcg_gen_ext16u_i32(arg1, REG(B11_8));
820             tcg_gen_mul_i32(cpu_macl, arg0, arg1);
821             tcg_temp_free(arg1);
822             tcg_temp_free(arg0);
823         }
824         return;
825     case 0x600b:                /* neg Rm,Rn */
826         tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
827         return;
828     case 0x600a:                /* negc Rm,Rn */
829         {
830             TCGv t0, t1;
831             t0 = tcg_temp_new();
832             tcg_gen_neg_i32(t0, REG(B7_4));
833             t1 = tcg_temp_new();
834             tcg_gen_andi_i32(t1, cpu_sr, SR_T);
835             tcg_gen_sub_i32(REG(B11_8), t0, t1);
836             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
837             tcg_gen_setcondi_i32(TCG_COND_GTU, t1, t0, 0);
838             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
839             tcg_gen_setcond_i32(TCG_COND_GTU, t1, REG(B11_8), t0);
840             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
841             tcg_temp_free(t0);
842             tcg_temp_free(t1);
843         }
844         return;
845     case 0x6007:                /* not Rm,Rn */
846         tcg_gen_not_i32(REG(B11_8), REG(B7_4));
847         return;
848     case 0x200b:                /* or Rm,Rn */
849         tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
850         return;
851     case 0x400c:                /* shad Rm,Rn */
852         {
853             int label1 = gen_new_label();
854             int label2 = gen_new_label();
855             int label3 = gen_new_label();
856             int label4 = gen_new_label();
857             TCGv shift;
858             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
859             /* Rm positive, shift to the left */
860             shift = tcg_temp_new();
861             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
862             tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
863             tcg_temp_free(shift);
864             tcg_gen_br(label4);
865             /* Rm negative, shift to the right */
866             gen_set_label(label1);
867             shift = tcg_temp_new();
868             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
869             tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
870             tcg_gen_not_i32(shift, REG(B7_4));
871             tcg_gen_andi_i32(shift, shift, 0x1f);
872             tcg_gen_addi_i32(shift, shift, 1);
873             tcg_gen_sar_i32(REG(B11_8), REG(B11_8), shift);
874             tcg_temp_free(shift);
875             tcg_gen_br(label4);
876             /* Rm = -32 */
877             gen_set_label(label2);
878             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B11_8), 0, label3);
879             tcg_gen_movi_i32(REG(B11_8), 0);
880             tcg_gen_br(label4);
881             gen_set_label(label3);
882             tcg_gen_movi_i32(REG(B11_8), 0xffffffff);
883             gen_set_label(label4);
884         }
885         return;
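        /* shad above: a non-negative Rm shifts Rn left by Rm & 31; a negative
           Rm arithmetic-shifts Rn right by 32 - (Rm & 31), computed as
           ((~Rm & 31) + 1).  When Rm is negative with Rm & 31 == 0 (e.g.
           Rm == -32) the result is 0 or -1 depending on the sign of Rn,
           matching a right shift by 32.  shld below is the logical-shift
           counterpart, where that corner case simply yields 0.  */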
886     case 0x400d:                /* shld Rm,Rn */
887         {
888             int label1 = gen_new_label();
889             int label2 = gen_new_label();
890             int label3 = gen_new_label();
891             TCGv shift;
892             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
893             /* Rm positive, shift to the left */
894             shift = tcg_temp_new();
895             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
896             tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
897             tcg_temp_free(shift);
898             tcg_gen_br(label3);
899             /* Rm negative, shift to the right */
900             gen_set_label(label1);
901             shift = tcg_temp_new();
902             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
903             tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
904             tcg_gen_not_i32(shift, REG(B7_4));
905             tcg_gen_andi_i32(shift, shift, 0x1f);
906             tcg_gen_addi_i32(shift, shift, 1);
907             tcg_gen_shr_i32(REG(B11_8), REG(B11_8), shift);
908             tcg_temp_free(shift);
909             tcg_gen_br(label3);
910             /* Rm = -32 */
911             gen_set_label(label2);
912             tcg_gen_movi_i32(REG(B11_8), 0);
913             gen_set_label(label3);
914         }
915         return;
916     case 0x3008:                /* sub Rm,Rn */
917         tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
918         return;
919     case 0x300a:                /* subc Rm,Rn */
920         {
921             TCGv t0, t1, t2;
922             t0 = tcg_temp_new();
923             tcg_gen_andi_i32(t0, cpu_sr, SR_T);
924             t1 = tcg_temp_new();
925             tcg_gen_sub_i32(t1, REG(B11_8), REG(B7_4));
926             tcg_gen_sub_i32(t0, t1, t0);
927             t2 = tcg_temp_new();
928             tcg_gen_setcond_i32(TCG_COND_LTU, t2, REG(B11_8), t1);
929             tcg_gen_setcond_i32(TCG_COND_LTU, t1, t1, t0);
930             tcg_gen_or_i32(t1, t1, t2);
931             tcg_temp_free(t2);
932             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
933             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
934             tcg_temp_free(t1);
935             tcg_gen_mov_i32(REG(B11_8), t0);
936             tcg_temp_free(t0);
937         }
938         return;
939     case 0x300b:                /* subv Rm,Rn */
940         {
941             TCGv t0, t1, t2;
942             t0 = tcg_temp_new();
943             tcg_gen_sub_i32(t0, REG(B11_8), REG(B7_4));
944             t1 = tcg_temp_new();
945             tcg_gen_xor_i32(t1, t0, REG(B7_4));
946             t2 = tcg_temp_new();
947             tcg_gen_xor_i32(t2, REG(B11_8), REG(B7_4));
948             tcg_gen_and_i32(t1, t1, t2);
949             tcg_temp_free(t2);
950             tcg_gen_shri_i32(t1, t1, 31);
951             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
952             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
953             tcg_temp_free(t1);
954             tcg_gen_mov_i32(REG(B11_8), t0);
955             tcg_temp_free(t0);
956         }
957         return;
958     case 0x2008:                /* tst Rm,Rn */
959         {
960             TCGv val = tcg_temp_new();
961             tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
962             gen_cmp_imm(TCG_COND_EQ, val, 0);
963             tcg_temp_free(val);
964         }
965         return;
966     case 0x200a:                /* xor Rm,Rn */
967         tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
968         return;
969     case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
970         CHECK_FPU_ENABLED
971         if (ctx->flags & FPSCR_SZ) {
972             TCGv_i64 fp = tcg_temp_new_i64();
973             gen_load_fpr64(fp, XREG(B7_4));
974             gen_store_fpr64(fp, XREG(B11_8));
975             tcg_temp_free_i64(fp);
976         } else {
977             tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
978         }
979         return;
980     case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
981         CHECK_FPU_ENABLED
982         if (ctx->flags & FPSCR_SZ) {
983             TCGv addr_hi = tcg_temp_new();
984             int fr = XREG(B7_4);
985             tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
986             tcg_gen_qemu_st32(cpu_fregs[fr  ], REG(B11_8), ctx->memidx);
987             tcg_gen_qemu_st32(cpu_fregs[fr+1], addr_hi,    ctx->memidx);
988             tcg_temp_free(addr_hi);
989         } else {
990             tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], REG(B11_8), ctx->memidx);
991         }
992         return;
993     case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
994         CHECK_FPU_ENABLED
995         if (ctx->flags & FPSCR_SZ) {
996             TCGv addr_hi = tcg_temp_new();
997             int fr = XREG(B11_8);
998             tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
999             tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1000             tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1001             tcg_temp_free(addr_hi);
1002         } else {
1003             tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1004         }
1005         return;
1006     case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1007         CHECK_FPU_ENABLED
1008         if (ctx->flags & FPSCR_SZ) {
1009             TCGv addr_hi = tcg_temp_new();
1010             int fr = XREG(B11_8);
1011             tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1012             tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1013             tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1014             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
1015             tcg_temp_free(addr_hi);
1016         } else {
1017             tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1018             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
1019         }
1020         return;
1021     case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1022         CHECK_FPU_ENABLED
1023         if (ctx->flags & FPSCR_SZ) {
1024             TCGv addr = tcg_temp_new_i32();
1025             int fr = XREG(B7_4);
1026             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1027             tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1028             tcg_gen_subi_i32(addr, addr, 4);
1029             tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1030             tcg_gen_mov_i32(REG(B11_8), addr);
1031             tcg_temp_free(addr);
1032         } else {
1033             TCGv addr;
1034             addr = tcg_temp_new_i32();
1035             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1036             tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1037             tcg_gen_mov_i32(REG(B11_8), addr);
1038             tcg_temp_free(addr);
1039         }
1040         return;
1041     case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rn - FPSCR: Nothing */
1042         CHECK_FPU_ENABLED
1043         {
1044             TCGv addr = tcg_temp_new_i32();
1045             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
1046             if (ctx->flags & FPSCR_SZ) {
1047                 int fr = XREG(B11_8);
1048                 tcg_gen_qemu_ld32u(cpu_fregs[fr  ], addr, ctx->memidx);
1049                 tcg_gen_addi_i32(addr, addr, 4);
1050                 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
1051             } else {
1052                 tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], addr, ctx->memidx);
1053             }
1054             tcg_temp_free(addr);
1055         }
1056         return;
1057     case 0xf007: /* fmov {F,D,X}Rm,@(R0,Rn) - FPSCR: Nothing */
1058         CHECK_FPU_ENABLED
1059         {
1060             TCGv addr = tcg_temp_new();
1061             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
1062             if (ctx->flags & FPSCR_SZ) {
1063                 int fr = XREG(B7_4);
1064                 tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1065                 tcg_gen_addi_i32(addr, addr, 4);
1066                 tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1067             } else {
1068                 tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1069             }
1070             tcg_temp_free(addr);
1071         }
1072         return;
1073     case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1074     case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1075     case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1076     case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1077     case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1078     case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1079         {
1080             CHECK_FPU_ENABLED
1081             if (ctx->flags & FPSCR_PR) {
1082                 TCGv_i64 fp0, fp1;
1083
1084                 if (ctx->opcode & 0x0110)
1085                     break; /* illegal instruction */
1086                 fp0 = tcg_temp_new_i64();
1087                 fp1 = tcg_temp_new_i64();
1088                 gen_load_fpr64(fp0, DREG(B11_8));
1089                 gen_load_fpr64(fp1, DREG(B7_4));
1090                 switch (ctx->opcode & 0xf00f) {
1091                 case 0xf000:            /* fadd Rm,Rn */
1092                     gen_helper_fadd_DT(fp0, cpu_env, fp0, fp1);
1093                     break;
1094                 case 0xf001:            /* fsub Rm,Rn */
1095                     gen_helper_fsub_DT(fp0, cpu_env, fp0, fp1);
1096                     break;
1097                 case 0xf002:            /* fmul Rm,Rn */
1098                     gen_helper_fmul_DT(fp0, cpu_env, fp0, fp1);
1099                     break;
1100                 case 0xf003:            /* fdiv Rm,Rn */
1101                     gen_helper_fdiv_DT(fp0, cpu_env, fp0, fp1);
1102                     break;
1103                 case 0xf004:            /* fcmp/eq Rm,Rn */
1104                     gen_helper_fcmp_eq_DT(cpu_env, fp0, fp1);
1105                     return;
1106                 case 0xf005:            /* fcmp/gt Rm,Rn */
1107                     gen_helper_fcmp_gt_DT(cpu_env, fp0, fp1);
1108                     return;
1109                 }
1110                 gen_store_fpr64(fp0, DREG(B11_8));
1111                 tcg_temp_free_i64(fp0);
1112                 tcg_temp_free_i64(fp1);
1113             } else {
1114                 switch (ctx->opcode & 0xf00f) {
1115                 case 0xf000:            /* fadd Rm,Rn */
1116                     gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1117                                        cpu_fregs[FREG(B11_8)],
1118                                        cpu_fregs[FREG(B7_4)]);
1119                     break;
1120                 case 0xf001:            /* fsub Rm,Rn */
1121                     gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1122                                        cpu_fregs[FREG(B11_8)],
1123                                        cpu_fregs[FREG(B7_4)]);
1124                     break;
1125                 case 0xf002:            /* fmul Rm,Rn */
1126                     gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1127                                        cpu_fregs[FREG(B11_8)],
1128                                        cpu_fregs[FREG(B7_4)]);
1129                     break;
1130                 case 0xf003:            /* fdiv Rm,Rn */
1131                     gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1132                                        cpu_fregs[FREG(B11_8)],
1133                                        cpu_fregs[FREG(B7_4)]);
1134                     break;
1135                 case 0xf004:            /* fcmp/eq Rm,Rn */
1136                     gen_helper_fcmp_eq_FT(cpu_env, cpu_fregs[FREG(B11_8)],
1137                                           cpu_fregs[FREG(B7_4)]);
1138                     return;
1139                 case 0xf005:            /* fcmp/gt Rm,Rn */
1140                     gen_helper_fcmp_gt_FT(cpu_env, cpu_fregs[FREG(B11_8)],
1141                                           cpu_fregs[FREG(B7_4)]);
1142                     return;
1143                 }
1144             }
1145         }
1146         return;
1147     case 0xf00e: /* fmac FR0,FRm,FRn */
1148         {
1149             CHECK_FPU_ENABLED
1150             if (ctx->flags & FPSCR_PR) {
1151                 break; /* illegal instruction */
1152             } else {
1153                 gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1154                                    cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)],
1155                                    cpu_fregs[FREG(B11_8)]);
1156                 return;
1157             }
1158         }
1159     }
1160
1161     switch (ctx->opcode & 0xff00) {
1162     case 0xc900:                /* and #imm,R0 */
1163         tcg_gen_andi_i32(REG(0), REG(0), B7_0);
1164         return;
1165     case 0xcd00:                /* and.b #imm,@(R0,GBR) */
1166         {
1167             TCGv addr, val;
1168             addr = tcg_temp_new();
1169             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1170             val = tcg_temp_new();
1171             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1172             tcg_gen_andi_i32(val, val, B7_0);
1173             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1174             tcg_temp_free(val);
1175             tcg_temp_free(addr);
1176         }
1177         return;
1178     case 0x8b00:                /* bf label */
1179         CHECK_NOT_DELAY_SLOT
1180         gen_conditional_jump(ctx, ctx->pc + 2,
1181                              ctx->pc + 4 + B7_0s * 2);
1182         ctx->bstate = BS_BRANCH;
1183         return;
1184     case 0x8f00:                /* bf/s label */
1185         CHECK_NOT_DELAY_SLOT
1186         gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
1187         ctx->flags |= DELAY_SLOT_CONDITIONAL;
1188         return;
1189     case 0x8900:                /* bt label */
1190         CHECK_NOT_DELAY_SLOT
1191         gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
1192                              ctx->pc + 2);
1193         ctx->bstate = BS_BRANCH;
1194         return;
1195     case 0x8d00:                /* bt/s label */
1196         CHECK_NOT_DELAY_SLOT
1197         gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
1198         ctx->flags |= DELAY_SLOT_CONDITIONAL;
1199         return;
1200     case 0x8800:                /* cmp/eq #imm,R0 */
1201         gen_cmp_imm(TCG_COND_EQ, REG(0), B7_0s);
1202         return;
1203     case 0xc400:                /* mov.b @(disp,GBR),R0 */
1204         {
1205             TCGv addr = tcg_temp_new();
1206             tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1207             tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1208             tcg_temp_free(addr);
1209         }
1210         return;
1211     case 0xc500:                /* mov.w @(disp,GBR),R0 */
1212         {
1213             TCGv addr = tcg_temp_new();
1214             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1215             tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1216             tcg_temp_free(addr);
1217         }
1218         return;
1219     case 0xc600:                /* mov.l @(disp,GBR),R0 */
1220         {
1221             TCGv addr = tcg_temp_new();
1222             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1223             tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
1224             tcg_temp_free(addr);
1225         }
1226         return;
1227     case 0xc000:                /* mov.b R0,@(disp,GBR) */
1228         {
1229             TCGv addr = tcg_temp_new();
1230             tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1231             tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1232             tcg_temp_free(addr);
1233         }
1234         return;
1235     case 0xc100:                /* mov.w R0,@(disp,GBR) */
1236         {
1237             TCGv addr = tcg_temp_new();
1238             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1239             tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1240             tcg_temp_free(addr);
1241         }
1242         return;
1243     case 0xc200:                /* mov.l R0,@(disp,GBR) */
1244         {
1245             TCGv addr = tcg_temp_new();
1246             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1247             tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
1248             tcg_temp_free(addr);
1249         }
1250         return;
1251     case 0x8000:                /* mov.b R0,@(disp,Rn) */
1252         {
1253             TCGv addr = tcg_temp_new();
1254             tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1255             tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1256             tcg_temp_free(addr);
1257         }
1258         return;
1259     case 0x8100:                /* mov.w R0,@(disp,Rn) */
1260         {
1261             TCGv addr = tcg_temp_new();
1262             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1263             tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1264             tcg_temp_free(addr);
1265         }
1266         return;
1267     case 0x8400:                /* mov.b @(disp,Rn),R0 */
1268         {
1269             TCGv addr = tcg_temp_new();
1270             tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1271             tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1272             tcg_temp_free(addr);
1273         }
1274         return;
1275     case 0x8500:                /* mov.w @(disp,Rn),R0 */
1276         {
1277             TCGv addr = tcg_temp_new();
1278             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1279             tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1280             tcg_temp_free(addr);
1281         }
1282         return;
1283     case 0xc700:                /* mova @(disp,PC),R0 */
1284         tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
1285         return;
1286     case 0xcb00:                /* or #imm,R0 */
1287         tcg_gen_ori_i32(REG(0), REG(0), B7_0);
1288         return;
1289     case 0xcf00:                /* or.b #imm,@(R0,GBR) */
1290         {
1291             TCGv addr, val;
1292             addr = tcg_temp_new();
1293             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1294             val = tcg_temp_new();
1295             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1296             tcg_gen_ori_i32(val, val, B7_0);
1297             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1298             tcg_temp_free(val);
1299             tcg_temp_free(addr);
1300         }
1301         return;
1302     case 0xc300:                /* trapa #imm */
1303         {
1304             TCGv imm;
1305             CHECK_NOT_DELAY_SLOT
1306             tcg_gen_movi_i32(cpu_pc, ctx->pc);
1307             imm = tcg_const_i32(B7_0);
1308             gen_helper_trapa(cpu_env, imm);
1309             tcg_temp_free(imm);
1310             ctx->bstate = BS_BRANCH;
1311         }
1312         return;
1313     case 0xc800:                /* tst #imm,R0 */
1314         {
1315             TCGv val = tcg_temp_new();
1316             tcg_gen_andi_i32(val, REG(0), B7_0);
1317             gen_cmp_imm(TCG_COND_EQ, val, 0);
1318             tcg_temp_free(val);
1319         }
1320         return;
1321     case 0xcc00:                /* tst.b #imm,@(R0,GBR) */
1322         {
1323             TCGv val = tcg_temp_new();
1324             tcg_gen_add_i32(val, REG(0), cpu_gbr);
1325             tcg_gen_qemu_ld8u(val, val, ctx->memidx);
1326             tcg_gen_andi_i32(val, val, B7_0);
1327             gen_cmp_imm(TCG_COND_EQ, val, 0);
1328             tcg_temp_free(val);
1329         }
1330         return;
1331     case 0xca00:                /* xor #imm,R0 */
1332         tcg_gen_xori_i32(REG(0), REG(0), B7_0);
1333         return;
1334     case 0xce00:                /* xor.b #imm,@(R0,GBR) */
1335         {
1336             TCGv addr, val;
1337             addr = tcg_temp_new();
1338             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1339             val = tcg_temp_new();
1340             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1341             tcg_gen_xori_i32(val, val, B7_0);
1342             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1343             tcg_temp_free(val);
1344             tcg_temp_free(addr);
1345         }
1346         return;
1347     }
1348
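    /* Instructions matched on a 0xf08f mask: bits 11-8 select Rm/Rn while
       bits 6-4 (B6_4) pick one of the eight banked registers accessed via
       ALTREG(), i.e. R0_BANK..R7_BANK of the inactive bank. */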
1349     switch (ctx->opcode & 0xf08f) {
1350     case 0x408e:                /* ldc Rm,Rn_BANK */
1351         CHECK_PRIVILEGED
1352         tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
1353         return;
1354     case 0x4087:                /* ldc.l @Rm+,Rn_BANK */
1355         CHECK_PRIVILEGED
1356         tcg_gen_qemu_ld32s(ALTREG(B6_4), REG(B11_8), ctx->memidx);
1357         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1358         return;
1359     case 0x0082:                /* stc Rm_BANK,Rn */
1360         CHECK_PRIVILEGED
1361         tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
1362         return;
1363     case 0x4083:                /* stc.l Rm_BANK,@-Rn */
1364         CHECK_PRIVILEGED
1365         {
1366             TCGv addr = tcg_temp_new();
1367             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1368             tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
1369             tcg_gen_mov_i32(REG(B11_8), addr);
1370             tcg_temp_free(addr);
1371         }
1372         return;
1373     }
1374
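    /* Instructions matched on a 0xf0ff mask: only Rm/Rn (bits 11-8) varies. */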
1375     switch (ctx->opcode & 0xf0ff) {
1376     case 0x0023:                /* braf Rn */
1377         CHECK_NOT_DELAY_SLOT
1378         tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
1379         ctx->flags |= DELAY_SLOT;
1380         ctx->delayed_pc = (uint32_t)-1;
1381         return;
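        /* Here and for bsrf/jmp/jsr below, delayed_pc is set to (uint32_t)-1
           to mark the branch target as dynamic: gen_jump() then uses the
           run-time value held in cpu_delayed_pc rather than chaining directly
           to a known TB. */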
1382     case 0x0003:                /* bsrf Rn */
1383         CHECK_NOT_DELAY_SLOT
1384         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1385         tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
1386         ctx->flags |= DELAY_SLOT;
1387         ctx->delayed_pc = (uint32_t)-1;
1388         return;
1389     case 0x4015:                /* cmp/pl Rn */
1390         gen_cmp_imm(TCG_COND_GT, REG(B11_8), 0);
1391         return;
1392     case 0x4011:                /* cmp/pz Rn */
1393         gen_cmp_imm(TCG_COND_GE, REG(B11_8), 0);
1394         return;
1395     case 0x4010:                /* dt Rn */
1396         tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
1397         gen_cmp_imm(TCG_COND_EQ, REG(B11_8), 0);
1398         return;
1399     case 0x402b:                /* jmp @Rn */
1400         CHECK_NOT_DELAY_SLOT
1401         tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1402         ctx->flags |= DELAY_SLOT;
1403         ctx->delayed_pc = (uint32_t)-1;
1404         return;
1405     case 0x400b:                /* jsr @Rn */
1406         CHECK_NOT_DELAY_SLOT
1407         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1408         tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1409         ctx->flags |= DELAY_SLOT;
1410         ctx->delayed_pc = (uint32_t)-1;
1411         return;
1412     case 0x400e:                /* ldc Rm,SR */
1413         CHECK_PRIVILEGED
1414         tcg_gen_andi_i32(cpu_sr, REG(B11_8), 0x700083f3);
1415         ctx->bstate = BS_STOP;
1416         return;
1417     case 0x4007:                /* ldc.l @Rm+,SR */
1418         CHECK_PRIVILEGED
1419         {
1420             TCGv val = tcg_temp_new();
1421             tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
1422             tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
1423             tcg_temp_free(val);
1424             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1425             ctx->bstate = BS_STOP;
1426         }
1427         return;
1428     case 0x0002:                /* stc SR,Rn */
1429         CHECK_PRIVILEGED
1430         tcg_gen_mov_i32(REG(B11_8), cpu_sr);
1431         return;
1432     case 0x4003:                /* stc SR,@-Rn */
1433         CHECK_PRIVILEGED
1434         {
1435             TCGv addr = tcg_temp_new();
1436             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1437             tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
1438             tcg_gen_mov_i32(REG(B11_8), addr);
1439             tcg_temp_free(addr);
1440         }
1441         return;
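    /* The LD/ST/LDST macros below expand into the four standard access forms
       for a control or system register: "ldc/lds Rm,reg", "ldc.l/lds.l
       @Rm+,reg", "stc/sts reg,Rn" and "stc.l/sts.l reg,@-Rn", each preceded
       by the given privilege/FPU check. */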
1442 #define LD(reg,ldnum,ldpnum,prechk)             \
1443   case ldnum:                                                   \
1444     prechk                                                      \
1445     tcg_gen_mov_i32 (cpu_##reg, REG(B11_8));                    \
1446     return;                                                     \
1447   case ldpnum:                                                  \
1448     prechk                                                      \
1449     tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx);    \
1450     tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);                \
1451     return;
1452 #define ST(reg,stnum,stpnum,prechk)             \
1453   case stnum:                                                   \
1454     prechk                                                      \
1455     tcg_gen_mov_i32 (REG(B11_8), cpu_##reg);                    \
1456     return;                                                     \
1457   case stpnum:                                                  \
1458     prechk                                                      \
1459     {                                                           \
1460         TCGv addr = tcg_temp_new();                             \
1461         tcg_gen_subi_i32(addr, REG(B11_8), 4);                  \
1462         tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx);       \
1463         tcg_gen_mov_i32(REG(B11_8), addr);                      \
1464         tcg_temp_free(addr);                                    \
1465     }                                                           \
1466     return;
1467 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk)              \
1468         LD(reg,ldnum,ldpnum,prechk)                             \
1469         ST(reg,stnum,stpnum,prechk)
1470         LDST(gbr,  0x401e, 0x4017, 0x0012, 0x4013, {})
1471         LDST(vbr,  0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
1472         LDST(ssr,  0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
1473         LDST(spc,  0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
1474         ST(sgr,  0x003a, 0x4032, CHECK_PRIVILEGED)
1475         LD(sgr,  0x403a, 0x4036, CHECK_PRIVILEGED if (!(ctx->features & SH_FEATURE_SH4A)) break;)
1476         LDST(dbr,  0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
1477         LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
1478         LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
1479         LDST(pr,   0x402a, 0x4026, 0x002a, 0x4022, {})
1480         LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
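    /* For reference, a single LDST invocation such as
       LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {}) expands to four cases;
       the post-increment load, for instance, becomes roughly:

           case 0x4017:             // ldc.l @Rm+,GBR
               tcg_gen_qemu_ld32s(cpu_gbr, REG(B11_8), ctx->memidx);
               tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
               return;
    */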
1481     case 0x406a:                /* lds Rm,FPSCR */
1482         CHECK_FPU_ENABLED
1483         gen_helper_ld_fpscr(cpu_env, REG(B11_8));
1484         ctx->bstate = BS_STOP;
1485         return;
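        /* Stop translation here: the helper may change FPSCR bits (PR, SZ,
           FR) that are part of the TB flags, so subsequent instructions must
           be retranslated under the new flags. */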
1486     case 0x4066:                /* lds.l @Rm+,FPSCR */
1487         CHECK_FPU_ENABLED
1488         {
1489             TCGv addr = tcg_temp_new();
1490             tcg_gen_qemu_ld32s(addr, REG(B11_8), ctx->memidx);
1491             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1492             gen_helper_ld_fpscr(cpu_env, addr);
1493             tcg_temp_free(addr);
1494             ctx->bstate = BS_STOP;
1495         }
1496         return;
1497     case 0x006a:                /* sts FPSCR,Rn */
1498         CHECK_FPU_ENABLED
1499         tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
1500         return;
1501     case 0x4062:                /* sts FPSCR,@-Rn */
1502         CHECK_FPU_ENABLED
1503         {
1504             TCGv addr, val;
1505             val = tcg_temp_new();
1506             tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
1507             addr = tcg_temp_new();
1508             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1509             tcg_gen_qemu_st32(val, addr, ctx->memidx);
1510             tcg_gen_mov_i32(REG(B11_8), addr);
1511             tcg_temp_free(addr);
1512             tcg_temp_free(val);
1513         }
1514         return;
1515     case 0x00c3:                /* movca.l R0,@Rn */
1516         {
1517             TCGv val = tcg_temp_new();
1518             tcg_gen_qemu_ld32u(val, REG(B11_8), ctx->memidx);
1519             gen_helper_movcal(cpu_env, REG(B11_8), val);
1520             tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1521         }
1522         ctx->has_movcal = 1;
1523         return;
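        /* The movcal helper is expected to record the original memory word at
           this address so that a later ocbi can restore it, approximating
           movca.l's allocate-without-fetch cache behaviour; see
           helper_movcal()/helper_ocbi(). */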
1524     case 0x40a9:
1525         /* MOVUA.L @Rm,R0 (Rm) -> R0
1526            Load non-boundary-aligned data */
1527         tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1528         return;
1529     case 0x40e9:
1530         /* MOVUA.L @Rm+,R0   (Rm) -> R0, Rm + 4 -> Rm
1531            Load non-boundary-aligned data */
1532         tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1533         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1534         return;
1535     case 0x0029:                /* movt Rn */
1536         tcg_gen_andi_i32(REG(B11_8), cpu_sr, SR_T);
1537         return;
1538     case 0x0073:
1539         /* MOVCO.L
1540                LDST -> T
1541                If (T == 1) R0 -> (Rn)
1542                0 -> LDST
1543         */
1544         if (ctx->features & SH_FEATURE_SH4A) {
1545             int label = gen_new_label();
1546             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
1547             tcg_gen_or_i32(cpu_sr, cpu_sr, cpu_ldst);
1548             tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_ldst, 0, label);
1549             tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1550             gen_set_label(label);
1551             tcg_gen_movi_i32(cpu_ldst, 0);
1552             return;
1553         } else
1554             break;
1555     case 0x0063:
1556         /* MOVLI.L @Rm,R0
1557                1 -> LDST
1558                (Rm) -> R0
1559                When an interrupt/exception
1560                occurs, 0 -> LDST
1561         */
1562         if (ctx->features & SH_FEATURE_SH4A) {
1563             tcg_gen_movi_i32(cpu_ldst, 0);
1564             tcg_gen_qemu_ld32s(REG(0), REG(B11_8), ctx->memidx);
1565             tcg_gen_movi_i32(cpu_ldst, 1);
1566             return;
1567         } else
1568             break;
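        /* MOVLI.L/MOVCO.L are the SH4A load-linked/store-conditional pair.
           Only the cpu_ldst flag is tracked here, not the reserved address,
           so the conditional store succeeds whenever LDST is still set when
           movco.l executes. */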
1569     case 0x0093:                /* ocbi @Rn */
1570         {
1571             gen_helper_ocbi(cpu_env, REG(B11_8));
1572         }
1573         return;
1574     case 0x00a3:                /* ocbp @Rn */
1575     case 0x00b3:                /* ocbwb @Rn */
1576         /* These instructions are supposed to do nothing in case of
1577            a cache miss. Given that we only partially emulate caches
1578            it is safe to simply ignore them. */
1579         return;
1580     case 0x0083:                /* pref @Rn */
1581         return;
1582     case 0x00d3:                /* prefi @Rn */
1583         if (ctx->features & SH_FEATURE_SH4A)
1584             return;
1585         else
1586             break;
1587     case 0x00e3:                /* icbi @Rn */
1588         if (ctx->features & SH_FEATURE_SH4A)
1589             return;
1590         else
1591             break;
1592     case 0x00ab:                /* synco */
1593         if (ctx->features & SH_FEATURE_SH4A)
1594             return;
1595         else
1596             break;
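    /* rotcl/rotcr below rotate Rn through the T bit: the bit shifted out goes
       into T while the previous T value (saved in tmp) is shifted in at the
       other end. */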
1597     case 0x4024:                /* rotcl Rn */
1598         {
1599             TCGv tmp = tcg_temp_new();
1600             tcg_gen_mov_i32(tmp, cpu_sr);
1601             gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1602             tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1603             gen_copy_bit_i32(REG(B11_8), 0, tmp, 0);
1604             tcg_temp_free(tmp);
1605         }
1606         return;
1607     case 0x4025:                /* rotcr Rn */
1608         {
1609             TCGv tmp = tcg_temp_new();
1610             tcg_gen_mov_i32(tmp, cpu_sr);
1611             gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1612             tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1613             gen_copy_bit_i32(REG(B11_8), 31, tmp, 0);
1614             tcg_temp_free(tmp);
1615         }
1616         return;
1617     case 0x4004:                /* rotl Rn */
1618         tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1);
1619         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1620         return;
1621     case 0x4005:                /* rotr Rn */
1622         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1623         tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1);
1624         return;
1625     case 0x4000:                /* shll Rn */
1626     case 0x4020:                /* shal Rn */
1627         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1628         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1629         return;
1630     case 0x4021:                /* shar Rn */
1631         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1632         tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
1633         return;
1634     case 0x4001:                /* shlr Rn */
1635         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1636         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1637         return;
1638     case 0x4008:                /* shll2 Rn */
1639         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2);
1640         return;
1641     case 0x4018:                /* shll8 Rn */
1642         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8);
1643         return;
1644     case 0x4028:                /* shll16 Rn */
1645         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16);
1646         return;
1647     case 0x4009:                /* shlr2 Rn */
1648         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2);
1649         return;
1650     case 0x4019:                /* shlr8 Rn */
1651         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8);
1652         return;
1653     case 0x4029:                /* shlr16 Rn */
1654         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16);
1655         return;
1656     case 0x401b:                /* tas.b @Rn */
1657         {
1658             TCGv addr, val;
1659             addr = tcg_temp_local_new();
1660             tcg_gen_mov_i32(addr, REG(B11_8));
1661             val = tcg_temp_local_new();
1662             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1663             gen_cmp_imm(TCG_COND_EQ, val, 0);
1664             tcg_gen_ori_i32(val, val, 0x80);
1665             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1666             tcg_temp_free(val);
1667             tcg_temp_free(addr);
1668         }
1669         return;
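        /* tas.b: T is set if the loaded byte was zero, then the byte is
           written back with bit 7 set.  Note that this read-modify-write
           sequence is not performed atomically by the emulation. */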
1670     case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1671         CHECK_FPU_ENABLED
1672         tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fpul);
1673         return;
1674     case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1675         CHECK_FPU_ENABLED
1676         tcg_gen_mov_i32(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1677         return;
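    /* For the FP cases below, FPSCR.PR selects double precision.  Double
       operands occupy even/odd register pairs (DRn), so an odd register
       encoding (opcode bit 8 set) is rejected as an illegal instruction. */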
1678     case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1679         CHECK_FPU_ENABLED
1680         if (ctx->flags & FPSCR_PR) {
1681             TCGv_i64 fp;
1682             if (ctx->opcode & 0x0100)
1683                 break; /* illegal instruction */
1684             fp = tcg_temp_new_i64();
1685             gen_helper_float_DT(fp, cpu_env, cpu_fpul);
1686             gen_store_fpr64(fp, DREG(B11_8));
1687             tcg_temp_free_i64(fp);
1688         }
1689         else {
1690             gen_helper_float_FT(cpu_fregs[FREG(B11_8)], cpu_env, cpu_fpul);
1691         }
1692         return;
1693     case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1694         CHECK_FPU_ENABLED
1695         if (ctx->flags & FPSCR_PR) {
1696             TCGv_i64 fp;
1697             if (ctx->opcode & 0x0100)
1698                 break; /* illegal instruction */
1699             fp = tcg_temp_new_i64();
1700             gen_load_fpr64(fp, DREG(B11_8));
1701             gen_helper_ftrc_DT(cpu_fpul, cpu_env, fp);
1702             tcg_temp_free_i64(fp);
1703         }
1704         else {
1705             gen_helper_ftrc_FT(cpu_fpul, cpu_env, cpu_fregs[FREG(B11_8)]);
1706         }
1707         return;
1708     case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1709         CHECK_FPU_ENABLED
1710         {
1711             gen_helper_fneg_T(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1712         }
1713         return;
1714     case 0xf05d: /* fabs FRn/DRn */
1715         CHECK_FPU_ENABLED
1716         if (ctx->flags & FPSCR_PR) {
1717             if (ctx->opcode & 0x0100)
1718                 break; /* illegal instruction */
1719             TCGv_i64 fp = tcg_temp_new_i64();
1720             gen_load_fpr64(fp, DREG(B11_8));
1721             gen_helper_fabs_DT(fp, fp);
1722             gen_store_fpr64(fp, DREG(B11_8));
1723             tcg_temp_free_i64(fp);
1724         } else {
1725             gen_helper_fabs_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1726         }
1727         return;
1728     case 0xf06d: /* fsqrt FRn */
1729         CHECK_FPU_ENABLED
1730         if (ctx->flags & FPSCR_PR) {
1731             if (ctx->opcode & 0x0100)
1732                 break; /* illegal instruction */
1733             TCGv_i64 fp = tcg_temp_new_i64();
1734             gen_load_fpr64(fp, DREG(B11_8));
1735             gen_helper_fsqrt_DT(fp, cpu_env, fp);
1736             gen_store_fpr64(fp, DREG(B11_8));
1737             tcg_temp_free_i64(fp);
1738         } else {
1739             gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1740                                 cpu_fregs[FREG(B11_8)]);
1741         }
1742         return;
1743     case 0xf07d: /* fsrra FRn */
1744         CHECK_FPU_ENABLED
1745         break;
1746     case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1747         CHECK_FPU_ENABLED
1748         if (!(ctx->flags & FPSCR_PR)) {
1749             tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0);
1750         }
1751         return;
1752     case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1753         CHECK_FPU_ENABLED
1754         if (!(ctx->flags & FPSCR_PR)) {
1755             tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000);
1756         }
1757         return;
1758     case 0xf0ad: /* fcnvsd FPUL,DRn */
1759         CHECK_FPU_ENABLED
1760         {
1761             TCGv_i64 fp = tcg_temp_new_i64();
1762             gen_helper_fcnvsd_FT_DT(fp, cpu_env, cpu_fpul);
1763             gen_store_fpr64(fp, DREG(B11_8));
1764             tcg_temp_free_i64(fp);
1765         }
1766         return;
1767     case 0xf0bd: /* fcnvds DRn,FPUL */
1768         CHECK_FPU_ENABLED
1769         {
1770             TCGv_i64 fp = tcg_temp_new_i64();
1771             gen_load_fpr64(fp, DREG(B11_8));
1772             gen_helper_fcnvds_DT_FT(cpu_fpul, cpu_env, fp);
1773             tcg_temp_free_i64(fp);
1774         }
1775         return;
1776     case 0xf0ed: /* fipr FVm,FVn */
1777         CHECK_FPU_ENABLED
1778         if ((ctx->flags & FPSCR_PR) == 0) {
1779             TCGv m, n;
1780             m = tcg_const_i32((ctx->opcode >> 8) & 3);
1781             n = tcg_const_i32((ctx->opcode >> 10) & 3);
1782             gen_helper_fipr(cpu_env, m, n);
1783             tcg_temp_free(m);
1784             tcg_temp_free(n);
1785             return;
1786         }
1787         break;
1788     case 0xf0fd: /* ftrv XMTRX,FVn */
1789         CHECK_FPU_ENABLED
1790         if ((ctx->opcode & 0x0300) == 0x0100 &&
1791             (ctx->flags & FPSCR_PR) == 0) {
1792             TCGv n;
1793             n = tcg_const_i32((ctx->opcode >> 10) & 3);
1794             gen_helper_ftrv(cpu_env, n);
1795             tcg_temp_free(n);
1796             return;
1797         }
1798         break;
1799     }
1800 #if 0
1801     fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
1802             ctx->opcode, ctx->pc);
1803     fflush(stderr);
1804 #endif
1805     tcg_gen_movi_i32(cpu_pc, ctx->pc);
1806     if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1807         gen_helper_raise_slot_illegal_instruction(cpu_env);
1808     } else {
1809         gen_helper_raise_illegal_instruction(cpu_env);
1810     }
1811     ctx->bstate = BS_BRANCH;
1812 }
1813
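/* decode_opc() wraps _decode_opc() with the delay-slot bookkeeping: if the
   instruction just translated sat in a delay slot, the slot flags are cleared
   and the pending (conditional) jump is emitted; if it set up a new delay
   slot, the flags are written back via gen_store_flags() so the state
   survives a TB exit or exception before the slot executes. */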
1814 static void decode_opc(DisasContext * ctx)
1815 {
1816     uint32_t old_flags = ctx->flags;
1817
1818     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
1819         tcg_gen_debug_insn_start(ctx->pc);
1820     }
1821
1822     _decode_opc(ctx);
1823
1824     if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1825         if (ctx->flags & DELAY_SLOT_CLEARME) {
1826             gen_store_flags(0);
1827         } else {
1828             /* go out of the delay slot */
1829             uint32_t new_flags = ctx->flags;
1830             new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
1831             gen_store_flags(new_flags);
1832         }
1833         ctx->flags = 0;
1834         ctx->bstate = BS_BRANCH;
1835         if (old_flags & DELAY_SLOT_CONDITIONAL) {
1836             gen_delayed_conditional_jump(ctx);
1837         } else if (old_flags & DELAY_SLOT) {
1838             gen_jump(ctx);
1839         }
1840
1841     }
1842
1843     /* go into a delay slot */
1844     if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
1845         gen_store_flags(ctx->flags);
1846 }
1847
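/* Main translation loop.  With search_pc set, the guest PC, flags and icount
   of each translated instruction are recorded in the gen_opc_* arrays so that
   restore_state_to_opc() can later map a retranslation index back to guest
   state. */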
1848 static inline void
1849 gen_intermediate_code_internal(SuperHCPU *cpu, TranslationBlock *tb,
1850                                bool search_pc)
1851 {
1852     CPUSH4State *env = &cpu->env;
1853     DisasContext ctx;
1854     target_ulong pc_start;
1855     static uint16_t *gen_opc_end;
1856     CPUBreakpoint *bp;
1857     int i, ii;
1858     int num_insns;
1859     int max_insns;
1860
1861     pc_start = tb->pc;
1862     gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
1863     ctx.pc = pc_start;
1864     ctx.flags = (uint32_t)tb->flags;
1865     ctx.bstate = BS_NONE;
1866     ctx.memidx = (ctx.flags & SR_MD) == 0 ? 1 : 0;
1867     /* We don't know if the delayed pc came from a dynamic or static branch,
1868        so assume it is a dynamic branch.  */
1869     ctx.delayed_pc = -1; /* use delayed pc from env pointer */
1870     ctx.tb = tb;
1871     ctx.singlestep_enabled = env->singlestep_enabled;
1872     ctx.features = env->features;
1873     ctx.has_movcal = (ctx.flags & TB_FLAG_PENDING_MOVCA);
1874
1875     ii = -1;
1876     num_insns = 0;
1877     max_insns = tb->cflags & CF_COUNT_MASK;
1878     if (max_insns == 0)
1879         max_insns = CF_COUNT_MASK;
1880     gen_tb_start();
1881     while (ctx.bstate == BS_NONE && tcg_ctx.gen_opc_ptr < gen_opc_end) {
1882         if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
1883             QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
1884                 if (ctx.pc == bp->pc) {
1885                     /* We have hit a breakpoint - make sure PC is up-to-date */
1886                     tcg_gen_movi_i32(cpu_pc, ctx.pc);
1887                     gen_helper_debug(cpu_env);
1888                     ctx.bstate = BS_BRANCH;
1889                     break;
1890                 }
1891             }
1892         }
1893         if (search_pc) {
1894             i = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
1895             if (ii < i) {
1896                 ii++;
1897                 while (ii < i)
1898                     tcg_ctx.gen_opc_instr_start[ii++] = 0;
1899             }
1900             tcg_ctx.gen_opc_pc[ii] = ctx.pc;
1901             gen_opc_hflags[ii] = ctx.flags;
1902             tcg_ctx.gen_opc_instr_start[ii] = 1;
1903             tcg_ctx.gen_opc_icount[ii] = num_insns;
1904         }
1905         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
1906             gen_io_start();
1907 #if 0
1908         fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
1909         fflush(stderr);
1910 #endif
1911         ctx.opcode = cpu_lduw_code(env, ctx.pc);
1912         decode_opc(&ctx);
1913         num_insns++;
1914         ctx.pc += 2;
1915         if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
1916             break;
1917         if (env->singlestep_enabled)
1918             break;
1919         if (num_insns >= max_insns)
1920             break;
1921         if (singlestep)
1922             break;
1923     }
1924     if (tb->cflags & CF_LAST_IO)
1925         gen_io_end();
1926     if (env->singlestep_enabled) {
1927         tcg_gen_movi_i32(cpu_pc, ctx.pc);
1928         gen_helper_debug(cpu_env);
1929     } else {
1930         switch (ctx.bstate) {
1931         case BS_STOP:
1932             /* gen_op_interrupt_restart(); */
1933             /* fall through */
1934         case BS_NONE:
1935             if (ctx.flags) {
1936                 gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
1937             }
1938             gen_goto_tb(&ctx, 0, ctx.pc);
1939             break;
1940         case BS_EXCP:
1941             /* gen_op_interrupt_restart(); */
1942             tcg_gen_exit_tb(0);
1943             break;
1944         case BS_BRANCH:
1945         default:
1946             break;
1947         }
1948     }
1949
1950     gen_tb_end(tb, num_insns);
1951     *tcg_ctx.gen_opc_ptr = INDEX_op_end;
1952     if (search_pc) {
1953         i = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
1954         ii++;
1955         while (ii <= i)
1956             tcg_ctx.gen_opc_instr_start[ii++] = 0;
1957     } else {
1958         tb->size = ctx.pc - pc_start;
1959         tb->icount = num_insns;
1960     }
1961
1962 #ifdef DEBUG_DISAS
1963     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
1964         qemu_log("IN:\n");      /* , lookup_symbol(pc_start)); */
1965         log_target_disas(env, pc_start, ctx.pc - pc_start, 0);
1966         qemu_log("\n");
1967     }
1968 #endif
1969 }
1970
1971 void gen_intermediate_code(CPUSH4State * env, struct TranslationBlock *tb)
1972 {
1973     gen_intermediate_code_internal(sh_env_get_cpu(env), tb, false);
1974 }
1975
1976 void gen_intermediate_code_pc(CPUSH4State * env, struct TranslationBlock *tb)
1977 {
1978     gen_intermediate_code_internal(sh_env_get_cpu(env), tb, true);
1979 }
1980
1981 void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb, int pc_pos)
1982 {
1983     env->pc = tcg_ctx.gen_opc_pc[pc_pos];
1984     env->flags = gen_opc_hflags[pc_pos];
1985 }