 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "cpu.h"
#include "exec/exec-all.h"
#include "disas/disas.h"
#include "tcg-op.h"
#include "qemu-common.h"
#include "qemu/log.h"
#include "qemu/bitops.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/log.h"
#define OPENRISC_DISAS

#ifdef OPENRISC_DISAS
#  define LOG_DIS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DIS(...) do { } while (0)
#endif
typedef struct DisasContext {
    TranslationBlock *tb;
    target_ulong pc, ppc, npc;
    uint32_t tb_flags, synced_flags, flags;
    uint32_t is_jmp;
    uint32_t mem_idx;
    int singlestep_enabled;
    uint32_t delayed_branch;
} DisasContext;
static TCGv_env cpu_env;
static TCGv cpu_sr;
static TCGv cpu_R[32];
static TCGv cpu_pc;
static TCGv jmp_pc;             /* l.jr/l.jalr temp pc */
static TCGv cpu_npc;
static TCGv cpu_ppc;
static TCGv_i32 env_btaken;     /* bf/bnf, F flag taken */
static TCGv_i32 fpcsr;
static TCGv machi, maclo;
static TCGv fpmaddhi, fpmaddlo;
static TCGv_i32 env_flags;

#include "exec/gen-icount.h"
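
/* Allocate the TCG globals that back the OpenRISC architectural state
   (SR, PC, the MAC accumulator, FP state and the 32 GPRs).  Generated
   code refers to the CPU fields through these handles. */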
void openrisc_translate_init(void)
{
    static const char * const regnames[] = {
        "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
        "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
        "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
        "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
    };
    int i;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cpu_sr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUOpenRISCState, sr), "sr");
    env_flags = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUOpenRISCState, flags),
                                       "flags");
    cpu_pc = tcg_global_mem_new(cpu_env,
                                offsetof(CPUOpenRISCState, pc), "pc");
    cpu_npc = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUOpenRISCState, npc), "npc");
    cpu_ppc = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUOpenRISCState, ppc), "ppc");
    jmp_pc = tcg_global_mem_new(cpu_env,
                                offsetof(CPUOpenRISCState, jmp_pc), "jmp_pc");
    env_btaken = tcg_global_mem_new_i32(cpu_env,
                                        offsetof(CPUOpenRISCState, btaken),
                                        "btaken");
    fpcsr = tcg_global_mem_new_i32(cpu_env,
                                   offsetof(CPUOpenRISCState, fpcsr),
                                   "fpcsr");
    machi = tcg_global_mem_new(cpu_env,
                               offsetof(CPUOpenRISCState, machi),
                               "machi");
    maclo = tcg_global_mem_new(cpu_env,
                               offsetof(CPUOpenRISCState, maclo),
                               "maclo");
    fpmaddhi = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUOpenRISCState, fpmaddhi),
                                  "fpmaddhi");
    fpmaddlo = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUOpenRISCState, fpmaddlo),
                                  "fpmaddlo");
    for (i = 0; i < 32; i++) {
        cpu_R[i] = tcg_global_mem_new(cpu_env,
                                      offsetof(CPUOpenRISCState, gpr[i]),
                                      regnames[i]);
    }
}
/* Write the SR_F flag back from translation space (env_btaken) into the
   execution-time SR register. */
static inline void wb_SR_F(void)
{
    TCGLabel *label = gen_new_label();
    tcg_gen_andi_tl(cpu_sr, cpu_sr, ~SR_F);
    tcg_gen_brcondi_tl(TCG_COND_EQ, env_btaken, 0, label);
    tcg_gen_ori_tl(cpu_sr, cpu_sr, SR_F);
    gen_set_label(label);
}
static inline int zero_extend(unsigned int val, int width)
{
    return val & ((1 << width) - 1);
}

static inline int sign_extend(unsigned int val, int width)
{
    int sval;

    val <<= TARGET_LONG_BITS - width;
    sval = val;
    sval >>= TARGET_LONG_BITS - width;
    return sval;
}
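
/* Worked example: with a 32-bit target_long, sign_extend(0x3ffffff, 26)
   shifts the value up to the sign bit and arithmetically back down,
   yielding 0xffffffff, i.e. -1. */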
static inline void gen_sync_flags(DisasContext *dc)
{
    /* Sync the tb-dependent flags between translation and runtime. */
    if (dc->tb_flags != dc->synced_flags) {
        tcg_gen_movi_tl(env_flags, dc->tb_flags);
        dc->synced_flags = dc->tb_flags;
    }
}
static void gen_exception(DisasContext *dc, unsigned int excp)
{
    TCGv_i32 tmp = tcg_const_i32(excp);
    gen_helper_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);
}

static void gen_illegal_exception(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    gen_exception(dc, EXCP_ILLEGAL);
    dc->is_jmp = DISAS_UPDATE;
}
/* Not used yet; enable these checks when or64 support is added. */
/*#ifdef TARGET_OPENRISC64
static void check_ob64s(DisasContext *dc)
{
    if (!(dc->flags & CPUCFGR_OB64S)) {
        gen_illegal_exception(dc);
    }
}

static void check_of64s(DisasContext *dc)
{
    if (!(dc->flags & CPUCFGR_OF64S)) {
        gen_illegal_exception(dc);
    }
}

static void check_ov64s(DisasContext *dc)
{
    if (!(dc->flags & CPUCFGR_OV64S)) {
        gen_illegal_exception(dc);
    }
}
#endif*/
static void gen_goto_tb(DisasContext *dc, int n, target_ulong dest)
{
    TranslationBlock *tb;
    tb = dc->tb;
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!dc->singlestep_enabled)) {
        tcg_gen_movi_tl(cpu_pc, dest);
        tcg_gen_goto_tb(n);
        tcg_gen_exit_tb((uintptr_t)tb + n);
    } else {
        tcg_gen_movi_tl(cpu_pc, dest);
        if (dc->singlestep_enabled) {
            gen_exception(dc, EXCP_DEBUG);
        }
        tcg_gen_exit_tb(0);
    }
}
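
/* Branches and jumps are not taken immediately: gen_jump only records the
   target in jmp_pc and sets D_FLAG; the translation loop performs the
   actual transfer after the delay-slot instruction has been emitted. */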
static void gen_jump(DisasContext *dc, uint32_t imm, uint32_t reg, uint32_t op0)
{
    target_ulong tmp_pc;

    /* N26: 26-bit immediate */
    tmp_pc = sign_extend((imm<<2), 26) + dc->pc;

        tcg_gen_movi_tl(jmp_pc, tmp_pc);

    case 0x01: /* l.jal */
        tcg_gen_movi_tl(cpu_R[9], (dc->pc + 8));
        tcg_gen_movi_tl(jmp_pc, tmp_pc);

    case 0x03: /* l.bnf */
    case 0x04: /* l.bf */

        TCGLabel *lab = gen_new_label();
        TCGv sr_f = tcg_temp_new();
        tcg_gen_movi_tl(jmp_pc, dc->pc+8);
        tcg_gen_andi_tl(sr_f, cpu_sr, SR_F);
        tcg_gen_brcondi_i32(op0 == 0x03 ? TCG_COND_EQ : TCG_COND_NE,

        tcg_gen_movi_tl(jmp_pc, tmp_pc);

    case 0x11: /* l.jr */
        tcg_gen_mov_tl(jmp_pc, cpu_R[reg]);

    case 0x12: /* l.jalr */
        tcg_gen_movi_tl(cpu_R[9], (dc->pc + 8));
        tcg_gen_mov_tl(jmp_pc, cpu_R[reg]);

        gen_illegal_exception(dc);

    dc->delayed_branch = 2;
    dc->tb_flags |= D_FLAG;
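
/* Register-register ALU group: op0/op1/op2 select the operation, while
   ra, rb and rd are the GPR fields decoded from the instruction word. */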
static void dec_calc(DisasContext *dc, uint32_t insn)
{
    uint32_t op0, op1, op2;

    op0 = extract32(insn, 0, 4);
    op1 = extract32(insn, 8, 2);
    op2 = extract32(insn, 6, 2);
    ra = extract32(insn, 16, 5);
    rb = extract32(insn, 11, 5);
    rd = extract32(insn, 21, 5);
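
    /* The add/sub style cases below share one pattern: both operands are
       widened to 64 bit, the operation is done there, and bits 31/32 of
       the wide result decide whether SR_OV/SR_CY are set and, if SR_OVE
       is enabled, whether a range exception is raised. */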
    case 0x00: /* l.add */
        LOG_DIS("l.add r%d, r%d, r%d\n", rd, ra, rb);

        TCGLabel *lab = gen_new_label();
        TCGv_i64 ta = tcg_temp_new_i64();
        TCGv_i64 tb = tcg_temp_new_i64();
        TCGv_i64 td = tcg_temp_local_new_i64();
        TCGv_i32 res = tcg_temp_local_new_i32();
        TCGv_i32 sr_ove = tcg_temp_local_new_i32();
        tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
        tcg_gen_extu_i32_i64(tb, cpu_R[rb]);
        tcg_gen_add_i64(td, ta, tb);
        tcg_gen_extrl_i64_i32(res, td);
        tcg_gen_shri_i64(td, td, 31);
        tcg_gen_andi_i64(td, td, 0x3);
        /* Jump to lab when no overflow. */
        tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
        tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
        tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
        tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
        tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
        gen_exception(dc, EXCP_RANGE);

        tcg_gen_mov_i32(cpu_R[rd], res);
        tcg_temp_free_i64(ta);
        tcg_temp_free_i64(tb);
        tcg_temp_free_i64(td);
        tcg_temp_free_i32(res);
        tcg_temp_free_i32(sr_ove);

        gen_illegal_exception(dc);
    case 0x0001: /* l.addc */
        LOG_DIS("l.addc r%d, r%d, r%d\n", rd, ra, rb);

        TCGLabel *lab = gen_new_label();
        TCGv_i64 ta = tcg_temp_new_i64();
        TCGv_i64 tb = tcg_temp_new_i64();
        TCGv_i64 tcy = tcg_temp_local_new_i64();
        TCGv_i64 td = tcg_temp_local_new_i64();
        TCGv_i32 res = tcg_temp_local_new_i32();
        TCGv_i32 sr_cy = tcg_temp_local_new_i32();
        TCGv_i32 sr_ove = tcg_temp_local_new_i32();
        tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
        tcg_gen_extu_i32_i64(tb, cpu_R[rb]);
        tcg_gen_andi_i32(sr_cy, cpu_sr, SR_CY);
        tcg_gen_extu_i32_i64(tcy, sr_cy);
        tcg_gen_shri_i64(tcy, tcy, 10);
        tcg_gen_add_i64(td, ta, tb);
        tcg_gen_add_i64(td, td, tcy);
        tcg_gen_extrl_i64_i32(res, td);
        tcg_gen_shri_i64(td, td, 32);
        tcg_gen_andi_i64(td, td, 0x3);
        /* Jump to lab when no overflow. */
        tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
        tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
        tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
        tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
        tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
        gen_exception(dc, EXCP_RANGE);

        tcg_gen_mov_i32(cpu_R[rd], res);
        tcg_temp_free_i64(ta);
        tcg_temp_free_i64(tb);
        tcg_temp_free_i64(tcy);
        tcg_temp_free_i64(td);
        tcg_temp_free_i32(res);
        tcg_temp_free_i32(sr_cy);
        tcg_temp_free_i32(sr_ove);

        gen_illegal_exception(dc);
    case 0x0002: /* l.sub */
        LOG_DIS("l.sub r%d, r%d, r%d\n", rd, ra, rb);

        TCGLabel *lab = gen_new_label();
        TCGv_i64 ta = tcg_temp_new_i64();
        TCGv_i64 tb = tcg_temp_new_i64();
        TCGv_i64 td = tcg_temp_local_new_i64();
        TCGv_i32 res = tcg_temp_local_new_i32();
        TCGv_i32 sr_ove = tcg_temp_local_new_i32();

        tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
        tcg_gen_extu_i32_i64(tb, cpu_R[rb]);
        tcg_gen_sub_i64(td, ta, tb);
        tcg_gen_extrl_i64_i32(res, td);
        tcg_gen_shri_i64(td, td, 31);
        tcg_gen_andi_i64(td, td, 0x3);
        /* Jump to lab when no overflow. */
        tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
        tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
        tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
        tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
        tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
        gen_exception(dc, EXCP_RANGE);

        tcg_gen_mov_i32(cpu_R[rd], res);
        tcg_temp_free_i64(ta);
        tcg_temp_free_i64(tb);
        tcg_temp_free_i64(td);
        tcg_temp_free_i32(res);
        tcg_temp_free_i32(sr_ove);

        gen_illegal_exception(dc);
    case 0x0003: /* l.and */
        LOG_DIS("l.and r%d, r%d, r%d\n", rd, ra, rb);
        tcg_gen_and_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);

        gen_illegal_exception(dc);

    case 0x0004: /* l.or */
        LOG_DIS("l.or r%d, r%d, r%d\n", rd, ra, rb);
        tcg_gen_or_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);

        gen_illegal_exception(dc);

    case 0x00: /* l.xor */
        LOG_DIS("l.xor r%d, r%d, r%d\n", rd, ra, rb);
        tcg_gen_xor_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);

        gen_illegal_exception(dc);

    case 0x03: /* l.mul */
        LOG_DIS("l.mul r%d, r%d, r%d\n", rd, ra, rb);
        if (ra != 0 && rb != 0) {
            gen_helper_mul32(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

            tcg_gen_movi_tl(cpu_R[rd], 0x0);

        gen_illegal_exception(dc);
    case 0x03: /* l.div */
        LOG_DIS("l.div r%d, r%d, r%d\n", rd, ra, rb);

        TCGLabel *lab0 = gen_new_label();
        TCGLabel *lab1 = gen_new_label();
        TCGLabel *lab2 = gen_new_label();
        TCGLabel *lab3 = gen_new_label();
        TCGv_i32 sr_ove = tcg_temp_local_new_i32();

        tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
        tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
        tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab0);
        gen_exception(dc, EXCP_RANGE);

        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_R[rb],

        tcg_gen_brcondi_tl(TCG_COND_NE, cpu_R[ra],

        tcg_gen_brcondi_tl(TCG_COND_NE, cpu_R[rb],

        tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
        tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
        tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab3);
        gen_exception(dc, EXCP_RANGE);

        tcg_gen_div_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);

        tcg_temp_free_i32(sr_ove);

        gen_illegal_exception(dc);
    case 0x03: /* l.divu */
        LOG_DIS("l.divu r%d, r%d, r%d\n", rd, ra, rb);

        TCGLabel *lab0 = gen_new_label();
        TCGLabel *lab1 = gen_new_label();
        TCGLabel *lab2 = gen_new_label();
        TCGv_i32 sr_ove = tcg_temp_local_new_i32();

        tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
        tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
        tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab0);
        gen_exception(dc, EXCP_RANGE);

        tcg_gen_brcondi_tl(TCG_COND_NE, cpu_R[rb],

        tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
        tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
        tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab2);
        gen_exception(dc, EXCP_RANGE);

        tcg_gen_divu_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);

        tcg_temp_free_i32(sr_ove);

        gen_illegal_exception(dc);
    case 0x03: /* l.mulu */
        LOG_DIS("l.mulu r%d, r%d, r%d\n", rd, ra, rb);
        if (rb != 0 && ra != 0) {
            TCGv_i64 result = tcg_temp_local_new_i64();
            TCGv_i64 tra = tcg_temp_local_new_i64();
            TCGv_i64 trb = tcg_temp_local_new_i64();
            TCGv_i64 high = tcg_temp_new_i64();
            TCGv_i32 sr_ove = tcg_temp_local_new_i32();
            TCGLabel *lab = gen_new_label();
            /* Calculate each result. */
            tcg_gen_extu_i32_i64(tra, cpu_R[ra]);
            tcg_gen_extu_i32_i64(trb, cpu_R[rb]);
            tcg_gen_mul_i64(result, tra, trb);
            tcg_temp_free_i64(tra);
            tcg_temp_free_i64(trb);
            tcg_gen_shri_i64(high, result, TARGET_LONG_BITS);
            /* Overflow or not. */
            tcg_gen_brcondi_i64(TCG_COND_EQ, high, 0x00000000, lab);
            tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
            tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
            tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab);
            gen_exception(dc, EXCP_RANGE);

            tcg_temp_free_i64(high);
            tcg_gen_trunc_i64_tl(cpu_R[rd], result);
            tcg_temp_free_i64(result);
            tcg_temp_free_i32(sr_ove);

            tcg_gen_movi_tl(cpu_R[rd], 0);

        gen_illegal_exception(dc);
    case 0x00: /* l.cmov */
        LOG_DIS("l.cmov r%d, r%d, r%d\n", rd, ra, rb);

        TCGLabel *lab = gen_new_label();
        TCGv res = tcg_temp_local_new();
        TCGv sr_f = tcg_temp_new();
        tcg_gen_andi_tl(sr_f, cpu_sr, SR_F);
        tcg_gen_mov_tl(res, cpu_R[rb]);
        tcg_gen_brcondi_tl(TCG_COND_NE, sr_f, SR_F, lab);
        tcg_gen_mov_tl(res, cpu_R[ra]);

        tcg_gen_mov_tl(cpu_R[rd], res);

        gen_illegal_exception(dc);
    case 0x00: /* l.ff1 */
        LOG_DIS("l.ff1 r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_ff1(cpu_R[rd], cpu_R[ra]);

    case 0x01: /* l.fl1 */
        LOG_DIS("l.fl1 r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_fl1(cpu_R[rd], cpu_R[ra]);

        gen_illegal_exception(dc);
    case 0x00: /* l.sll */
        LOG_DIS("l.sll r%d, r%d, r%d\n", rd, ra, rb);
        tcg_gen_shl_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);

    case 0x01: /* l.srl */
        LOG_DIS("l.srl r%d, r%d, r%d\n", rd, ra, rb);
        tcg_gen_shr_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);

    case 0x02: /* l.sra */
        LOG_DIS("l.sra r%d, r%d, r%d\n", rd, ra, rb);
        tcg_gen_sar_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);

    case 0x03: /* l.ror */
        LOG_DIS("l.ror r%d, r%d, r%d\n", rd, ra, rb);
        tcg_gen_rotr_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);

        gen_illegal_exception(dc);

        gen_illegal_exception(dc);
    case 0x00: /* l.exths */
        LOG_DIS("l.exths r%d, r%d\n", rd, ra);
        tcg_gen_ext16s_tl(cpu_R[rd], cpu_R[ra]);

    case 0x01: /* l.extbs */
        LOG_DIS("l.extbs r%d, r%d\n", rd, ra);
        tcg_gen_ext8s_tl(cpu_R[rd], cpu_R[ra]);

    case 0x02: /* l.exthz */
        LOG_DIS("l.exthz r%d, r%d\n", rd, ra);
        tcg_gen_ext16u_tl(cpu_R[rd], cpu_R[ra]);

    case 0x03: /* l.extbz */
        LOG_DIS("l.extbz r%d, r%d\n", rd, ra);
        tcg_gen_ext8u_tl(cpu_R[rd], cpu_R[ra]);

        gen_illegal_exception(dc);

        gen_illegal_exception(dc);
    case 0x00: /* l.extws */
        LOG_DIS("l.extws r%d, r%d\n", rd, ra);
        tcg_gen_ext32s_tl(cpu_R[rd], cpu_R[ra]);

    case 0x01: /* l.extwz */
        LOG_DIS("l.extwz r%d, r%d\n", rd, ra);
        tcg_gen_ext32u_tl(cpu_R[rd], cpu_R[ra]);

        gen_illegal_exception(dc);

        gen_illegal_exception(dc);

        gen_illegal_exception(dc);
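
/* dec_misc handles the remaining major opcodes: jumps and branches, l.nop,
   l.maci, SPR access, the load/store forms and the register-immediate ALU
   instructions. */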
static void dec_misc(DisasContext *dc, uint32_t insn)
{
#ifdef OPENRISC_DISAS
    uint32_t L6, K5;
#endif
    uint32_t I16, I5, I11, N26, tmp;

    op0 = extract32(insn, 26, 6);
    op1 = extract32(insn, 24, 2);
    ra = extract32(insn, 16, 5);
    rb = extract32(insn, 11, 5);
    rd = extract32(insn, 21, 5);
#ifdef OPENRISC_DISAS
    L6 = extract32(insn, 5, 6);
    K5 = extract32(insn, 0, 5);
#endif
    I16 = extract32(insn, 0, 16);
    I5 = extract32(insn, 21, 5);
    I11 = extract32(insn, 0, 11);
    N26 = extract32(insn, 0, 26);
    tmp = (I5<<11) + I11;

        LOG_DIS("l.j %d\n", N26);
        gen_jump(dc, N26, 0, op0);
    case 0x01: /* l.jal */
        LOG_DIS("l.jal %d\n", N26);
        gen_jump(dc, N26, 0, op0);

    case 0x03: /* l.bnf */
        LOG_DIS("l.bnf %d\n", N26);
        gen_jump(dc, N26, 0, op0);

    case 0x04: /* l.bf */
        LOG_DIS("l.bf %d\n", N26);
        gen_jump(dc, N26, 0, op0);

    case 0x01: /* l.nop */
        LOG_DIS("l.nop %d\n", I16);

        gen_illegal_exception(dc);
    case 0x11: /* l.jr */
        LOG_DIS("l.jr r%d\n", rb);
        gen_jump(dc, 0, rb, op0);

    case 0x12: /* l.jalr */
        LOG_DIS("l.jalr r%d\n", rb);
        gen_jump(dc, 0, rb, op0);

    case 0x13: /* l.maci */
        LOG_DIS("l.maci %d, r%d, %d\n", I5, ra, I11);

        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i32 dst = tcg_temp_new_i32();
        TCGv ttmp = tcg_const_tl(tmp);
        tcg_gen_mul_tl(dst, cpu_R[ra], ttmp);
        tcg_gen_ext_i32_i64(t1, dst);
        tcg_gen_concat_i32_i64(t2, maclo, machi);
        tcg_gen_add_i64(t2, t2, t1);
        tcg_gen_extrl_i64_i32(maclo, t2);
        tcg_gen_shri_i64(t2, t2, 32);
        tcg_gen_extrl_i64_i32(machi, t2);
        tcg_temp_free_i32(dst);

        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    case 0x09: /* l.rfe */

#if defined(CONFIG_USER_ONLY)

        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);

        gen_helper_rfe(cpu_env);
        dc->is_jmp = DISAS_UPDATE;
    case 0x1c: /* l.cust1 */
        LOG_DIS("l.cust1\n");

    case 0x1d: /* l.cust2 */
        LOG_DIS("l.cust2\n");

    case 0x1e: /* l.cust3 */
        LOG_DIS("l.cust3\n");

    case 0x1f: /* l.cust4 */
        LOG_DIS("l.cust4\n");

    case 0x3c: /* l.cust5 */
        LOG_DIS("l.cust5 r%d, r%d, r%d, %d, %d\n", rd, ra, rb, L6, K5);

    case 0x3d: /* l.cust6 */
        LOG_DIS("l.cust6\n");

    case 0x3e: /* l.cust7 */
        LOG_DIS("l.cust7\n");

    case 0x3f: /* l.cust8 */
        LOG_DIS("l.cust8\n");
    /* Not used yet; enable when or64 support is added. */
/*#ifdef TARGET_OPENRISC64
        LOG_DIS("l.ld r%d, r%d, %d\n", rd, ra, I16);
#endif*/

    case 0x21: /* l.lwz */
        LOG_DIS("l.lwz r%d, r%d, %d\n", rd, ra, I16);

    case 0x22: /* l.lws */
        LOG_DIS("l.lws r%d, r%d, %d\n", rd, ra, I16);

    case 0x23: /* l.lbz */
        LOG_DIS("l.lbz r%d, r%d, %d\n", rd, ra, I16);

    case 0x24: /* l.lbs */
        LOG_DIS("l.lbs r%d, r%d, %d\n", rd, ra, I16);

    case 0x25: /* l.lhz */
        LOG_DIS("l.lhz r%d, r%d, %d\n", rd, ra, I16);

    case 0x26: /* l.lhs */
        LOG_DIS("l.lhs r%d, r%d, %d\n", rd, ra, I16);

        TCGv t0 = tcg_temp_new();
        tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(I16, 16));
        tcg_gen_qemu_ld_tl(cpu_R[rd], t0, dc->mem_idx, mop);
    case 0x27: /* l.addi */
        LOG_DIS("l.addi r%d, r%d, %d\n", rd, ra, I16);

        tcg_gen_mov_tl(cpu_R[rd], cpu_R[ra]);

        TCGLabel *lab = gen_new_label();
        TCGv_i64 ta = tcg_temp_new_i64();
        TCGv_i64 td = tcg_temp_local_new_i64();
        TCGv_i32 res = tcg_temp_local_new_i32();
        TCGv_i32 sr_ove = tcg_temp_local_new_i32();
        tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
        tcg_gen_addi_i64(td, ta, sign_extend(I16, 16));
        tcg_gen_extrl_i64_i32(res, td);
        tcg_gen_shri_i64(td, td, 32);
        tcg_gen_andi_i64(td, td, 0x3);
        /* Jump to lab when no overflow. */
        tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
        tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
        tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
        tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
        tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
        gen_exception(dc, EXCP_RANGE);

        tcg_gen_mov_i32(cpu_R[rd], res);
        tcg_temp_free_i64(ta);
        tcg_temp_free_i64(td);
        tcg_temp_free_i32(res);
        tcg_temp_free_i32(sr_ove);
    case 0x28: /* l.addic */
        LOG_DIS("l.addic r%d, r%d, %d\n", rd, ra, I16);

        TCGLabel *lab = gen_new_label();
        TCGv_i64 ta = tcg_temp_new_i64();
        TCGv_i64 td = tcg_temp_local_new_i64();
        TCGv_i64 tcy = tcg_temp_local_new_i64();
        TCGv_i32 res = tcg_temp_local_new_i32();
        TCGv_i32 sr_cy = tcg_temp_local_new_i32();
        TCGv_i32 sr_ove = tcg_temp_local_new_i32();
        tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
        tcg_gen_andi_i32(sr_cy, cpu_sr, SR_CY);
        tcg_gen_shri_i32(sr_cy, sr_cy, 10);
        tcg_gen_extu_i32_i64(tcy, sr_cy);
        tcg_gen_addi_i64(td, ta, sign_extend(I16, 16));
        tcg_gen_add_i64(td, td, tcy);
        tcg_gen_extrl_i64_i32(res, td);
        tcg_gen_shri_i64(td, td, 32);
        tcg_gen_andi_i64(td, td, 0x3);
        /* Jump to lab when no overflow. */
        tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
        tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
        tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
        tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
        tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
        gen_exception(dc, EXCP_RANGE);

        tcg_gen_mov_i32(cpu_R[rd], res);
        tcg_temp_free_i64(ta);
        tcg_temp_free_i64(td);
        tcg_temp_free_i64(tcy);
        tcg_temp_free_i32(res);
        tcg_temp_free_i32(sr_cy);
        tcg_temp_free_i32(sr_ove);
    case 0x29: /* l.andi */
        LOG_DIS("l.andi r%d, r%d, %d\n", rd, ra, I16);
        tcg_gen_andi_tl(cpu_R[rd], cpu_R[ra], zero_extend(I16, 16));

    case 0x2a: /* l.ori */
        LOG_DIS("l.ori r%d, r%d, %d\n", rd, ra, I16);
        tcg_gen_ori_tl(cpu_R[rd], cpu_R[ra], zero_extend(I16, 16));

    case 0x2b: /* l.xori */
        LOG_DIS("l.xori r%d, r%d, %d\n", rd, ra, I16);
        tcg_gen_xori_tl(cpu_R[rd], cpu_R[ra], sign_extend(I16, 16));

    case 0x2c: /* l.muli */
        LOG_DIS("l.muli r%d, r%d, %d\n", rd, ra, I16);
        if (ra != 0 && I16 != 0) {
            TCGv_i32 im = tcg_const_i32(I16);
            gen_helper_mul32(cpu_R[rd], cpu_env, cpu_R[ra], im);
            tcg_temp_free_i32(im);

            tcg_gen_movi_tl(cpu_R[rd], 0x0);
    case 0x2d: /* l.mfspr */
        LOG_DIS("l.mfspr r%d, r%d, %d\n", rd, ra, I16);

#if defined(CONFIG_USER_ONLY)

        TCGv_i32 ti = tcg_const_i32(I16);
        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);

        gen_helper_mfspr(cpu_R[rd], cpu_env, cpu_R[rd], cpu_R[ra], ti);
        tcg_temp_free_i32(ti);
    case 0x30: /* l.mtspr */
        LOG_DIS("l.mtspr %d, r%d, r%d, %d\n", I5, ra, rb, I11);

#if defined(CONFIG_USER_ONLY)

        TCGv_i32 im = tcg_const_i32(tmp);
        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);

        gen_helper_mtspr(cpu_env, cpu_R[ra], cpu_R[rb], im);
        tcg_temp_free_i32(im);
    /* Not used yet; enable when or64 support is added. */
/*#ifdef TARGET_OPENRISC64
        LOG_DIS("l.sd %d, r%d, r%d, %d\n", I5, ra, rb, I11);
#endif*/

    case 0x35: /* l.sw */
        LOG_DIS("l.sw %d, r%d, r%d, %d\n", I5, ra, rb, I11);

    case 0x36: /* l.sb */
        LOG_DIS("l.sb %d, r%d, r%d, %d\n", I5, ra, rb, I11);

    case 0x37: /* l.sh */
        LOG_DIS("l.sh %d, r%d, r%d, %d\n", I5, ra, rb, I11);

        TCGv t0 = tcg_temp_new();
        tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(tmp, 16));
        tcg_gen_qemu_st_tl(cpu_R[rb], t0, dc->mem_idx, mop);

        gen_illegal_exception(dc);
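
/* l.mac/l.msb multiply ra by rb and accumulate the product into the 64-bit
   machi:maclo pair. */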
static void dec_mac(DisasContext *dc, uint32_t insn)
{
    op0 = extract32(insn, 0, 4);
    ra = extract32(insn, 16, 5);
    rb = extract32(insn, 11, 5);

    case 0x0001: /* l.mac */
        LOG_DIS("l.mac r%d, r%d\n", ra, rb);

        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_mul_tl(t0, cpu_R[ra], cpu_R[rb]);
        tcg_gen_ext_i32_i64(t1, t0);
        tcg_gen_concat_i32_i64(t2, maclo, machi);
        tcg_gen_add_i64(t2, t2, t1);
        tcg_gen_extrl_i64_i32(maclo, t2);
        tcg_gen_shri_i64(t2, t2, 32);
        tcg_gen_extrl_i64_i32(machi, t2);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);

    case 0x0002: /* l.msb */
        LOG_DIS("l.msb r%d, r%d\n", ra, rb);

        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_mul_tl(t0, cpu_R[ra], cpu_R[rb]);
        tcg_gen_ext_i32_i64(t1, t0);
        tcg_gen_concat_i32_i64(t2, maclo, machi);
        tcg_gen_sub_i64(t2, t2, t1);
        tcg_gen_extrl_i64_i32(maclo, t2);
        tcg_gen_shri_i64(t2, t2, 32);
        tcg_gen_extrl_i64_i32(machi, t2);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);

        gen_illegal_exception(dc);
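
/* Shift/rotate-by-immediate group; only the low five bits of L6 are used
   as the shift amount. */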
static void dec_logic(DisasContext *dc, uint32_t insn)
{
    uint32_t rd, ra, L6;

    op0 = extract32(insn, 6, 2);
    rd = extract32(insn, 21, 5);
    ra = extract32(insn, 16, 5);
    L6 = extract32(insn, 0, 6);

    case 0x00: /* l.slli */
        LOG_DIS("l.slli r%d, r%d, %d\n", rd, ra, L6);
        tcg_gen_shli_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));

    case 0x01: /* l.srli */
        LOG_DIS("l.srli r%d, r%d, %d\n", rd, ra, L6);
        tcg_gen_shri_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));

    case 0x02: /* l.srai */
        LOG_DIS("l.srai r%d, r%d, %d\n", rd, ra, L6);
        tcg_gen_sari_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));
        break;

    case 0x03: /* l.rori */
        LOG_DIS("l.rori r%d, r%d, %d\n", rd, ra, L6);
        tcg_gen_rotri_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));

        gen_illegal_exception(dc);
static void dec_M(DisasContext *dc, uint32_t insn)
{
    op0 = extract32(insn, 16, 1);
    rd = extract32(insn, 21, 5);
    K16 = extract32(insn, 0, 16);

    case 0x0: /* l.movhi */
        LOG_DIS("l.movhi r%d, %d\n", rd, K16);
        tcg_gen_movi_tl(cpu_R[rd], (K16 << 16));

    case 0x1: /* l.macrc */
        LOG_DIS("l.macrc r%d\n", rd);
        tcg_gen_mov_tl(cpu_R[rd], maclo);
        tcg_gen_movi_tl(maclo, 0x0);
        tcg_gen_movi_tl(machi, 0x0);

        gen_illegal_exception(dc);
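
/* Register-register set-flag comparisons: the result is latched in
   env_btaken and folded back into SR_F by wb_SR_F() above. */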
static void dec_comp(DisasContext *dc, uint32_t insn)
{
    op0 = extract32(insn, 21, 5);
    ra = extract32(insn, 16, 5);
    rb = extract32(insn, 11, 5);

    tcg_gen_movi_i32(env_btaken, 0x0);
    /* unsigned integers */
    tcg_gen_ext32u_tl(cpu_R[ra], cpu_R[ra]);
    tcg_gen_ext32u_tl(cpu_R[rb], cpu_R[rb]);

    case 0x0: /* l.sfeq */
        LOG_DIS("l.sfeq r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_EQ, env_btaken, cpu_R[ra], cpu_R[rb]);

    case 0x1: /* l.sfne */
        LOG_DIS("l.sfne r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_NE, env_btaken, cpu_R[ra], cpu_R[rb]);

    case 0x2: /* l.sfgtu */
        LOG_DIS("l.sfgtu r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_GTU, env_btaken, cpu_R[ra], cpu_R[rb]);

    case 0x3: /* l.sfgeu */
        LOG_DIS("l.sfgeu r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_GEU, env_btaken, cpu_R[ra], cpu_R[rb]);

    case 0x4: /* l.sfltu */
        LOG_DIS("l.sfltu r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_LTU, env_btaken, cpu_R[ra], cpu_R[rb]);

    case 0x5: /* l.sfleu */
        LOG_DIS("l.sfleu r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_LEU, env_btaken, cpu_R[ra], cpu_R[rb]);

    case 0xa: /* l.sfgts */
        LOG_DIS("l.sfgts r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_GT, env_btaken, cpu_R[ra], cpu_R[rb]);

    case 0xb: /* l.sfges */
        LOG_DIS("l.sfges r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_GE, env_btaken, cpu_R[ra], cpu_R[rb]);

    case 0xc: /* l.sflts */
        LOG_DIS("l.sflts r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_LT, env_btaken, cpu_R[ra], cpu_R[rb]);

    case 0xd: /* l.sfles */
        LOG_DIS("l.sfles r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_LE, env_btaken, cpu_R[ra], cpu_R[rb]);

        gen_illegal_exception(dc);
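
/* Immediate forms of the comparisons above; I16 is sign-extended before
   the compare. */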
static void dec_compi(DisasContext *dc, uint32_t insn)
{
    op0 = extract32(insn, 21, 5);
    ra = extract32(insn, 16, 5);
    I16 = extract32(insn, 0, 16);

    tcg_gen_movi_i32(env_btaken, 0x0);
    I16 = sign_extend(I16, 16);

    case 0x0: /* l.sfeqi */
        LOG_DIS("l.sfeqi r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_EQ, env_btaken, cpu_R[ra], I16);

    case 0x1: /* l.sfnei */
        LOG_DIS("l.sfnei r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_NE, env_btaken, cpu_R[ra], I16);

    case 0x2: /* l.sfgtui */
        LOG_DIS("l.sfgtui r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_GTU, env_btaken, cpu_R[ra], I16);

    case 0x3: /* l.sfgeui */
        LOG_DIS("l.sfgeui r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_GEU, env_btaken, cpu_R[ra], I16);

    case 0x4: /* l.sfltui */
        LOG_DIS("l.sfltui r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_LTU, env_btaken, cpu_R[ra], I16);

    case 0x5: /* l.sfleui */
        LOG_DIS("l.sfleui r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_LEU, env_btaken, cpu_R[ra], I16);

    case 0xa: /* l.sfgtsi */
        LOG_DIS("l.sfgtsi r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_GT, env_btaken, cpu_R[ra], I16);

    case 0xb: /* l.sfgesi */
        LOG_DIS("l.sfgesi r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_GE, env_btaken, cpu_R[ra], I16);

    case 0xc: /* l.sfltsi */
        LOG_DIS("l.sfltsi r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_LT, env_btaken, cpu_R[ra], I16);

    case 0xd: /* l.sflesi */
        LOG_DIS("l.sflesi r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_LE, env_btaken, cpu_R[ra], I16);

        gen_illegal_exception(dc);
static void dec_sys(DisasContext *dc, uint32_t insn)
{
#ifdef OPENRISC_DISAS
    uint32_t K16;
#endif

    op0 = extract32(insn, 16, 10);
#ifdef OPENRISC_DISAS
    K16 = extract32(insn, 0, 16);
#endif

    case 0x000: /* l.sys */
        LOG_DIS("l.sys %d\n", K16);
        tcg_gen_movi_tl(cpu_pc, dc->pc);
        gen_exception(dc, EXCP_SYSCALL);
        dc->is_jmp = DISAS_UPDATE;

    case 0x100: /* l.trap */
        LOG_DIS("l.trap %d\n", K16);
#if defined(CONFIG_USER_ONLY)

        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);

        tcg_gen_movi_tl(cpu_pc, dc->pc);
        gen_exception(dc, EXCP_TRAP);

    case 0x300: /* l.csync */
        LOG_DIS("l.csync\n");
#if defined(CONFIG_USER_ONLY)

        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);

    case 0x200: /* l.msync */
        LOG_DIS("l.msync\n");
#if defined(CONFIG_USER_ONLY)

        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);

    case 0x270: /* l.psync */
        LOG_DIS("l.psync\n");
#if defined(CONFIG_USER_ONLY)

        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);

        gen_illegal_exception(dc);
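
/* Single-precision FP instructions go through the softfloat helpers; the
   FP compare results land in env_btaken just like the integer set-flag
   instructions. */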
static void dec_float(DisasContext *dc, uint32_t insn)
{
    uint32_t ra, rb, rd;

    op0 = extract32(insn, 0, 8);
    ra = extract32(insn, 16, 5);
    rb = extract32(insn, 11, 5);
    rd = extract32(insn, 21, 5);

    case 0x00: /* lf.add.s */
        LOG_DIS("lf.add.s r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_add_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x01: /* lf.sub.s */
        LOG_DIS("lf.sub.s r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_sub_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x02: /* lf.mul.s */
        LOG_DIS("lf.mul.s r%d, r%d, r%d\n", rd, ra, rb);
        if (ra != 0 && rb != 0) {
            gen_helper_float_mul_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

            tcg_gen_ori_tl(fpcsr, fpcsr, FPCSR_ZF);
            tcg_gen_movi_i32(cpu_R[rd], 0x0);

    case 0x03: /* lf.div.s */
        LOG_DIS("lf.div.s r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_div_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x04: /* lf.itof.s */
        LOG_DIS("lf.itof r%d, r%d\n", rd, ra);
        gen_helper_itofs(cpu_R[rd], cpu_env, cpu_R[ra]);

    case 0x05: /* lf.ftoi.s */
        LOG_DIS("lf.ftoi r%d, r%d\n", rd, ra);
        gen_helper_ftois(cpu_R[rd], cpu_env, cpu_R[ra]);

    case 0x06: /* lf.rem.s */
        LOG_DIS("lf.rem.s r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_rem_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x07: /* lf.madd.s */
        LOG_DIS("lf.madd.s r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_muladd_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x08: /* lf.sfeq.s */
        LOG_DIS("lf.sfeq.s r%d, r%d\n", ra, rb);
        gen_helper_float_eq_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x09: /* lf.sfne.s */
        LOG_DIS("lf.sfne.s r%d, r%d\n", ra, rb);
        gen_helper_float_ne_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x0a: /* lf.sfgt.s */
        LOG_DIS("lf.sfgt.s r%d, r%d\n", ra, rb);
        gen_helper_float_gt_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x0b: /* lf.sfge.s */
        LOG_DIS("lf.sfge.s r%d, r%d\n", ra, rb);
        gen_helper_float_ge_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x0c: /* lf.sflt.s */
        LOG_DIS("lf.sflt.s r%d, r%d\n", ra, rb);
        gen_helper_float_lt_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x0d: /* lf.sfle.s */
        LOG_DIS("lf.sfle.s r%d, r%d\n", ra, rb);
        gen_helper_float_le_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
    /* Not used yet; enable when or64 support is added. */
/*#ifdef TARGET_OPENRISC64
        LOG_DIS("lf.add.d r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_add_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

        LOG_DIS("lf.sub.d r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_sub_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

        LOG_DIS("lf.mul.d r%d, r%d, r%d\n", rd, ra, rb);
        if (ra != 0 && rb != 0) {
            gen_helper_float_mul_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

            tcg_gen_ori_tl(fpcsr, fpcsr, FPCSR_ZF);
            tcg_gen_movi_i64(cpu_R[rd], 0x0);

        LOG_DIS("lf.div.d r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_div_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x14: lf.itof.d
        LOG_DIS("lf.itof r%d, r%d\n", rd, ra);
        gen_helper_itofd(cpu_R[rd], cpu_env, cpu_R[ra]);

    case 0x15: lf.ftoi.d
        LOG_DIS("lf.ftoi r%d, r%d\n", rd, ra);
        gen_helper_ftoid(cpu_R[rd], cpu_env, cpu_R[ra]);

        LOG_DIS("lf.rem.d r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_rem_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x17: lf.madd.d
        LOG_DIS("lf.madd.d r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_muladd_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x18: lf.sfeq.d
        LOG_DIS("lf.sfeq.d r%d, r%d\n", ra, rb);
        gen_helper_float_eq_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x1a: lf.sfgt.d
        LOG_DIS("lf.sfgt.d r%d, r%d\n", ra, rb);
        gen_helper_float_gt_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x1b: lf.sfge.d
        LOG_DIS("lf.sfge.d r%d, r%d\n", ra, rb);
        gen_helper_float_ge_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x19: lf.sfne.d
        LOG_DIS("lf.sfne.d r%d, r%d\n", ra, rb);
        gen_helper_float_ne_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x1c: lf.sflt.d
        LOG_DIS("lf.sflt.d r%d, r%d\n", ra, rb);
        gen_helper_float_lt_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x1d: lf.sfle.d
        LOG_DIS("lf.sfle.d r%d, r%d\n", ra, rb);
        gen_helper_float_le_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
#endif*/

        gen_illegal_exception(dc);
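
/* Decode one instruction: the major opcode in bits 31..26 selects the
   dec_* routine that handles the rest of the encoding. */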
static void disas_openrisc_insn(DisasContext *dc, OpenRISCCPU *cpu)
{
    insn = cpu_ldl_code(&cpu->env, dc->pc);
    op0 = extract32(insn, 26, 6);

        dec_logic(dc, insn);

        dec_compi(dc, insn);

        dec_float(dc, insn);
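
/* Main translation loop: decode until a jump is emitted, the TCG op buffer
   fills up, the next page boundary is reached or the instruction budget is
   exhausted; a pending delayed branch is resolved one instruction after the
   branch itself. */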
void gen_intermediate_code(CPUOpenRISCState *env, struct TranslationBlock *tb)
{
    OpenRISCCPU *cpu = openrisc_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    struct DisasContext ctx, *dc = &ctx;

    uint32_t next_page_start;

    dc->is_jmp = DISAS_NEXT;

    dc->flags = cpu->env.cpucfgr;
    dc->mem_idx = cpu_mmu_index(&cpu->env, false);
    dc->synced_flags = dc->tb_flags = tb->flags;
    dc->delayed_branch = !!(dc->tb_flags & D_FLAG);
    dc->singlestep_enabled = cs->singlestep_enabled;
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("-----------------------------------------\n");
        log_cpu_state(CPU(cpu), 0);
    }

    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;

    max_insns = tb->cflags & CF_COUNT_MASK;

    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

        tcg_gen_insn_start(dc->pc);

        if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            gen_exception(dc, EXCP_DEBUG);
            dc->is_jmp = DISAS_UPDATE;
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be properly
               cleared -- thus we increment the PC here so that the logic
               setting tb->size below does the right thing. */
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {

        dc->ppc = dc->pc - 4;
        dc->npc = dc->pc + 4;
        tcg_gen_movi_tl(cpu_ppc, dc->ppc);
        tcg_gen_movi_tl(cpu_npc, dc->npc);
        disas_openrisc_insn(dc, cpu);

        if (dc->delayed_branch) {
            dc->delayed_branch--;
            if (!dc->delayed_branch) {
                dc->tb_flags &= ~D_FLAG;

                tcg_gen_mov_tl(cpu_pc, jmp_pc);
                tcg_gen_mov_tl(cpu_npc, jmp_pc);
                tcg_gen_movi_tl(jmp_pc, 0);

                dc->is_jmp = DISAS_JUMP;

    } while (!dc->is_jmp
             && !tcg_op_buf_full()
             && !cs->singlestep_enabled

             && (dc->pc < next_page_start)
             && num_insns < max_insns);
    if (tb->cflags & CF_LAST_IO) {

    if (dc->is_jmp == DISAS_NEXT) {
        dc->is_jmp = DISAS_UPDATE;
        tcg_gen_movi_tl(cpu_pc, dc->pc);

    if (unlikely(cs->singlestep_enabled)) {
        if (dc->is_jmp == DISAS_NEXT) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);

        gen_exception(dc, EXCP_DEBUG);

    switch (dc->is_jmp) {

        gen_goto_tb(dc, 0, dc->pc);

        /* indicate that the hash table must be used
           to find the next TB */

        /* nothing more to generate */

    gen_tb_end(tb, num_insns);

    tb->size = dc->pc - pc_start;
    tb->icount = num_insns;

    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {

        log_target_disas(cs, pc_start, dc->pc - pc_start, 0);
        qemu_log("\nisize=%d osize=%d\n",
                 dc->pc - pc_start, tcg_op_buf_count());
void openrisc_cpu_dump_state(CPUState *cs, FILE *f,
                             fprintf_function cpu_fprintf,
                             int flags)
{
    OpenRISCCPU *cpu = OPENRISC_CPU(cs);
    CPUOpenRISCState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "PC=%08x\n", env->pc);
    for (i = 0; i < 32; ++i) {
        cpu_fprintf(f, "R%02d=%08x%c", i, env->gpr[i],
                    (i % 4) == 3 ? '\n' : ' ');
    }
}

void restore_state_to_opc(CPUOpenRISCState *env, TranslationBlock *tb,